gcc/omp-low.c
/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new function, to be
   invoked by the thread library, or offloaded.  */
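
/* As a rough illustration (not the literal output of this file, which
   works on GIMPLE rather than source), the two phases together turn

     #pragma omp parallel shared (x)
     { x++; }

   into something like

     struct .omp_data_s { int *x; } .omp_data_o;
     .omp_data_o.x = &x;
     GOMP_parallel (main._omp_fn.0, &.omp_data_o, 0, 0);

     static void main._omp_fn.0 (struct .omp_data_s *.omp_data_i)
     { (*.omp_data_i->x)++; }

   with the body split out into a child function and the shared variable
   passed through the .omp_data_s record that this pass builds.  */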
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
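
/* A typical use appears in build_outer_var_ref below: given a
   DECL_VALUE_EXPR X mentioning the dummy "this" parameter T and its
   remapped counterpart O, the call

     x = unshare_and_remap (x, t, o);

   yields a fresh copy of the expression with T replaced by O, so the
   shared tree is never modified in place.  */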
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" form allows the variable to not have
   been entered; otherwise we assert that it must have been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
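
/* So, informally: a plain shared "int x" whose address is never taken
   gets copy-in/copy-out (the record field holds an int), while an
   aggregate, an addressable scalar, or anything shared into a task is
   passed by pointer (the field holds an int *).  */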
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and was made so only because the task
     needed to take its address.  But we don't need to take the
     address of privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If VAR is a reference,
       it may well be shared, in which case it is valid here.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
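
/* For the common case of a variable shared into a parallel region this
   therefore produces a reference through the receiver record, roughly
   .omp_data_i->var, with one more indirection when the field is a
   pointer as decided by use_pointer_for_field.  */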
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */
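/* MASK is a bitmask, as the assertions and uses below show: bit 1
   installs the field in CTX->RECORD_TYPE/FIELD_MAP, bit 2 in
   CTX->SRECORD_TYPE/SFIELD_MAP, bit 4 requests a pointer-to-pointer
   field for an array, and bit 8 keys the maps off &DECL_UID (VAR)
   rather than VAR itself.  */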
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it
     before it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
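
/* The receiver parameter .omp_data_i thus ends up typed as a restrict
   reference to the (possibly remapped, possibly const-qualified)
   record, which helps alias analysis treat the incoming shared data
   block as disjoint from other memory in the child function.  */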
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;
	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;
	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;
	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;
	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;
	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;
	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}
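
/* clone_function_name appends the suffix plus a counter, so the child
   functions of foo end up with names like foo._omp_fn.0 and, for task
   copy constructors, foo._omp_cpyfn.1.  */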
/* Return true if CTX may belong to offloaded code: either if the current
   function is offloaded, or any enclosing context corresponds to a target
   region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
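
/* For instance, a combined "parallel for collapse(2)" whose loop bounds
   aren't compile-time constants gets three _looptemp_ clauses (istart,
   iend and count2 per the counting above), or four when a lastprivate
   clause is present as well.  */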
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have a depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_context returns non-NULL for some
   tree, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
	{
	  if (DECL_HAS_VALUE_EXPR_P (t))
	    t = unshare_expr (DECL_VALUE_EXPR (t));
	  *tp = t;
	}
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, check for all OMP_CLAUSE_SHARED
     clauses on GIMPLE_OMP_{PARALLEL,TASK} statements whether
     use_pointer_for_field has changed because of that.  If it did,
     update the field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }
1928 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1930 layout_type (ctx->record_type);
1931 fixup_child_record_type (ctx);
1933 else
1935 location_t loc = gimple_location (ctx->stmt);
1936 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1937 /* Move VLA fields to the end. */
1938 p = &TYPE_FIELDS (ctx->record_type);
1939 while (*p)
1940 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1941 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1943 *q = *p;
1944 *p = TREE_CHAIN (*p);
1945 TREE_CHAIN (*q) = NULL_TREE;
1946 q = &TREE_CHAIN (*q);
1948 else
1949 p = &DECL_CHAIN (*p);
1950 *p = vla_fields;
1951 if (gimple_omp_task_taskloop_p (ctx->stmt))
1953 /* Move the fields corresponding to the first and second _looptemp_
1954 clauses to the front. These are filled in by GOMP_taskloop
1955 and thus need to be in specific positions. */
1956 tree c1 = gimple_omp_task_clauses (ctx->stmt);
1957 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1958 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
1959 OMP_CLAUSE__LOOPTEMP_);
1960 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1961 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
1962 p = &TYPE_FIELDS (ctx->record_type);
1963 while (*p)
1964 if (*p == f1 || *p == f2)
1965 *p = DECL_CHAIN (*p);
1966 else
1967 p = &DECL_CHAIN (*p);
1968 DECL_CHAIN (f1) = f2;
1969 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
1970 TYPE_FIELDS (ctx->record_type) = f1;
1971 if (ctx->srecord_type)
1973 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
1974 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
1975 p = &TYPE_FIELDS (ctx->srecord_type);
1976 while (*p)
1977 if (*p == f1 || *p == f2)
1978 *p = DECL_CHAIN (*p);
1979 else
1980 p = &DECL_CHAIN (*p);
1981 DECL_CHAIN (f1) = f2;
1982 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
1983 TYPE_FIELDS (ctx->srecord_type) = f1;
1986 layout_type (ctx->record_type);
1987 fixup_child_record_type (ctx);
1988 if (ctx->srecord_type)
1989 layout_type (ctx->srecord_type);
1990 tree t = fold_convert_loc (loc, long_integer_type_node,
1991 TYPE_SIZE_UNIT (ctx->record_type));
1992 if (TREE_CODE (t) != INTEGER_CST)
1994 t = unshare_expr (t);
1995 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
1997 gimple_omp_task_set_arg_size (ctx->stmt, t);
1998 t = build_int_cst (long_integer_type_node,
1999 TYPE_ALIGN_UNIT (ctx->record_type));
2000 gimple_omp_task_set_arg_align (ctx->stmt, t);
2004 /* Find the enclosing offload context. */
2006 static omp_context *
2007 enclosing_target_ctx (omp_context *ctx)
2009 for (; ctx; ctx = ctx->outer)
2010 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2011 break;
2013 return ctx;
2016 /* Return true if ctx is part of an oacc kernels region. */
2018 static bool
2019 ctx_in_oacc_kernels_region (omp_context *ctx)
2021 for (; ctx != NULL; ctx = ctx->outer)
2023 gimple *stmt = ctx->stmt;
2024 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2025 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2026 return true;
2029 return false;
2032 /* Check the parallelism clauses inside a kernels region.
2033 Until kernels handling moves to use the same loop indirection
2034 scheme as parallel, we need to do this checking early. */
2036 static unsigned
2037 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2039 bool checking = true;
2040 unsigned outer_mask = 0;
2041 unsigned this_mask = 0;
2042 bool has_seq = false, has_auto = false;
2044 if (ctx->outer)
2045 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2046 if (!stmt)
2048 checking = false;
2049 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2050 return outer_mask;
2051 stmt = as_a <gomp_for *> (ctx->stmt);
2054 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2056 switch (OMP_CLAUSE_CODE (c))
2058 case OMP_CLAUSE_GANG:
2059 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2060 break;
2061 case OMP_CLAUSE_WORKER:
2062 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2063 break;
2064 case OMP_CLAUSE_VECTOR:
2065 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2066 break;
2067 case OMP_CLAUSE_SEQ:
2068 has_seq = true;
2069 break;
2070 case OMP_CLAUSE_AUTO:
2071 has_auto = true;
2072 break;
2073 default:
2074 break;
2078 if (checking)
2080 if (has_seq && (this_mask || has_auto))
2081 error_at (gimple_location (stmt), "%<seq%> overrides other"
2082 " OpenACC loop specifiers");
2083 else if (has_auto && this_mask)
2084 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2085 " OpenACC loop specifiers");
2087 if (this_mask & outer_mask)
2088 error_at (gimple_location (stmt), "inner loop uses same"
2089 " OpenACC parallelism as containing loop");
2092 return outer_mask | this_mask;
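/* For instance (illustrative), the checking above rejects

     #pragma acc kernels
     {
       #pragma acc loop gang
       for (...)
         #pragma acc loop gang
         for (...)
           ...
     }

   because the gang bit is already set in OUTER_MASK by the time the
   inner loop is checked.  */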
2095 /* Scan a GIMPLE_OMP_FOR. */
2097 static omp_context *
2098 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2100 omp_context *ctx;
2101 size_t i;
2102 tree clauses = gimple_omp_for_clauses (stmt);
2104 ctx = new_omp_context (stmt, outer_ctx);
2106 if (is_gimple_omp_oacc (stmt))
2108 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2110 if (!tgt || is_oacc_parallel (tgt))
2111 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2113 char const *check = NULL;
2115 switch (OMP_CLAUSE_CODE (c))
2117 case OMP_CLAUSE_GANG:
2118 check = "gang";
2119 break;
2121 case OMP_CLAUSE_WORKER:
2122 check = "worker";
2123 break;
2125 case OMP_CLAUSE_VECTOR:
2126 check = "vector";
2127 break;
2129 default:
2130 break;
2133 if (check && OMP_CLAUSE_OPERAND (c, 0))
2134 error_at (gimple_location (stmt),
2135 "argument not permitted on %qs clause in"
2136 " OpenACC %<parallel%>", check);
2139 if (tgt && is_oacc_kernels (tgt))
2141 /* Strip out reductions, as they are not handled yet. */
2142 tree *prev_ptr = &clauses;
2144 while (tree probe = *prev_ptr)
2146 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2148 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2149 *prev_ptr = *next_ptr;
2150 else
2151 prev_ptr = next_ptr;
2154 gimple_omp_for_set_clauses (stmt, clauses);
2155 check_oacc_kernel_gwv (stmt, ctx);
2159 scan_sharing_clauses (clauses, ctx);
2161 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2162 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2164 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2165 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2166 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2167 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2169 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2170 return ctx;
2173 /* Duplicate #pragma omp simd, one copy for SIMT, another one for SIMD. */
2175 static void
2176 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2177 omp_context *outer_ctx)
2179 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2180 gsi_replace (gsi, bind, false);
2181 gimple_seq seq = NULL;
2182 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2183 tree cond = create_tmp_var_raw (integer_type_node);
2184 DECL_CONTEXT (cond) = current_function_decl;
2185 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2186 gimple_bind_set_vars (bind, cond);
2187 gimple_call_set_lhs (g, cond);
2188 gimple_seq_add_stmt (&seq, g);
2189 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2190 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2191 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2192 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2193 gimple_seq_add_stmt (&seq, g);
2194 g = gimple_build_label (lab1);
2195 gimple_seq_add_stmt (&seq, g);
2196 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2197 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2198 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2199 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2200 gimple_omp_for_set_clauses (new_stmt, clause);
2201 gimple_seq_add_stmt (&seq, new_stmt);
2202 g = gimple_build_goto (lab3);
2203 gimple_seq_add_stmt (&seq, g);
2204 g = gimple_build_label (lab2);
2205 gimple_seq_add_stmt (&seq, g);
2206 gimple_seq_add_stmt (&seq, stmt);
2207 g = gimple_build_label (lab3);
2208 gimple_seq_add_stmt (&seq, g);
2209 gimple_bind_set_body (bind, seq);
2210 update_stmt (bind);
2211 scan_omp_for (new_stmt, outer_ctx);
2212 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
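/* The shape of the sequence built above is roughly (illustrative):

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop, with an extra _simt_ clause>; goto lab3;
     lab2: <original simd loop>;
     lab3: ;

   so that later passes can retain whichever version fits the target.  */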
2215 /* Scan an OpenMP sections directive. */
2217 static void
2218 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2220 omp_context *ctx;
2222 ctx = new_omp_context (stmt, outer_ctx);
2223 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2224 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2227 /* Scan an OpenMP single directive. */
2229 static void
2230 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2232 omp_context *ctx;
2233 tree name;
2235 ctx = new_omp_context (stmt, outer_ctx);
2236 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2237 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2238 name = create_tmp_var_name (".omp_copy_s");
2239 name = build_decl (gimple_location (stmt),
2240 TYPE_DECL, name, ctx->record_type);
2241 TYPE_NAME (ctx->record_type) = name;
2243 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2244 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2246 if (TYPE_FIELDS (ctx->record_type) == NULL)
2247 ctx->record_type = NULL;
2248 else
2249 layout_type (ctx->record_type);
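/* Illustrative sketch: for

     #pragma omp single copyprivate (x)

   scan_sharing_clauses adds a field for x to the .omp_copy_s record
   built above, used to broadcast x from the thread that executed the
   single region to the other threads; without copyprivate clauses the
   record stays empty and is discarded.  */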
2252 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2253 used in the corresponding offloaded function are restrict. */
2255 static bool
2256 omp_target_base_pointers_restrict_p (tree clauses)
2258 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2259 used by OpenACC. */
2260 if (flag_openacc == 0)
2261 return false;
2263 /* I. Basic example:
2265 void foo (void)
2267 unsigned int a[2], b[2];
2269 #pragma acc kernels \
2270 copyout (a) \
2271 copyout (b)
2273 a[0] = 0;
2274 b[0] = 1;
2278 After gimplification, we have:
2280 #pragma omp target oacc_kernels \
2281 map(force_from:a [len: 8]) \
2282 map(force_from:b [len: 8])
2284 a[0] = 0;
2285 b[0] = 1;
2288 Because both mappings have the force prefix, we know that they will be
2289 allocated when calling the corresponding offloaded function, which means we
2290 can mark the base pointers for a and b in the offloaded function as
2291 restrict. */
2293 tree c;
2294 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2296 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2297 return false;
2299 switch (OMP_CLAUSE_MAP_KIND (c))
2301 case GOMP_MAP_FORCE_ALLOC:
2302 case GOMP_MAP_FORCE_TO:
2303 case GOMP_MAP_FORCE_FROM:
2304 case GOMP_MAP_FORCE_TOFROM:
2305 break;
2306 default:
2307 return false;
2311 return true;
2314 /* Scan a GIMPLE_OMP_TARGET. */
2316 static void
2317 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2319 omp_context *ctx;
2320 tree name;
2321 bool offloaded = is_gimple_omp_offloaded (stmt);
2322 tree clauses = gimple_omp_target_clauses (stmt);
2324 ctx = new_omp_context (stmt, outer_ctx);
2325 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2326 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2327 name = create_tmp_var_name (".omp_data_t");
2328 name = build_decl (gimple_location (stmt),
2329 TYPE_DECL, name, ctx->record_type);
2330 DECL_ARTIFICIAL (name) = 1;
2331 DECL_NAMELESS (name) = 1;
2332 TYPE_NAME (ctx->record_type) = name;
2333 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2335 bool base_pointers_restrict = false;
2336 if (offloaded)
2338 create_omp_child_function (ctx, false);
2339 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2341 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2342 if (base_pointers_restrict
2343 && dump_file && (dump_flags & TDF_DETAILS))
2344 fprintf (dump_file,
2345 "Base pointers in offloaded function are restrict\n");
2348 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2349 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2351 if (TYPE_FIELDS (ctx->record_type) == NULL)
2352 ctx->record_type = ctx->receiver_decl = NULL;
2353 else
2355 TYPE_FIELDS (ctx->record_type)
2356 = nreverse (TYPE_FIELDS (ctx->record_type));
2357 if (flag_checking)
2359 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2360 for (tree field = TYPE_FIELDS (ctx->record_type);
2361 field;
2362 field = DECL_CHAIN (field))
2363 gcc_assert (DECL_ALIGN (field) == align);
2365 layout_type (ctx->record_type);
2366 if (offloaded)
2367 fixup_child_record_type (ctx);
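/* Illustrative sketch (field type shown loosely): for

     int a[8];
     #pragma omp target map (tofrom: a)

   the scan builds a .omp_data_t record with one pointer-sized field
   referring to a; the fields are put back into clause order above,
   and when checking is enabled they are all asserted to share one
   alignment so the receiver side can rely on a fixed layout.  */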
2371 /* Scan an OpenMP teams directive. */
2373 static void
2374 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2376 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2377 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2378 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2381 /* Check nesting restrictions. */
2382 static bool
2383 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2385 tree c;
2387 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2388 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2389 the original copy of its contents. */
2390 return true;
2392 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2393 inside an OpenACC CTX. */
2394 if (!(is_gimple_omp (stmt)
2395 && is_gimple_omp_oacc (stmt))
2396 /* Except for atomic codes that we share with OpenMP. */
2397 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2398 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2400 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2402 error_at (gimple_location (stmt),
2403 "non-OpenACC construct inside of OpenACC routine");
2404 return false;
2406 else
2407 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2408 if (is_gimple_omp (octx->stmt)
2409 && is_gimple_omp_oacc (octx->stmt))
2411 error_at (gimple_location (stmt),
2412 "non-OpenACC construct inside of OpenACC region");
2413 return false;
2417 if (ctx != NULL)
2419 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2420 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2422 c = NULL_TREE;
2423 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2425 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2426 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2428 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2429 && (ctx->outer == NULL
2430 || !gimple_omp_for_combined_into_p (ctx->stmt)
2431 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2432 || (gimple_omp_for_kind (ctx->outer->stmt)
2433 != GF_OMP_FOR_KIND_FOR)
2434 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2436 error_at (gimple_location (stmt),
2437 "%<ordered simd threads%> must be closely "
2438 "nested inside of %<for simd%> region");
2439 return false;
2441 return true;
2444 error_at (gimple_location (stmt),
2445 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2446 " may not be nested inside %<simd%> region");
2447 return false;
2449 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2451 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2452 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2453 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2454 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2456 error_at (gimple_location (stmt),
2457 "only %<distribute%> or %<parallel%> regions are "
2458 "allowed to be strictly nested inside %<teams%> "
2459 "region");
2460 return false;
2464 switch (gimple_code (stmt))
2466 case GIMPLE_OMP_FOR:
2467 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2468 return true;
2469 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2471 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2473 error_at (gimple_location (stmt),
2474 "%<distribute%> region must be strictly nested "
2475 "inside %<teams%> construct");
2476 return false;
2478 return true;
2480 /* We split a taskloop into a task with a nested taskloop inside it. */
2481 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2482 return true;
2483 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2485 bool ok = false;
2487 if (ctx)
2488 switch (gimple_code (ctx->stmt))
2490 case GIMPLE_OMP_FOR:
2491 ok = (gimple_omp_for_kind (ctx->stmt)
2492 == GF_OMP_FOR_KIND_OACC_LOOP);
2493 break;
2495 case GIMPLE_OMP_TARGET:
2496 switch (gimple_omp_target_kind (ctx->stmt))
2498 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2499 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2500 ok = true;
2501 break;
2503 default:
2504 break;
2507 default:
2508 break;
2510 else if (oacc_get_fn_attrib (current_function_decl))
2511 ok = true;
2512 if (!ok)
2514 error_at (gimple_location (stmt),
2515 "OpenACC loop directive must be associated with"
2516 " an OpenACC compute region");
2517 return false;
2520 /* FALLTHRU */
2521 case GIMPLE_CALL:
2522 if (is_gimple_call (stmt)
2523 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2524 == BUILT_IN_GOMP_CANCEL
2525 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2526 == BUILT_IN_GOMP_CANCELLATION_POINT))
2528 const char *bad = NULL;
2529 const char *kind = NULL;
2530 const char *construct
2531 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2532 == BUILT_IN_GOMP_CANCEL)
2533 ? "#pragma omp cancel"
2534 : "#pragma omp cancellation point";
2535 if (ctx == NULL)
2537 error_at (gimple_location (stmt), "orphaned %qs construct",
2538 construct);
2539 return false;
2541 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2542 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2543 : 0)
2545 case 1:
2546 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2547 bad = "#pragma omp parallel";
2548 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2549 == BUILT_IN_GOMP_CANCEL
2550 && !integer_zerop (gimple_call_arg (stmt, 1)))
2551 ctx->cancellable = true;
2552 kind = "parallel";
2553 break;
2554 case 2:
2555 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2556 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2557 bad = "#pragma omp for";
2558 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2559 == BUILT_IN_GOMP_CANCEL
2560 && !integer_zerop (gimple_call_arg (stmt, 1)))
2562 ctx->cancellable = true;
2563 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2564 OMP_CLAUSE_NOWAIT))
2565 warning_at (gimple_location (stmt), 0,
2566 "%<#pragma omp cancel for%> inside "
2567 "%<nowait%> for construct");
2568 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2569 OMP_CLAUSE_ORDERED))
2570 warning_at (gimple_location (stmt), 0,
2571 "%<#pragma omp cancel for%> inside "
2572 "%<ordered%> for construct");
2574 kind = "for";
2575 break;
2576 case 4:
2577 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2578 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2579 bad = "#pragma omp sections";
2580 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2581 == BUILT_IN_GOMP_CANCEL
2582 && !integer_zerop (gimple_call_arg (stmt, 1)))
2584 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2586 ctx->cancellable = true;
2587 if (omp_find_clause (gimple_omp_sections_clauses
2588 (ctx->stmt),
2589 OMP_CLAUSE_NOWAIT))
2590 warning_at (gimple_location (stmt), 0,
2591 "%<#pragma omp cancel sections%> inside "
2592 "%<nowait%> sections construct");
2594 else
2596 gcc_assert (ctx->outer
2597 && gimple_code (ctx->outer->stmt)
2598 == GIMPLE_OMP_SECTIONS);
2599 ctx->outer->cancellable = true;
2600 if (omp_find_clause (gimple_omp_sections_clauses
2601 (ctx->outer->stmt),
2602 OMP_CLAUSE_NOWAIT))
2603 warning_at (gimple_location (stmt), 0,
2604 "%<#pragma omp cancel sections%> inside "
2605 "%<nowait%> sections construct");
2608 kind = "sections";
2609 break;
2610 case 8:
2611 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2612 bad = "#pragma omp task";
2613 else
2615 for (omp_context *octx = ctx->outer;
2616 octx; octx = octx->outer)
2618 switch (gimple_code (octx->stmt))
2620 case GIMPLE_OMP_TASKGROUP:
2621 break;
2622 case GIMPLE_OMP_TARGET:
2623 if (gimple_omp_target_kind (octx->stmt)
2624 != GF_OMP_TARGET_KIND_REGION)
2625 continue;
2626 /* FALLTHRU */
2627 case GIMPLE_OMP_PARALLEL:
2628 case GIMPLE_OMP_TEAMS:
2629 error_at (gimple_location (stmt),
2630 "%<%s taskgroup%> construct not closely "
2631 "nested inside of %<taskgroup%> region",
2632 construct);
2633 return false;
2634 default:
2635 continue;
2637 break;
2639 ctx->cancellable = true;
2641 kind = "taskgroup";
2642 break;
2643 default:
2644 error_at (gimple_location (stmt), "invalid arguments");
2645 return false;
2647 if (bad)
2649 error_at (gimple_location (stmt),
2650 "%<%s %s%> construct not closely nested inside of %qs",
2651 construct, kind, bad);
2652 return false;
2655 /* FALLTHRU */
2656 case GIMPLE_OMP_SECTIONS:
2657 case GIMPLE_OMP_SINGLE:
2658 for (; ctx != NULL; ctx = ctx->outer)
2659 switch (gimple_code (ctx->stmt))
2661 case GIMPLE_OMP_FOR:
2662 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2663 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2664 break;
2665 /* FALLTHRU */
2666 case GIMPLE_OMP_SECTIONS:
2667 case GIMPLE_OMP_SINGLE:
2668 case GIMPLE_OMP_ORDERED:
2669 case GIMPLE_OMP_MASTER:
2670 case GIMPLE_OMP_TASK:
2671 case GIMPLE_OMP_CRITICAL:
2672 if (is_gimple_call (stmt))
2674 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2675 != BUILT_IN_GOMP_BARRIER)
2676 return true;
2677 error_at (gimple_location (stmt),
2678 "barrier region may not be closely nested inside "
2679 "of work-sharing, %<critical%>, %<ordered%>, "
2680 "%<master%>, explicit %<task%> or %<taskloop%> "
2681 "region");
2682 return false;
2684 error_at (gimple_location (stmt),
2685 "work-sharing region may not be closely nested inside "
2686 "of work-sharing, %<critical%>, %<ordered%>, "
2687 "%<master%>, explicit %<task%> or %<taskloop%> region");
2688 return false;
2689 case GIMPLE_OMP_PARALLEL:
2690 case GIMPLE_OMP_TEAMS:
2691 return true;
2692 case GIMPLE_OMP_TARGET:
2693 if (gimple_omp_target_kind (ctx->stmt)
2694 == GF_OMP_TARGET_KIND_REGION)
2695 return true;
2696 break;
2697 default:
2698 break;
2700 break;
2701 case GIMPLE_OMP_MASTER:
2702 for (; ctx != NULL; ctx = ctx->outer)
2703 switch (gimple_code (ctx->stmt))
2705 case GIMPLE_OMP_FOR:
2706 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2707 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2708 break;
2709 /* FALLTHRU */
2710 case GIMPLE_OMP_SECTIONS:
2711 case GIMPLE_OMP_SINGLE:
2712 case GIMPLE_OMP_TASK:
2713 error_at (gimple_location (stmt),
2714 "%<master%> region may not be closely nested inside "
2715 "of work-sharing, explicit %<task%> or %<taskloop%> "
2716 "region");
2717 return false;
2718 case GIMPLE_OMP_PARALLEL:
2719 case GIMPLE_OMP_TEAMS:
2720 return true;
2721 case GIMPLE_OMP_TARGET:
2722 if (gimple_omp_target_kind (ctx->stmt)
2723 == GF_OMP_TARGET_KIND_REGION)
2724 return true;
2725 break;
2726 default:
2727 break;
2729 break;
2730 case GIMPLE_OMP_TASK:
2731 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2732 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2733 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2734 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2736 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2737 error_at (OMP_CLAUSE_LOCATION (c),
2738 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2739 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2740 return false;
2742 break;
2743 case GIMPLE_OMP_ORDERED:
2744 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2745 c; c = OMP_CLAUSE_CHAIN (c))
2747 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2749 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2750 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2751 continue;
2753 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2754 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2755 || kind == OMP_CLAUSE_DEPEND_SINK)
2757 tree oclause;
2758 /* Look for containing ordered(N) loop. */
2759 if (ctx == NULL
2760 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2761 || (oclause
2762 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2763 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2765 error_at (OMP_CLAUSE_LOCATION (c),
2766 "%<ordered%> construct with %<depend%> clause "
2767 "must be closely nested inside an %<ordered%> "
2768 "loop");
2769 return false;
2771 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2773 error_at (OMP_CLAUSE_LOCATION (c),
2774 "%<ordered%> construct with %<depend%> clause "
2775 "must be closely nested inside a loop with "
2776 "%<ordered%> clause with a parameter");
2777 return false;
2780 else
2782 error_at (OMP_CLAUSE_LOCATION (c),
2783 "invalid depend kind in omp %<ordered%> %<depend%>");
2784 return false;
2787 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2788 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2790 /* An ordered simd construct must be closely nested inside of a simd
2791 region, and a simd region must not encounter constructs other than
2792 ordered simd, therefore an ordered simd may be either orphaned,
2793 or ctx->stmt must be a simd. The latter case has already been
2794 handled earlier. */
2795 if (ctx != NULL)
2797 error_at (gimple_location (stmt),
2798 "%<ordered%> %<simd%> must be closely nested inside "
2799 "%<simd%> region");
2800 return false;
2803 for (; ctx != NULL; ctx = ctx->outer)
2804 switch (gimple_code (ctx->stmt))
2806 case GIMPLE_OMP_CRITICAL:
2807 case GIMPLE_OMP_TASK:
2808 case GIMPLE_OMP_ORDERED:
2809 ordered_in_taskloop:
2810 error_at (gimple_location (stmt),
2811 "%<ordered%> region may not be closely nested inside "
2812 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2813 "%<taskloop%> region");
2814 return false;
2815 case GIMPLE_OMP_FOR:
2816 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2817 goto ordered_in_taskloop;
2818 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2819 OMP_CLAUSE_ORDERED) == NULL)
2821 error_at (gimple_location (stmt),
2822 "%<ordered%> region must be closely nested inside "
2823 "a loop region with an %<ordered%> clause");
2824 return false;
2826 return true;
2827 case GIMPLE_OMP_TARGET:
2828 if (gimple_omp_target_kind (ctx->stmt)
2829 != GF_OMP_TARGET_KIND_REGION)
2830 break;
2831 /* FALLTHRU */
2832 case GIMPLE_OMP_PARALLEL:
2833 case GIMPLE_OMP_TEAMS:
2834 error_at (gimple_location (stmt),
2835 "%<ordered%> region must be closely nested inside "
2836 "a loop region with an %<ordered%> clause");
2837 return false;
2838 default:
2839 break;
2841 break;
2842 case GIMPLE_OMP_CRITICAL:
2844 tree this_stmt_name
2845 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2846 for (; ctx != NULL; ctx = ctx->outer)
2847 if (gomp_critical *other_crit
2848 = dyn_cast <gomp_critical *> (ctx->stmt))
2849 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2851 error_at (gimple_location (stmt),
2852 "%<critical%> region may not be nested inside "
2853 "a %<critical%> region with the same name");
2854 return false;
2857 break;
2858 case GIMPLE_OMP_TEAMS:
2859 if (ctx == NULL
2860 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2861 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2863 error_at (gimple_location (stmt),
2864 "%<teams%> construct not closely nested inside of "
2865 "%<target%> construct");
2866 return false;
2868 break;
2869 case GIMPLE_OMP_TARGET:
2870 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2871 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2872 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2873 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2875 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2876 error_at (OMP_CLAUSE_LOCATION (c),
2877 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2878 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2879 return false;
2881 if (is_gimple_omp_offloaded (stmt)
2882 && oacc_get_fn_attrib (cfun->decl) != NULL)
2884 error_at (gimple_location (stmt),
2885 "OpenACC region inside of OpenACC routine, nested "
2886 "parallelism not supported yet");
2887 return false;
2889 for (; ctx != NULL; ctx = ctx->outer)
2891 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2893 if (is_gimple_omp (stmt)
2894 && is_gimple_omp_oacc (stmt)
2895 && is_gimple_omp (ctx->stmt))
2897 error_at (gimple_location (stmt),
2898 "OpenACC construct inside of non-OpenACC region");
2899 return false;
2901 continue;
2904 const char *stmt_name, *ctx_stmt_name;
2905 switch (gimple_omp_target_kind (stmt))
2907 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2908 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2909 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2910 case GF_OMP_TARGET_KIND_ENTER_DATA:
2911 stmt_name = "target enter data"; break;
2912 case GF_OMP_TARGET_KIND_EXIT_DATA:
2913 stmt_name = "target exit data"; break;
2914 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2915 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2916 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2917 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2918 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2919 stmt_name = "enter/exit data"; break;
2920 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2921 break;
2922 default: gcc_unreachable ();
2924 switch (gimple_omp_target_kind (ctx->stmt))
2926 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2927 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2928 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2929 ctx_stmt_name = "parallel"; break;
2930 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2931 ctx_stmt_name = "kernels"; break;
2932 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2933 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2934 ctx_stmt_name = "host_data"; break;
2935 default: gcc_unreachable ();
2938 /* OpenACC/OpenMP mismatch? */
2939 if (is_gimple_omp_oacc (stmt)
2940 != is_gimple_omp_oacc (ctx->stmt))
2942 error_at (gimple_location (stmt),
2943 "%s %qs construct inside of %s %qs region",
2944 (is_gimple_omp_oacc (stmt)
2945 ? "OpenACC" : "OpenMP"), stmt_name,
2946 (is_gimple_omp_oacc (ctx->stmt)
2947 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2948 return false;
2950 if (is_gimple_omp_offloaded (ctx->stmt))
2952 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2953 if (is_gimple_omp_oacc (ctx->stmt))
2955 error_at (gimple_location (stmt),
2956 "%qs construct inside of %qs region",
2957 stmt_name, ctx_stmt_name);
2958 return false;
2960 else
2962 warning_at (gimple_location (stmt), 0,
2963 "%qs construct inside of %qs region",
2964 stmt_name, ctx_stmt_name);
2968 break;
2969 default:
2970 break;
2972 return true;
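/* For example (illustrative), both of the following are diagnosed by
   the checks above:

     #pragma omp critical
     #pragma omp barrier                        // barrier in critical

     #pragma omp ordered depend (sink: i - 1)   // no ordered(N) loop

   and the offending statement is then replaced by a nop in
   scan_omp_1_stmt below.  */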
2976 /* Helper function for scan_omp.
2978 Callback for walk_tree, or for operands via walk_gimple_stmt, used
2979 to scan for OMP directives in TP. */
2981 static tree
2982 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
2984 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2985 omp_context *ctx = (omp_context *) wi->info;
2986 tree t = *tp;
2988 switch (TREE_CODE (t))
2990 case VAR_DECL:
2991 case PARM_DECL:
2992 case LABEL_DECL:
2993 case RESULT_DECL:
2994 if (ctx)
2996 tree repl = remap_decl (t, &ctx->cb);
2997 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
2998 *tp = repl;
3000 break;
3002 default:
3003 if (ctx && TYPE_P (t))
3004 *tp = remap_type (t, &ctx->cb);
3005 else if (!DECL_P (t))
3007 *walk_subtrees = 1;
3008 if (ctx)
3010 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3011 if (tem != TREE_TYPE (t))
3013 if (TREE_CODE (t) == INTEGER_CST)
3014 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3015 else
3016 TREE_TYPE (t) = tem;
3020 break;
3023 return NULL_TREE;
3026 /* Return true if FNDECL is a setjmp or a longjmp. */
3028 static bool
3029 setjmp_or_longjmp_p (const_tree fndecl)
3031 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3032 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3033 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3034 return true;
3036 tree declname = DECL_NAME (fndecl);
3037 if (!declname)
3038 return false;
3039 const char *name = IDENTIFIER_POINTER (declname);
3040 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3044 /* Helper function for scan_omp.
3046 Callback for walk_gimple_stmt used to scan for OMP directives in
3047 the current statement in GSI. */
3049 static tree
3050 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3051 struct walk_stmt_info *wi)
3053 gimple *stmt = gsi_stmt (*gsi);
3054 omp_context *ctx = (omp_context *) wi->info;
3056 if (gimple_has_location (stmt))
3057 input_location = gimple_location (stmt);
3059 /* Check the nesting restrictions. */
3060 bool remove = false;
3061 if (is_gimple_omp (stmt))
3062 remove = !check_omp_nesting_restrictions (stmt, ctx);
3063 else if (is_gimple_call (stmt))
3065 tree fndecl = gimple_call_fndecl (stmt);
3066 if (fndecl)
3068 if (setjmp_or_longjmp_p (fndecl)
3069 && ctx
3070 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3071 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3073 remove = true;
3074 error_at (gimple_location (stmt),
3075 "setjmp/longjmp inside simd construct");
3077 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3078 switch (DECL_FUNCTION_CODE (fndecl))
3080 case BUILT_IN_GOMP_BARRIER:
3081 case BUILT_IN_GOMP_CANCEL:
3082 case BUILT_IN_GOMP_CANCELLATION_POINT:
3083 case BUILT_IN_GOMP_TASKYIELD:
3084 case BUILT_IN_GOMP_TASKWAIT:
3085 case BUILT_IN_GOMP_TASKGROUP_START:
3086 case BUILT_IN_GOMP_TASKGROUP_END:
3087 remove = !check_omp_nesting_restrictions (stmt, ctx);
3088 break;
3089 default:
3090 break;
3094 if (remove)
3096 stmt = gimple_build_nop ();
3097 gsi_replace (gsi, stmt, false);
3100 *handled_ops_p = true;
3102 switch (gimple_code (stmt))
3104 case GIMPLE_OMP_PARALLEL:
3105 taskreg_nesting_level++;
3106 scan_omp_parallel (gsi, ctx);
3107 taskreg_nesting_level--;
3108 break;
3110 case GIMPLE_OMP_TASK:
3111 taskreg_nesting_level++;
3112 scan_omp_task (gsi, ctx);
3113 taskreg_nesting_level--;
3114 break;
3116 case GIMPLE_OMP_FOR:
3117 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3118 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3119 && omp_maybe_offloaded_ctx (ctx)
3120 && omp_max_simt_vf ())
3121 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3122 else
3123 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3124 break;
3126 case GIMPLE_OMP_SECTIONS:
3127 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3128 break;
3130 case GIMPLE_OMP_SINGLE:
3131 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3132 break;
3134 case GIMPLE_OMP_SECTION:
3135 case GIMPLE_OMP_MASTER:
3136 case GIMPLE_OMP_TASKGROUP:
3137 case GIMPLE_OMP_ORDERED:
3138 case GIMPLE_OMP_CRITICAL:
3139 case GIMPLE_OMP_GRID_BODY:
3140 ctx = new_omp_context (stmt, ctx);
3141 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3142 break;
3144 case GIMPLE_OMP_TARGET:
3145 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3146 break;
3148 case GIMPLE_OMP_TEAMS:
3149 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3150 break;
3152 case GIMPLE_BIND:
3154 tree var;
3156 *handled_ops_p = false;
3157 if (ctx)
3158 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3159 var ;
3160 var = DECL_CHAIN (var))
3161 insert_decl_map (&ctx->cb, var, var);
3163 break;
3164 default:
3165 *handled_ops_p = false;
3166 break;
3169 return NULL_TREE;
3173 /* Scan all the statements starting at the current statement. CTX
3174 contains context information about the OMP directives and
3175 clauses found during the scan. */
3177 static void
3178 scan_omp (gimple_seq *body_p, omp_context *ctx)
3180 location_t saved_location;
3181 struct walk_stmt_info wi;
3183 memset (&wi, 0, sizeof (wi));
3184 wi.info = ctx;
3185 wi.want_locations = true;
3187 saved_location = input_location;
3188 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3189 input_location = saved_location;
3192 /* Re-gimplification and code generation routines. */
3194 /* If a context was created for STMT when it was scanned, return it. */
3196 static omp_context *
3197 maybe_lookup_ctx (gimple *stmt)
3199 splay_tree_node n;
3200 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3201 return n ? (omp_context *) n->value : NULL;
3205 /* Find the mapping for DECL in CTX or the immediately enclosing
3206 context that has a mapping for DECL.
3208 If CTX is a nested parallel directive, we may have to use the decl
3209 mappings created in CTX's parent context. Suppose that we have the
3210 following parallel nesting (variable UIDs shown for clarity):
3212 iD.1562 = 0;
3213 #omp parallel shared(iD.1562) -> outer parallel
3214 iD.1562 = iD.1562 + 1;
3216 #omp parallel shared (iD.1562) -> inner parallel
3217 iD.1562 = iD.1562 - 1;
3219 Each parallel structure will create a distinct .omp_data_s structure
3220 for copying iD.1562 in/out of the directive:
3222 outer parallel .omp_data_s.1.i -> iD.1562
3223 inner parallel .omp_data_s.2.i -> iD.1562
3225 A shared variable mapping will produce a copy-out operation before
3226 the parallel directive and a copy-in operation after it. So, in
3227 this case we would have:
3229 iD.1562 = 0;
3230 .omp_data_o.1.i = iD.1562;
3231 #omp parallel shared(iD.1562) -> outer parallel
3232 .omp_data_i.1 = &.omp_data_o.1
3233 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3235 .omp_data_o.2.i = iD.1562; -> **
3236 #omp parallel shared(iD.1562) -> inner parallel
3237 .omp_data_i.2 = &.omp_data_o.2
3238 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3241 ** This is a problem. The symbol iD.1562 cannot be referenced
3242 inside the body of the outer parallel region. But since we are
3243 emitting this copy operation while expanding the inner parallel
3244 directive, we need to access the CTX structure of the outer
3245 parallel directive to get the correct mapping:
3247 .omp_data_o.2.i = .omp_data_i.1->i
3249 Since there may be other workshare or parallel directives enclosing
3250 the parallel directive, it may be necessary to walk up the context
3251 parent chain. This is not a problem in general because nested
3252 parallelism happens only rarely. */
3254 static tree
3255 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3257 tree t;
3258 omp_context *up;
3260 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3261 t = maybe_lookup_decl (decl, up);
3263 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3265 return t ? t : decl;
3269 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3270 in outer contexts. */
3272 static tree
3273 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3275 tree t = NULL;
3276 omp_context *up;
3278 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3279 t = maybe_lookup_decl (decl, up);
3281 return t ? t : decl;
3285 /* Construct the initialization value for reduction operation OP. */
3287 tree
3288 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3290 switch (op)
3292 case PLUS_EXPR:
3293 case MINUS_EXPR:
3294 case BIT_IOR_EXPR:
3295 case BIT_XOR_EXPR:
3296 case TRUTH_OR_EXPR:
3297 case TRUTH_ORIF_EXPR:
3298 case TRUTH_XOR_EXPR:
3299 case NE_EXPR:
3300 return build_zero_cst (type);
3302 case MULT_EXPR:
3303 case TRUTH_AND_EXPR:
3304 case TRUTH_ANDIF_EXPR:
3305 case EQ_EXPR:
3306 return fold_convert_loc (loc, type, integer_one_node);
3308 case BIT_AND_EXPR:
3309 return fold_convert_loc (loc, type, integer_minus_one_node);
3311 case MAX_EXPR:
3312 if (SCALAR_FLOAT_TYPE_P (type))
3314 REAL_VALUE_TYPE max, min;
3315 if (HONOR_INFINITIES (type))
3317 real_inf (&max);
3318 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3320 else
3321 real_maxval (&min, 1, TYPE_MODE (type));
3322 return build_real (type, min);
3324 else if (POINTER_TYPE_P (type))
3326 wide_int min
3327 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3328 return wide_int_to_tree (type, min);
3330 else
3332 gcc_assert (INTEGRAL_TYPE_P (type));
3333 return TYPE_MIN_VALUE (type);
3336 case MIN_EXPR:
3337 if (SCALAR_FLOAT_TYPE_P (type))
3339 REAL_VALUE_TYPE max;
3340 if (HONOR_INFINITIES (type))
3341 real_inf (&max);
3342 else
3343 real_maxval (&max, 0, TYPE_MODE (type));
3344 return build_real (type, max);
3346 else if (POINTER_TYPE_P (type))
3348 wide_int max
3349 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3350 return wide_int_to_tree (type, max);
3352 else
3354 gcc_assert (INTEGRAL_TYPE_P (type));
3355 return TYPE_MAX_VALUE (type);
3358 default:
3359 gcc_unreachable ();
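/* E.g. (illustrative) for

     #pragma omp parallel for reduction (+: s) reduction (min: m)

   with double s and int m, each private copy starts as

     s = 0.0;       // build_zero_cst for PLUS_EXPR
     m = INT_MAX;   // TYPE_MAX_VALUE for MIN_EXPR on an integral type

   while MIN_EXPR on a float type honoring infinities starts at +Inf.  */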
3363 /* Construct the initialization value for reduction CLAUSE. */
3365 tree
3366 omp_reduction_init (tree clause, tree type)
3368 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3369 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3372 /* Return the alignment to be assumed for the variable in CLAUSE, which
3373 should be OMP_CLAUSE_ALIGNED. */
3375 static tree
3376 omp_clause_aligned_alignment (tree clause)
3378 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3379 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3381 /* Otherwise return the implementation-defined alignment. */
3382 unsigned int al = 1;
3383 opt_scalar_mode mode_iter;
3384 auto_vector_sizes sizes;
3385 targetm.vectorize.autovectorize_vector_sizes (&sizes);
3386 poly_uint64 vs = 0;
3387 for (unsigned int i = 0; i < sizes.length (); ++i)
3388 vs = ordered_max (vs, sizes[i]);
3389 static enum mode_class classes[]
3390 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3391 for (int i = 0; i < 4; i += 2)
3392 /* The for loop above dictates that we only walk through scalar classes. */
3393 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3395 scalar_mode mode = mode_iter.require ();
3396 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3397 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3398 continue;
3399 while (maybe_ne (vs, 0U)
3400 && known_lt (GET_MODE_SIZE (vmode), vs)
3401 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3402 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3404 tree type = lang_hooks.types.type_for_mode (mode, 1);
3405 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3406 continue;
3407 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3408 GET_MODE_SIZE (mode));
3409 type = build_vector_type (type, nelts);
3410 if (TYPE_MODE (type) != vmode)
3411 continue;
3412 if (TYPE_ALIGN_UNIT (type) > al)
3413 al = TYPE_ALIGN_UNIT (type);
3415 return build_int_cst (integer_type_node, al);
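/* Illustrative: on x86_64 with -mavx512f the widest preferred vector
   modes are 512 bits, so an aligned clause without an explicit
   alignment argument resolves to 64 (bytes) here, whereas with only
   SSE2 it would resolve to 16.  */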
3419 /* This structure is part of the interface between lower_rec_simd_input_clauses
3420 and lower_rec_input_clauses. */
3422 struct omplow_simd_context {
3423 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3424 tree idx;
3425 tree lane;
3426 vec<tree, va_heap> simt_eargs;
3427 gimple_seq simt_dlist;
3428 poly_uint64_pod max_vf;
3429 bool is_simt;
3432 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3433 privatization. */
3435 static bool
3436 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3437 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3439 if (known_eq (sctx->max_vf, 0U))
3441 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3442 if (maybe_gt (sctx->max_vf, 1U))
3444 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3445 OMP_CLAUSE_SAFELEN);
3446 if (c)
3448 poly_uint64 safe_len;
3449 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3450 || maybe_lt (safe_len, 1U))
3451 sctx->max_vf = 1;
3452 else
3453 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3456 if (maybe_gt (sctx->max_vf, 1U))
3458 sctx->idx = create_tmp_var (unsigned_type_node);
3459 sctx->lane = create_tmp_var (unsigned_type_node);
3462 if (known_eq (sctx->max_vf, 1U))
3463 return false;
3465 if (sctx->is_simt)
3467 if (is_gimple_reg (new_var))
3469 ivar = lvar = new_var;
3470 return true;
3472 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3473 ivar = lvar = create_tmp_var (type);
3474 TREE_ADDRESSABLE (ivar) = 1;
3475 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3476 NULL, DECL_ATTRIBUTES (ivar));
3477 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3478 tree clobber = build_constructor (type, NULL);
3479 TREE_THIS_VOLATILE (clobber) = 1;
3480 gimple *g = gimple_build_assign (ivar, clobber);
3481 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3483 else
3485 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3486 tree avar = create_tmp_var_raw (atype);
3487 if (TREE_ADDRESSABLE (new_var))
3488 TREE_ADDRESSABLE (avar) = 1;
3489 DECL_ATTRIBUTES (avar)
3490 = tree_cons (get_identifier ("omp simd array"), NULL,
3491 DECL_ATTRIBUTES (avar));
3492 gimple_add_tmp_var (avar);
3493 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3494 NULL_TREE, NULL_TREE);
3495 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3496 NULL_TREE, NULL_TREE);
3498 if (DECL_P (new_var))
3500 SET_DECL_VALUE_EXPR (new_var, lvar);
3501 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3503 return true;
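/* Illustrative sketch of the non-SIMT branch: for

     #pragma omp simd private (x)

   a per-lane array of TREE_TYPE (x) with max_vf elements, tagged with
   the "omp simd array" attribute, is created; IVAR and LVAR become
   array[idx] and array[lane], and uses of x in the loop body are
   redirected to the lane copy through DECL_VALUE_EXPR.  */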
3506 /* Helper function of lower_rec_input_clauses. For a reference
3507 in a simd reduction, add an underlying variable that it will reference. */
3509 static void
3510 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3512 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3513 if (TREE_CONSTANT (z))
3515 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3516 get_name (new_vard));
3517 gimple_add_tmp_var (z);
3518 TREE_ADDRESSABLE (z) = 1;
3519 z = build_fold_addr_expr_loc (loc, z);
3520 gimplify_assign (new_vard, z, ilist);
3524 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3525 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3526 private variables. Initialization statements go in ILIST, while calls
3527 to destructors go in DLIST. */
3529 static void
3530 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3531 omp_context *ctx, struct omp_for_data *fd)
3533 tree c, dtor, copyin_seq, x, ptr;
3534 bool copyin_by_ref = false;
3535 bool lastprivate_firstprivate = false;
3536 bool reduction_omp_orig_ref = false;
3537 int pass;
3538 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3539 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3540 omplow_simd_context sctx = omplow_simd_context ();
3541 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3542 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3543 gimple_seq llist[3] = { };
3545 copyin_seq = NULL;
3546 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3548 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3549 with data-sharing clauses referencing variable-sized vars. That
3550 is unnecessarily hard to support and very unlikely to result in
3551 vectorized code anyway. */
3552 if (is_simd)
3553 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3554 switch (OMP_CLAUSE_CODE (c))
3556 case OMP_CLAUSE_LINEAR:
3557 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3558 sctx.max_vf = 1;
3559 /* FALLTHRU */
3560 case OMP_CLAUSE_PRIVATE:
3561 case OMP_CLAUSE_FIRSTPRIVATE:
3562 case OMP_CLAUSE_LASTPRIVATE:
3563 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3564 sctx.max_vf = 1;
3565 break;
3566 case OMP_CLAUSE_REDUCTION:
3567 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3568 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3569 sctx.max_vf = 1;
3570 break;
3571 default:
3572 continue;
3575 /* Add a placeholder for simduid. */
3576 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3577 sctx.simt_eargs.safe_push (NULL_TREE);
3579 /* Do all the fixed-sized types in the first pass, and the variable-sized
3580 types in the second pass. This makes sure that the scalar arguments to
3581 the variable-sized types are processed before we use them in the
3582 variable-sized operations. */
3583 for (pass = 0; pass < 2; ++pass)
3585 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3587 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3588 tree var, new_var;
3589 bool by_ref;
3590 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3592 switch (c_kind)
3594 case OMP_CLAUSE_PRIVATE:
3595 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3596 continue;
3597 break;
3598 case OMP_CLAUSE_SHARED:
3599 /* Ignore shared clauses in a teams construct. */
3600 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3601 continue;
3602 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3604 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3605 || is_global_var (OMP_CLAUSE_DECL (c)));
3606 continue;
3608 case OMP_CLAUSE_FIRSTPRIVATE:
3609 case OMP_CLAUSE_COPYIN:
3610 break;
3611 case OMP_CLAUSE_LINEAR:
3612 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3613 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3614 lastprivate_firstprivate = true;
3615 break;
3616 case OMP_CLAUSE_REDUCTION:
3617 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3618 reduction_omp_orig_ref = true;
3619 break;
3620 case OMP_CLAUSE__LOOPTEMP_:
3621 /* Handle _looptemp_ clauses only on parallel/task. */
3622 if (fd)
3623 continue;
3624 break;
3625 case OMP_CLAUSE_LASTPRIVATE:
3626 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3628 lastprivate_firstprivate = true;
3629 if (pass != 0 || is_taskloop_ctx (ctx))
3630 continue;
3632 /* Even without a corresponding firstprivate, if the
3633 decl is Fortran allocatable, it needs an outer var
3634 reference. */
3635 else if (pass == 0
3636 && lang_hooks.decls.omp_private_outer_ref
3637 (OMP_CLAUSE_DECL (c)))
3638 lastprivate_firstprivate = true;
3639 break;
3640 case OMP_CLAUSE_ALIGNED:
3641 if (pass == 0)
3642 continue;
3643 var = OMP_CLAUSE_DECL (c);
3644 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3645 && !is_global_var (var))
3647 new_var = maybe_lookup_decl (var, ctx);
3648 if (new_var == NULL_TREE)
3649 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3650 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3651 tree alarg = omp_clause_aligned_alignment (c);
3652 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3653 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3654 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3655 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3656 gimplify_and_add (x, ilist);
3658 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3659 && is_global_var (var))
3661 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3662 new_var = lookup_decl (var, ctx);
3663 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3664 t = build_fold_addr_expr_loc (clause_loc, t);
3665 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3666 tree alarg = omp_clause_aligned_alignment (c);
3667 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3668 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3669 t = fold_convert_loc (clause_loc, ptype, t);
3670 x = create_tmp_var (ptype);
3671 t = build2 (MODIFY_EXPR, ptype, x, t);
3672 gimplify_and_add (t, ilist);
3673 t = build_simple_mem_ref_loc (clause_loc, x);
3674 SET_DECL_VALUE_EXPR (new_var, t);
3675 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3677 continue;
3678 default:
3679 continue;
3682 new_var = var = OMP_CLAUSE_DECL (c);
3683 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3685 var = TREE_OPERAND (var, 0);
3686 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3687 var = TREE_OPERAND (var, 0);
3688 if (TREE_CODE (var) == INDIRECT_REF
3689 || TREE_CODE (var) == ADDR_EXPR)
3690 var = TREE_OPERAND (var, 0);
3691 if (is_variable_sized (var))
3693 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3694 var = DECL_VALUE_EXPR (var);
3695 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3696 var = TREE_OPERAND (var, 0);
3697 gcc_assert (DECL_P (var));
3699 new_var = var;
3701 if (c_kind != OMP_CLAUSE_COPYIN)
3702 new_var = lookup_decl (var, ctx);
3704 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3706 if (pass != 0)
3707 continue;
3709 /* C/C++ array section reductions, e.g. reduction (+: a[0:n]). */
3710 else if (c_kind == OMP_CLAUSE_REDUCTION
3711 && var != OMP_CLAUSE_DECL (c))
3713 if (pass == 0)
3714 continue;
3716 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3717 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3718 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3720 tree b = TREE_OPERAND (orig_var, 1);
3721 b = maybe_lookup_decl (b, ctx);
3722 if (b == NULL)
3724 b = TREE_OPERAND (orig_var, 1);
3725 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3727 if (integer_zerop (bias))
3728 bias = b;
3729 else
3731 bias = fold_convert_loc (clause_loc,
3732 TREE_TYPE (b), bias);
3733 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3734 TREE_TYPE (b), b, bias);
3736 orig_var = TREE_OPERAND (orig_var, 0);
3738 if (TREE_CODE (orig_var) == INDIRECT_REF
3739 || TREE_CODE (orig_var) == ADDR_EXPR)
3740 orig_var = TREE_OPERAND (orig_var, 0);
3741 tree d = OMP_CLAUSE_DECL (c);
3742 tree type = TREE_TYPE (d);
3743 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3744 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3745 const char *name = get_name (orig_var);
3746 if (TREE_CONSTANT (v))
3748 x = create_tmp_var_raw (type, name);
3749 gimple_add_tmp_var (x);
3750 TREE_ADDRESSABLE (x) = 1;
3751 x = build_fold_addr_expr_loc (clause_loc, x);
3753 else
3755 tree atmp
3756 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3757 tree t = maybe_lookup_decl (v, ctx);
3758 if (t)
3759 v = t;
3760 else
3761 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3762 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3763 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3764 TREE_TYPE (v), v,
3765 build_int_cst (TREE_TYPE (v), 1));
3766 t = fold_build2_loc (clause_loc, MULT_EXPR,
3767 TREE_TYPE (v), t,
3768 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3769 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3770 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3773 tree ptype = build_pointer_type (TREE_TYPE (type));
3774 x = fold_convert_loc (clause_loc, ptype, x);
3775 tree y = create_tmp_var (ptype, name);
3776 gimplify_assign (y, x, ilist);
3777 x = y;
3778 tree yb = y;
3780 if (!integer_zerop (bias))
3782 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3783 bias);
3784 yb = fold_convert_loc (clause_loc, pointer_sized_int_node, x);
3786 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3787 pointer_sized_int_node, yb, bias);
3788 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3789 yb = create_tmp_var (ptype, name);
3790 gimplify_assign (yb, x, ilist);
3791 x = yb;
3794 d = TREE_OPERAND (d, 0);
3795 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3796 d = TREE_OPERAND (d, 0);
3797 if (TREE_CODE (d) == ADDR_EXPR)
3799 if (orig_var != var)
3801 gcc_assert (is_variable_sized (orig_var));
3802 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3804 gimplify_assign (new_var, x, ilist);
3805 tree new_orig_var = lookup_decl (orig_var, ctx);
3806 tree t = build_fold_indirect_ref (new_var);
3807 DECL_IGNORED_P (new_var) = 0;
3808 TREE_THIS_NOTRAP (t) = 1;
3809 SET_DECL_VALUE_EXPR (new_orig_var, t);
3810 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3812 else
3814 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3815 build_int_cst (ptype, 0));
3816 SET_DECL_VALUE_EXPR (new_var, x);
3817 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3820 else
3822 gcc_assert (orig_var == var);
3823 if (TREE_CODE (d) == INDIRECT_REF)
3825 x = create_tmp_var (ptype, name);
3826 TREE_ADDRESSABLE (x) = 1;
3827 gimplify_assign (x, yb, ilist);
3828 x = build_fold_addr_expr_loc (clause_loc, x);
3830 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3831 gimplify_assign (new_var, x, ilist);
3833 tree y1 = create_tmp_var (ptype, NULL);
3834 gimplify_assign (y1, y, ilist);
3835 tree i2 = NULL_TREE, y2 = NULL_TREE;
3836 tree body2 = NULL_TREE, end2 = NULL_TREE;
3837 tree y3 = NULL_TREE, y4 = NULL_TREE;
3838 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3840 y2 = create_tmp_var (ptype, NULL);
3841 gimplify_assign (y2, y, ilist);
3842 tree ref = build_outer_var_ref (var, ctx);
3843 /* For a reference, build_outer_var_ref already performs this. */
3844 if (TREE_CODE (d) == INDIRECT_REF)
3845 gcc_assert (omp_is_reference (var));
3846 else if (TREE_CODE (d) == ADDR_EXPR)
3847 ref = build_fold_addr_expr (ref);
3848 else if (omp_is_reference (var))
3849 ref = build_fold_addr_expr (ref);
3850 ref = fold_convert_loc (clause_loc, ptype, ref);
3851 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3852 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3854 y3 = create_tmp_var (ptype, NULL);
3855 gimplify_assign (y3, unshare_expr (ref), ilist);
3857 if (is_simd)
3859 y4 = create_tmp_var (ptype, NULL);
3860 gimplify_assign (y4, ref, dlist);
3863 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3864 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3865 tree body = create_artificial_label (UNKNOWN_LOCATION);
3866 tree end = create_artificial_label (UNKNOWN_LOCATION);
3867 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3868 if (y2)
3870 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3871 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3872 body2 = create_artificial_label (UNKNOWN_LOCATION);
3873 end2 = create_artificial_label (UNKNOWN_LOCATION);
3874 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3876 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3878 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3879 tree decl_placeholder
3880 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3881 SET_DECL_VALUE_EXPR (decl_placeholder,
3882 build_simple_mem_ref (y1));
3883 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3884 SET_DECL_VALUE_EXPR (placeholder,
3885 y3 ? build_simple_mem_ref (y3)
3886 : error_mark_node);
3887 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3888 x = lang_hooks.decls.omp_clause_default_ctor
3889 (c, build_simple_mem_ref (y1),
3890 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3891 if (x)
3892 gimplify_and_add (x, ilist);
3893 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3895 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3896 lower_omp (&tseq, ctx);
3897 gimple_seq_add_seq (ilist, tseq);
3899 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3900 if (is_simd)
3902 SET_DECL_VALUE_EXPR (decl_placeholder,
3903 build_simple_mem_ref (y2));
3904 SET_DECL_VALUE_EXPR (placeholder,
3905 build_simple_mem_ref (y4));
3906 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3907 lower_omp (&tseq, ctx);
3908 gimple_seq_add_seq (dlist, tseq);
3909 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3911 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3912 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3913 x = lang_hooks.decls.omp_clause_dtor
3914 (c, build_simple_mem_ref (y2));
3915 if (x)
3917 gimple_seq tseq = NULL;
3918 dtor = x;
3919 gimplify_stmt (&dtor, &tseq);
3920 gimple_seq_add_seq (dlist, tseq);
3923 else
3925 x = omp_reduction_init (c, TREE_TYPE (type));
3926 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3928 /* reduction(-:var) sums up the partial results, so it
3929 acts identically to reduction(+:var). */
3930 if (code == MINUS_EXPR)
3931 code = PLUS_EXPR;
3933 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3934 if (is_simd)
3936 x = build2 (code, TREE_TYPE (type),
3937 build_simple_mem_ref (y4),
3938 build_simple_mem_ref (y2));
3939 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3942 gimple *g
3943 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3944 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3945 gimple_seq_add_stmt (ilist, g);
3946 if (y3)
3948 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3949 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3950 gimple_seq_add_stmt (ilist, g);
3952 g = gimple_build_assign (i, PLUS_EXPR, i,
3953 build_int_cst (TREE_TYPE (i), 1));
3954 gimple_seq_add_stmt (ilist, g);
3955 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3956 gimple_seq_add_stmt (ilist, g);
3957 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3958 if (y2)
3960 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3961 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3962 gimple_seq_add_stmt (dlist, g);
3963 if (y4)
3965 g = gimple_build_assign
3966 (y4, POINTER_PLUS_EXPR, y4,
3967 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3968 gimple_seq_add_stmt (dlist, g);
3970 g = gimple_build_assign (i2, PLUS_EXPR, i2,
3971 build_int_cst (TREE_TYPE (i2), 1));
3972 gimple_seq_add_stmt (dlist, g);
3973 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
3974 gimple_seq_add_stmt (dlist, g);
3975 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
3977 continue;
3979 else if (is_variable_sized (var))
3981 /* For variable sized types, we need to allocate the
3982 actual storage here. Call alloca and store the
3983 result in the pointer decl that we created elsewhere. */
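/* A hedged illustration (invented names, not literal compiler output):
   privatizing "int n; int vla[n];" in this pass yields roughly

     void *tmp = __builtin_alloca_with_align (sizeof (int) * n,
					      DECL_ALIGN (vla));
     vla$ptr = (int *) tmp;

   where vla$ptr stands for the pointer decl behind DECL_VALUE_EXPR,
   as built by the code below.  */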
3984 if (pass == 0)
3985 continue;
3987 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3989 gcall *stmt;
3990 tree tmp, atmp;
3992 ptr = DECL_VALUE_EXPR (new_var);
3993 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3994 ptr = TREE_OPERAND (ptr, 0);
3995 gcc_assert (DECL_P (ptr));
3996 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
3998 /* void *tmp = __builtin_alloca_with_align (x, DECL_ALIGN (var)); */
3999 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4000 stmt = gimple_build_call (atmp, 2, x,
4001 size_int (DECL_ALIGN (var)));
4002 tmp = create_tmp_var_raw (ptr_type_node);
4003 gimple_add_tmp_var (tmp);
4004 gimple_call_set_lhs (stmt, tmp);
4006 gimple_seq_add_stmt (ilist, stmt);
4008 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4009 gimplify_assign (ptr, x, ilist);
4012 else if (omp_is_reference (var))
4014 /* For references that are being privatized for Fortran,
4015 allocate new backing storage for the new pointer
4016 variable. This allows us to avoid changing all the
4017 code that expects a pointer into code that expects
4018 a direct variable. */
4019 if (pass == 0)
4020 continue;
4022 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4023 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4025 x = build_receiver_ref (var, false, ctx);
4026 x = build_fold_addr_expr_loc (clause_loc, x);
4028 else if (TREE_CONSTANT (x))
4030 /* For a reduction in a SIMD loop, defer adding the
4031 initialization of the reference, because if we decide
4032 to use a SIMD array for it, the initialization could cause
4033 an expansion ICE. */
4034 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4035 x = NULL_TREE;
4036 else
4038 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4039 get_name (var));
4040 gimple_add_tmp_var (x);
4041 TREE_ADDRESSABLE (x) = 1;
4042 x = build_fold_addr_expr_loc (clause_loc, x);
4045 else
4047 tree atmp
4048 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4049 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4050 tree al = size_int (TYPE_ALIGN (rtype));
4051 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4054 if (x)
4056 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4057 gimplify_assign (new_var, x, ilist);
4060 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4062 else if (c_kind == OMP_CLAUSE_REDUCTION
4063 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4065 if (pass == 0)
4066 continue;
4068 else if (pass != 0)
4069 continue;
4071 switch (OMP_CLAUSE_CODE (c))
4073 case OMP_CLAUSE_SHARED:
4074 /* Ignore shared directives in teams construct. */
4075 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4076 continue;
4077 /* Shared global vars are just accessed directly. */
4078 if (is_global_var (new_var))
4079 break;
4080 /* For taskloop firstprivate/lastprivate, represented
4081 as firstprivate and shared clause on the task, new_var
4082 is the firstprivate var. */
4083 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4084 break;
4085 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4086 needs to be delayed until after fixup_child_record_type so
4087 that we get the correct type during the dereference. */
4088 by_ref = use_pointer_for_field (var, ctx);
4089 x = build_receiver_ref (var, by_ref, ctx);
4090 SET_DECL_VALUE_EXPR (new_var, x);
4091 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4093 /* ??? If VAR is not passed by reference, and the variable
4094 hasn't been initialized yet, then we'll get a warning for
4095 the store into the omp_data_s structure. Ideally, we'd be
4096 able to notice this and not store anything at all, but
4097 we're generating code too early. Suppress the warning. */
4098 if (!by_ref)
4099 TREE_NO_WARNING (var) = 1;
4100 break;
4102 case OMP_CLAUSE_LASTPRIVATE:
4103 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4104 break;
4105 /* FALLTHRU */
4107 case OMP_CLAUSE_PRIVATE:
4108 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4109 x = build_outer_var_ref (var, ctx);
4110 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4112 if (is_task_ctx (ctx))
4113 x = build_receiver_ref (var, false, ctx);
4114 else
4115 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4117 else
4118 x = NULL;
4119 do_private:
4120 tree nx;
4121 nx = lang_hooks.decls.omp_clause_default_ctor
4122 (c, unshare_expr (new_var), x);
4123 if (is_simd)
4125 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4126 if ((TREE_ADDRESSABLE (new_var) || nx || y
4127 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4128 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4129 ivar, lvar))
4131 if (nx)
4132 x = lang_hooks.decls.omp_clause_default_ctor
4133 (c, unshare_expr (ivar), x);
4134 if (nx && x)
4135 gimplify_and_add (x, &llist[0]);
4136 if (y)
4138 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4139 if (y)
4141 gimple_seq tseq = NULL;
4143 dtor = y;
4144 gimplify_stmt (&dtor, &tseq);
4145 gimple_seq_add_seq (&llist[1], tseq);
4148 break;
4151 if (nx)
4152 gimplify_and_add (nx, ilist);
4153 /* FALLTHRU */
4155 do_dtor:
4156 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4157 if (x)
4159 gimple_seq tseq = NULL;
4161 dtor = x;
4162 gimplify_stmt (&dtor, &tseq);
4163 gimple_seq_add_seq (dlist, tseq);
4165 break;
4167 case OMP_CLAUSE_LINEAR:
4168 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4169 goto do_firstprivate;
4170 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4171 x = NULL;
4172 else
4173 x = build_outer_var_ref (var, ctx);
4174 goto do_private;
4176 case OMP_CLAUSE_FIRSTPRIVATE:
4177 if (is_task_ctx (ctx))
4179 if (omp_is_reference (var) || is_variable_sized (var))
4180 goto do_dtor;
4181 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4182 ctx))
4183 || use_pointer_for_field (var, NULL))
4185 x = build_receiver_ref (var, false, ctx);
4186 SET_DECL_VALUE_EXPR (new_var, x);
4187 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4188 goto do_dtor;
4191 do_firstprivate:
4192 x = build_outer_var_ref (var, ctx);
4193 if (is_simd)
4195 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4196 && gimple_omp_for_combined_into_p (ctx->stmt))
4198 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4199 tree stept = TREE_TYPE (t);
4200 tree ct = omp_find_clause (clauses,
4201 OMP_CLAUSE__LOOPTEMP_);
4202 gcc_assert (ct);
4203 tree l = OMP_CLAUSE_DECL (ct);
4204 tree n1 = fd->loop.n1;
4205 tree step = fd->loop.step;
4206 tree itype = TREE_TYPE (l);
4207 if (POINTER_TYPE_P (itype))
4208 itype = signed_type_for (itype);
4209 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4210 if (TYPE_UNSIGNED (itype)
4211 && fd->loop.cond_code == GT_EXPR)
4212 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4213 fold_build1 (NEGATE_EXPR, itype, l),
4214 fold_build1 (NEGATE_EXPR,
4215 itype, step));
4216 else
4217 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4218 t = fold_build2 (MULT_EXPR, stept,
4219 fold_convert (stept, l), t);
4221 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4223 x = lang_hooks.decls.omp_clause_linear_ctor
4224 (c, new_var, x, t);
4225 gimplify_and_add (x, ilist);
4226 goto do_dtor;
4229 if (POINTER_TYPE_P (TREE_TYPE (x)))
4230 x = fold_build2 (POINTER_PLUS_EXPR,
4231 TREE_TYPE (x), x, t);
4232 else
4233 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4236 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4237 || TREE_ADDRESSABLE (new_var))
4238 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4239 ivar, lvar))
4241 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4243 tree iv = create_tmp_var (TREE_TYPE (new_var));
4244 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4245 gimplify_and_add (x, ilist);
4246 gimple_stmt_iterator gsi
4247 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4248 gassign *g
4249 = gimple_build_assign (unshare_expr (lvar), iv);
4250 gsi_insert_before_without_update (&gsi, g,
4251 GSI_SAME_STMT);
4252 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4253 enum tree_code code = PLUS_EXPR;
4254 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4255 code = POINTER_PLUS_EXPR;
4256 g = gimple_build_assign (iv, code, iv, t);
4257 gsi_insert_before_without_update (&gsi, g,
4258 GSI_SAME_STMT);
4259 break;
4261 x = lang_hooks.decls.omp_clause_copy_ctor
4262 (c, unshare_expr (ivar), x);
4263 gimplify_and_add (x, &llist[0]);
4264 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4265 if (x)
4267 gimple_seq tseq = NULL;
4269 dtor = x;
4270 gimplify_stmt (&dtor, &tseq);
4271 gimple_seq_add_seq (&llist[1], tseq);
4273 break;
4276 x = lang_hooks.decls.omp_clause_copy_ctor
4277 (c, unshare_expr (new_var), x);
4278 gimplify_and_add (x, ilist);
4279 goto do_dtor;
4281 case OMP_CLAUSE__LOOPTEMP_:
4282 gcc_assert (is_taskreg_ctx (ctx));
4283 x = build_outer_var_ref (var, ctx);
4284 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4285 gimplify_and_add (x, ilist);
4286 break;
4288 case OMP_CLAUSE_COPYIN:
4289 by_ref = use_pointer_for_field (var, NULL);
4290 x = build_receiver_ref (var, by_ref, ctx);
4291 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4292 append_to_statement_list (x, &copyin_seq);
4293 copyin_by_ref |= by_ref;
4294 break;
4296 case OMP_CLAUSE_REDUCTION:
4297 /* OpenACC reductions are initialized using the
4298 GOACC_REDUCTION internal function. */
4299 if (is_gimple_omp_oacc (ctx->stmt))
4300 break;
4301 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4303 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4304 gimple *tseq;
4305 x = build_outer_var_ref (var, ctx);
4307 if (omp_is_reference (var)
4308 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4309 TREE_TYPE (x)))
4310 x = build_fold_addr_expr_loc (clause_loc, x);
4311 SET_DECL_VALUE_EXPR (placeholder, x);
4312 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4313 tree new_vard = new_var;
4314 if (omp_is_reference (var))
4316 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4317 new_vard = TREE_OPERAND (new_var, 0);
4318 gcc_assert (DECL_P (new_vard));
4320 if (is_simd
4321 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4322 ivar, lvar))
4324 if (new_vard == new_var)
4326 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4327 SET_DECL_VALUE_EXPR (new_var, ivar);
4329 else
4331 SET_DECL_VALUE_EXPR (new_vard,
4332 build_fold_addr_expr (ivar));
4333 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4335 x = lang_hooks.decls.omp_clause_default_ctor
4336 (c, unshare_expr (ivar),
4337 build_outer_var_ref (var, ctx));
4338 if (x)
4339 gimplify_and_add (x, &llist[0]);
4340 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4342 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4343 lower_omp (&tseq, ctx);
4344 gimple_seq_add_seq (&llist[0], tseq);
4346 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4347 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4348 lower_omp (&tseq, ctx);
4349 gimple_seq_add_seq (&llist[1], tseq);
4350 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4351 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4352 if (new_vard == new_var)
4353 SET_DECL_VALUE_EXPR (new_var, lvar);
4354 else
4355 SET_DECL_VALUE_EXPR (new_vard,
4356 build_fold_addr_expr (lvar));
4357 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4358 if (x)
4360 tseq = NULL;
4361 dtor = x;
4362 gimplify_stmt (&dtor, &tseq);
4363 gimple_seq_add_seq (&llist[1], tseq);
4365 break;
4367 /* If this is a reference to a constant-size reduction var
4368 with a placeholder, we haven't emitted the initializer
4369 for it, because that is undesirable if SIMD arrays are used.
4370 But if they aren't used, we need to emit the deferred
4371 initialization now. */
4372 else if (omp_is_reference (var) && is_simd)
4373 handle_simd_reference (clause_loc, new_vard, ilist);
4374 x = lang_hooks.decls.omp_clause_default_ctor
4375 (c, unshare_expr (new_var),
4376 build_outer_var_ref (var, ctx));
4377 if (x)
4378 gimplify_and_add (x, ilist);
4379 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4381 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4382 lower_omp (&tseq, ctx);
4383 gimple_seq_add_seq (ilist, tseq);
4385 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4386 if (is_simd)
4388 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4389 lower_omp (&tseq, ctx);
4390 gimple_seq_add_seq (dlist, tseq);
4391 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4393 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4394 goto do_dtor;
4396 else
4398 x = omp_reduction_init (c, TREE_TYPE (new_var));
4399 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4400 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4402 /* reduction(-:var) sums up the partial results, so it
4403 acts identically to reduction(+:var). */
4404 if (code == MINUS_EXPR)
4405 code = PLUS_EXPR;
4407 tree new_vard = new_var;
4408 if (is_simd && omp_is_reference (var))
4410 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4411 new_vard = TREE_OPERAND (new_var, 0);
4412 gcc_assert (DECL_P (new_vard));
4414 if (is_simd
4415 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4416 ivar, lvar))
4418 tree ref = build_outer_var_ref (var, ctx);
4420 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4422 if (sctx.is_simt)
4424 if (!simt_lane)
4425 simt_lane = create_tmp_var (unsigned_type_node);
4426 x = build_call_expr_internal_loc
4427 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4428 TREE_TYPE (ivar), 2, ivar, simt_lane);
4429 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4430 gimplify_assign (ivar, x, &llist[2]);
4432 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4433 ref = build_outer_var_ref (var, ctx);
4434 gimplify_assign (ref, x, &llist[1]);
4436 if (new_vard != new_var)
4438 SET_DECL_VALUE_EXPR (new_vard,
4439 build_fold_addr_expr (lvar));
4440 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4443 else
4445 if (omp_is_reference (var) && is_simd)
4446 handle_simd_reference (clause_loc, new_vard, ilist);
4447 gimplify_assign (new_var, x, ilist);
4448 if (is_simd)
4450 tree ref = build_outer_var_ref (var, ctx);
4452 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4453 ref = build_outer_var_ref (var, ctx);
4454 gimplify_assign (ref, x, dlist);
4458 break;
4460 default:
4461 gcc_unreachable ();
4466 if (known_eq (sctx.max_vf, 1U))
4467 sctx.is_simt = false;
4469 if (sctx.lane || sctx.is_simt)
4471 uid = create_tmp_var (ptr_type_node, "simduid");
4472 /* We don't want uninit warnings on simduid; it is always uninitialized,
4473 but we use it only for its DECL_UID, never for its value. */
4474 TREE_NO_WARNING (uid) = 1;
4475 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4476 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4477 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4478 gimple_omp_for_set_clauses (ctx->stmt, c);
4480 /* Emit calls denoting privatized variables and initializing a pointer to
4481 the structure that holds private variables as fields, after the ompdevlow pass. */
4482 if (sctx.is_simt)
4484 sctx.simt_eargs[0] = uid;
4485 gimple *g
4486 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4487 gimple_call_set_lhs (g, uid);
4488 gimple_seq_add_stmt (ilist, g);
4489 sctx.simt_eargs.release ();
4491 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4492 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4493 gimple_call_set_lhs (g, simtrec);
4494 gimple_seq_add_stmt (ilist, g);
4496 if (sctx.lane)
4498 gimple *g
4499 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4500 gimple_call_set_lhs (g, sctx.lane);
4501 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4502 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4503 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4504 build_int_cst (unsigned_type_node, 0));
4505 gimple_seq_add_stmt (ilist, g);
4506 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
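/* Rough shape of the loop built below (pseudo-C, assuming llist[2]
   holds the per-variable exchange-and-combine statements):

     simt_vf = GOMP_SIMT_VF ();
     simt_lane = 1;
     goto header;
   body:
     <llist[2]: tmp = GOMP_SIMT_XCHG_BFLY (x, simt_lane);
		x = x OP tmp;>
     simt_lane = simt_lane << 1;
   header:
     if (simt_lane < simt_vf) goto body; else goto end;
   end:;

   i.e. ceil(log2(simt_vf)) butterfly exchange steps.  */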
4507 if (llist[2])
4509 tree simt_vf = create_tmp_var (unsigned_type_node);
4510 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4511 gimple_call_set_lhs (g, simt_vf);
4512 gimple_seq_add_stmt (dlist, g);
4514 tree t = build_int_cst (unsigned_type_node, 1);
4515 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4516 gimple_seq_add_stmt (dlist, g);
4518 t = build_int_cst (unsigned_type_node, 0);
4519 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4520 gimple_seq_add_stmt (dlist, g);
4522 tree body = create_artificial_label (UNKNOWN_LOCATION);
4523 tree header = create_artificial_label (UNKNOWN_LOCATION);
4524 tree end = create_artificial_label (UNKNOWN_LOCATION);
4525 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4526 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4528 gimple_seq_add_seq (dlist, llist[2]);
4530 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4531 gimple_seq_add_stmt (dlist, g);
4533 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4534 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4535 gimple_seq_add_stmt (dlist, g);
4537 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4539 for (int i = 0; i < 2; i++)
4540 if (llist[i])
4542 tree vf = create_tmp_var (unsigned_type_node);
4543 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4544 gimple_call_set_lhs (g, vf);
4545 gimple_seq *seq = i == 0 ? ilist : dlist;
4546 gimple_seq_add_stmt (seq, g);
4547 tree t = build_int_cst (unsigned_type_node, 0);
4548 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4549 gimple_seq_add_stmt (seq, g);
4550 tree body = create_artificial_label (UNKNOWN_LOCATION);
4551 tree header = create_artificial_label (UNKNOWN_LOCATION);
4552 tree end = create_artificial_label (UNKNOWN_LOCATION);
4553 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4554 gimple_seq_add_stmt (seq, gimple_build_label (body));
4555 gimple_seq_add_seq (seq, llist[i]);
4556 t = build_int_cst (unsigned_type_node, 1);
4557 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4558 gimple_seq_add_stmt (seq, g);
4559 gimple_seq_add_stmt (seq, gimple_build_label (header));
4560 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4561 gimple_seq_add_stmt (seq, g);
4562 gimple_seq_add_stmt (seq, gimple_build_label (end));
4565 if (sctx.is_simt)
4567 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4568 gimple *g
4569 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4570 gimple_seq_add_stmt (dlist, g);
4573 /* The copyin sequence is not to be executed by the main thread, since
4574 that would result in self-copies. The problem is perhaps not visible
4575 for scalars, but it certainly is for C++ operator=. */
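/* Sketch of the guard built just below (pseudo-source):

     if (omp_get_thread_num () != 0)
       <copyin_seq>;

   so only the non-master threads perform the copy-in.  */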
4576 if (copyin_seq)
4578 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4579 0);
4580 x = build2 (NE_EXPR, boolean_type_node, x,
4581 build_int_cst (TREE_TYPE (x), 0));
4582 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4583 gimplify_and_add (x, ilist);
4586 /* If any copyin variable is passed by reference, we must ensure the
4587 master thread doesn't modify it before it is copied over in all
4588 threads. Similarly for variables in both firstprivate and
4589 lastprivate clauses we need to ensure the lastprivate copying
4590 happens after firstprivate copying in all threads. And similarly
4591 for UDRs if the initializer expression refers to omp_orig. */
4592 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4594 /* Don't add any barrier for #pragma omp simd or
4595 #pragma omp distribute. */
4596 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4597 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4598 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4601 /* If max_vf is non-zero, then we can use only a vectorization factor
4602 up to the max_vf we chose. So stick it into the safelen clause. */
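/* For example, if sctx.max_vf is 16 and there is either no safelen
   clause or one larger than 16, the code below attaches an effective
   "safelen(16)" to the loop.  */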
4603 if (maybe_ne (sctx.max_vf, 0U))
4605 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4606 OMP_CLAUSE_SAFELEN);
4607 poly_uint64 safe_len;
4608 if (c == NULL_TREE
4609 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4610 && maybe_gt (safe_len, sctx.max_vf)))
4612 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4613 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4614 sctx.max_vf);
4615 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4616 gimple_omp_for_set_clauses (ctx->stmt, c);
4622 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4623 both parallel and workshare constructs. PREDICATE may be NULL if it's
4624 always true. */
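/* Hedged sketch of the output for "#pragma omp for lastprivate(x)"
   ("x.priv" is an invented name for the privatized copy):

     if (<PREDICATE: this thread ran the sequentially last iteration>)
       x = x.priv;

   with the conditional built from PREDICATE below.  */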
4626 static void
4627 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4628 omp_context *ctx)
4630 tree x, c, label = NULL, orig_clauses = clauses;
4631 bool par_clauses = false;
4632 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4634 /* Early exit if there are no lastprivate or linear clauses. */
4635 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4636 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4637 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4638 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4639 break;
4640 if (clauses == NULL)
4642 /* If this was a workshare clause, see if it had been combined
4643 with its parallel. In that case, look for the clauses on the
4644 parallel statement itself. */
4645 if (is_parallel_ctx (ctx))
4646 return;
4648 ctx = ctx->outer;
4649 if (ctx == NULL || !is_parallel_ctx (ctx))
4650 return;
4652 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4653 OMP_CLAUSE_LASTPRIVATE);
4654 if (clauses == NULL)
4655 return;
4656 par_clauses = true;
4659 bool maybe_simt = false;
4660 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4661 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4663 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4664 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4665 if (simduid)
4666 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4669 if (predicate)
4671 gcond *stmt;
4672 tree label_true, arm1, arm2;
4673 enum tree_code pred_code = TREE_CODE (predicate);
4675 label = create_artificial_label (UNKNOWN_LOCATION);
4676 label_true = create_artificial_label (UNKNOWN_LOCATION);
4677 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4679 arm1 = TREE_OPERAND (predicate, 0);
4680 arm2 = TREE_OPERAND (predicate, 1);
4681 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4682 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4684 else
4686 arm1 = predicate;
4687 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4688 arm2 = boolean_false_node;
4689 pred_code = NE_EXPR;
4691 if (maybe_simt)
4693 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4694 c = fold_convert (integer_type_node, c);
4695 simtcond = create_tmp_var (integer_type_node);
4696 gimplify_assign (simtcond, c, stmt_list);
4697 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4698 1, simtcond);
4699 c = create_tmp_var (integer_type_node);
4700 gimple_call_set_lhs (g, c);
4701 gimple_seq_add_stmt (stmt_list, g);
4702 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4703 label_true, label);
4705 else
4706 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4707 gimple_seq_add_stmt (stmt_list, stmt);
4708 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4711 for (c = clauses; c ;)
4713 tree var, new_var;
4714 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4716 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4717 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4718 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4720 var = OMP_CLAUSE_DECL (c);
4721 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4722 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4723 && is_taskloop_ctx (ctx))
4725 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4726 new_var = lookup_decl (var, ctx->outer);
4728 else
4730 new_var = lookup_decl (var, ctx);
4731 /* Avoid uninitialized warnings for lastprivate and
4732 for linear iterators. */
4733 if (predicate
4734 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4735 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4736 TREE_NO_WARNING (new_var) = 1;
4739 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4741 tree val = DECL_VALUE_EXPR (new_var);
4742 if (TREE_CODE (val) == ARRAY_REF
4743 && VAR_P (TREE_OPERAND (val, 0))
4744 && lookup_attribute ("omp simd array",
4745 DECL_ATTRIBUTES (TREE_OPERAND (val,
4746 0))))
4748 if (lastlane == NULL)
4750 lastlane = create_tmp_var (unsigned_type_node);
4751 gcall *g
4752 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4753 2, simduid,
4754 TREE_OPERAND (val, 1));
4755 gimple_call_set_lhs (g, lastlane);
4756 gimple_seq_add_stmt (stmt_list, g);
4758 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4759 TREE_OPERAND (val, 0), lastlane,
4760 NULL_TREE, NULL_TREE);
4763 else if (maybe_simt)
4765 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4766 ? DECL_VALUE_EXPR (new_var)
4767 : new_var);
4768 if (simtlast == NULL)
4770 simtlast = create_tmp_var (unsigned_type_node);
4771 gcall *g = gimple_build_call_internal
4772 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4773 gimple_call_set_lhs (g, simtlast);
4774 gimple_seq_add_stmt (stmt_list, g);
4776 x = build_call_expr_internal_loc
4777 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4778 TREE_TYPE (val), 2, val, simtlast);
4779 new_var = unshare_expr (new_var);
4780 gimplify_assign (new_var, x, stmt_list);
4781 new_var = unshare_expr (new_var);
4784 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4785 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4787 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4788 gimple_seq_add_seq (stmt_list,
4789 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4790 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4792 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4793 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4795 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4796 gimple_seq_add_seq (stmt_list,
4797 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4798 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4801 x = NULL_TREE;
4802 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4803 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4805 gcc_checking_assert (is_taskloop_ctx (ctx));
4806 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4807 ctx->outer->outer);
4808 if (is_global_var (ovar))
4809 x = ovar;
4811 if (!x)
4812 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4813 if (omp_is_reference (var))
4814 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4815 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4816 gimplify_and_add (x, stmt_list);
4818 c = OMP_CLAUSE_CHAIN (c);
4819 if (c == NULL && !par_clauses)
4821 /* If this was a workshare clause, see if it had been combined
4822 with its parallel. In that case, continue looking for the
4823 clauses also on the parallel statement itself. */
4824 if (is_parallel_ctx (ctx))
4825 break;
4827 ctx = ctx->outer;
4828 if (ctx == NULL || !is_parallel_ctx (ctx))
4829 break;
4831 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4832 OMP_CLAUSE_LASTPRIVATE);
4833 par_clauses = true;
4837 if (label)
4838 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4841 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4842 (which might be a placeholder). INNER is true if this is an inner
4843 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4844 join markers. Generate the before-loop forking sequence in
4845 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
4846 general form of these sequences is
4848 GOACC_REDUCTION_SETUP
4849 GOACC_FORK
4850 GOACC_REDUCTION_INIT
4852 GOACC_REDUCTION_FINI
4853 GOACC_JOIN
4854 GOACC_REDUCTION_TEARDOWN. */
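/* In terms of the temporaries created below, each reduction VAR
   flows through that sequence roughly as

     v1 = GOACC_REDUCTION (SETUP, ref_to_res, incoming, level, op, off);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, op, off);
     ...loop body...
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, op, off);
     GOACC_JOIN
     outgoing = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, op,
				 off);  */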
4856 static void
4857 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4858 gcall *fork, gcall *join, gimple_seq *fork_seq,
4859 gimple_seq *join_seq, omp_context *ctx)
4861 gimple_seq before_fork = NULL;
4862 gimple_seq after_fork = NULL;
4863 gimple_seq before_join = NULL;
4864 gimple_seq after_join = NULL;
4865 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4866 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4867 unsigned offset = 0;
4869 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4870 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4872 tree orig = OMP_CLAUSE_DECL (c);
4873 tree var = maybe_lookup_decl (orig, ctx);
4874 tree ref_to_res = NULL_TREE;
4875 tree incoming, outgoing, v1, v2, v3;
4876 bool is_private = false;
4878 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4879 if (rcode == MINUS_EXPR)
4880 rcode = PLUS_EXPR;
4881 else if (rcode == TRUTH_ANDIF_EXPR)
4882 rcode = BIT_AND_EXPR;
4883 else if (rcode == TRUTH_ORIF_EXPR)
4884 rcode = BIT_IOR_EXPR;
4885 tree op = build_int_cst (unsigned_type_node, rcode);
4887 if (!var)
4888 var = orig;
4890 incoming = outgoing = var;
4892 if (!inner)
4894 /* See if an outer construct also reduces this variable. */
4895 omp_context *outer = ctx;
4897 while (omp_context *probe = outer->outer)
4899 enum gimple_code type = gimple_code (probe->stmt);
4900 tree cls;
4902 switch (type)
4904 case GIMPLE_OMP_FOR:
4905 cls = gimple_omp_for_clauses (probe->stmt);
4906 break;
4908 case GIMPLE_OMP_TARGET:
4909 if (gimple_omp_target_kind (probe->stmt)
4910 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4911 goto do_lookup;
4913 cls = gimple_omp_target_clauses (probe->stmt);
4914 break;
4916 default:
4917 goto do_lookup;
4920 outer = probe;
4921 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4922 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4923 && orig == OMP_CLAUSE_DECL (cls))
4925 incoming = outgoing = lookup_decl (orig, probe);
4926 goto has_outer_reduction;
4928 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4929 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4930 && orig == OMP_CLAUSE_DECL (cls))
4932 is_private = true;
4933 goto do_lookup;
4937 do_lookup:
4938 /* This is the outermost construct with this reduction;
4939 see if there's a mapping for it. */
4940 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4941 && maybe_lookup_field (orig, outer) && !is_private)
4943 ref_to_res = build_receiver_ref (orig, false, outer);
4944 if (omp_is_reference (orig))
4945 ref_to_res = build_simple_mem_ref (ref_to_res);
4947 tree type = TREE_TYPE (var);
4948 if (POINTER_TYPE_P (type))
4949 type = TREE_TYPE (type);
4951 outgoing = var;
4952 incoming = omp_reduction_init_op (loc, rcode, type);
4954 else
4956 /* Try to look at enclosing contexts for the reduction var;
4957 use the original if no mapping is found. */
4958 tree t = NULL_TREE;
4959 omp_context *c = ctx->outer;
4960 while (c && !t)
4962 t = maybe_lookup_decl (orig, c);
4963 c = c->outer;
4965 incoming = outgoing = (t ? t : orig);
4968 has_outer_reduction:;
4971 if (!ref_to_res)
4972 ref_to_res = integer_zero_node;
4974 if (omp_is_reference (orig))
4976 tree type = TREE_TYPE (var);
4977 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
4979 if (!inner)
4981 tree x = create_tmp_var (TREE_TYPE (type), id);
4982 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
4985 v1 = create_tmp_var (type, id);
4986 v2 = create_tmp_var (type, id);
4987 v3 = create_tmp_var (type, id);
4989 gimplify_assign (v1, var, fork_seq);
4990 gimplify_assign (v2, var, fork_seq);
4991 gimplify_assign (v3, var, fork_seq);
4993 var = build_simple_mem_ref (var);
4994 v1 = build_simple_mem_ref (v1);
4995 v2 = build_simple_mem_ref (v2);
4996 v3 = build_simple_mem_ref (v3);
4997 outgoing = build_simple_mem_ref (outgoing);
4999 if (!TREE_CONSTANT (incoming))
5000 incoming = build_simple_mem_ref (incoming);
5002 else
5003 v1 = v2 = v3 = var;
5005 /* Determine position in reduction buffer, which may be used
5006 by the target. The parser has ensured that this is not a
5007 variable-sized type. */
5008 fixed_size_mode mode
5009 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
5010 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5011 offset = (offset + align - 1) & ~(align - 1);
5012 tree off = build_int_cst (sizetype, offset);
5013 offset += GET_MODE_SIZE (mode);
5015 if (!init_code)
5017 init_code = build_int_cst (integer_type_node,
5018 IFN_GOACC_REDUCTION_INIT);
5019 fini_code = build_int_cst (integer_type_node,
5020 IFN_GOACC_REDUCTION_FINI);
5021 setup_code = build_int_cst (integer_type_node,
5022 IFN_GOACC_REDUCTION_SETUP);
5023 teardown_code = build_int_cst (integer_type_node,
5024 IFN_GOACC_REDUCTION_TEARDOWN);
5027 tree setup_call
5028 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5029 TREE_TYPE (var), 6, setup_code,
5030 unshare_expr (ref_to_res),
5031 incoming, level, op, off);
5032 tree init_call
5033 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5034 TREE_TYPE (var), 6, init_code,
5035 unshare_expr (ref_to_res),
5036 v1, level, op, off);
5037 tree fini_call
5038 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5039 TREE_TYPE (var), 6, fini_code,
5040 unshare_expr (ref_to_res),
5041 v2, level, op, off);
5042 tree teardown_call
5043 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5044 TREE_TYPE (var), 6, teardown_code,
5045 ref_to_res, v3, level, op, off);
5047 gimplify_assign (v1, setup_call, &before_fork);
5048 gimplify_assign (v2, init_call, &after_fork);
5049 gimplify_assign (v3, fini_call, &before_join);
5050 gimplify_assign (outgoing, teardown_call, &after_join);
5053 /* Now stitch things together. */
5054 gimple_seq_add_seq (fork_seq, before_fork);
5055 if (fork)
5056 gimple_seq_add_stmt (fork_seq, fork);
5057 gimple_seq_add_seq (fork_seq, after_fork);
5059 gimple_seq_add_seq (join_seq, before_join);
5060 if (join)
5061 gimple_seq_add_stmt (join_seq, join);
5062 gimple_seq_add_seq (join_seq, after_join);
5065 /* Generate code to implement the REDUCTION clauses. */
5067 static void
5068 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5070 gimple_seq sub_seq = NULL;
5071 gimple *stmt;
5072 tree x, c;
5073 int count = 0;
5075 /* OpenACC loop reductions are handled elsewhere. */
5076 if (is_gimple_omp_oacc (ctx->stmt))
5077 return;
5079 /* SIMD reductions are handled in lower_rec_input_clauses. */
5080 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5081 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5082 return;
5084 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5085 update in that case, otherwise use a lock. */
5086 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5087 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5089 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5090 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5092 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5093 count = -1;
5094 break;
5096 count++;
5099 if (count == 0)
5100 return;
5102 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5104 tree var, ref, new_var, orig_var;
5105 enum tree_code code;
5106 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5108 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5109 continue;
5111 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5112 orig_var = var = OMP_CLAUSE_DECL (c);
5113 if (TREE_CODE (var) == MEM_REF)
5115 var = TREE_OPERAND (var, 0);
5116 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5117 var = TREE_OPERAND (var, 0);
5118 if (TREE_CODE (var) == ADDR_EXPR)
5119 var = TREE_OPERAND (var, 0);
5120 else
5122 /* If this is a pointer- or reference-based array
5123 section, the var could be private in the outer
5124 context, e.g. on an orphaned loop construct. Pretend this
5125 is the private variable's outer reference. */
5126 ccode = OMP_CLAUSE_PRIVATE;
5127 if (TREE_CODE (var) == INDIRECT_REF)
5128 var = TREE_OPERAND (var, 0);
5130 orig_var = var;
5131 if (is_variable_sized (var))
5133 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5134 var = DECL_VALUE_EXPR (var);
5135 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5136 var = TREE_OPERAND (var, 0);
5137 gcc_assert (DECL_P (var));
5140 new_var = lookup_decl (var, ctx);
5141 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5142 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5143 ref = build_outer_var_ref (var, ctx, ccode);
5144 code = OMP_CLAUSE_REDUCTION_CODE (c);
5146 /* reduction(-:var) sums up the partial results, so it acts
5147 identically to reduction(+:var). */
5148 if (code == MINUS_EXPR)
5149 code = PLUS_EXPR;
5151 if (count == 1)
5153 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5155 addr = save_expr (addr);
5156 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5157 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5158 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5159 gimplify_and_add (x, stmt_seqp);
5160 return;
5162 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5164 tree d = OMP_CLAUSE_DECL (c);
5165 tree type = TREE_TYPE (d);
5166 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5167 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5168 tree ptype = build_pointer_type (TREE_TYPE (type));
5169 tree bias = TREE_OPERAND (d, 1);
5170 d = TREE_OPERAND (d, 0);
5171 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5173 tree b = TREE_OPERAND (d, 1);
5174 b = maybe_lookup_decl (b, ctx);
5175 if (b == NULL)
5177 b = TREE_OPERAND (d, 1);
5178 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5180 if (integer_zerop (bias))
5181 bias = b;
5182 else
5184 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5185 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5186 TREE_TYPE (b), b, bias);
5188 d = TREE_OPERAND (d, 0);
5190 /* For a reference, build_outer_var_ref already performs this, so
5191 only new_var needs a dereference. */
5192 if (TREE_CODE (d) == INDIRECT_REF)
5194 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5195 gcc_assert (omp_is_reference (var) && var == orig_var);
5197 else if (TREE_CODE (d) == ADDR_EXPR)
5199 if (orig_var == var)
5201 new_var = build_fold_addr_expr (new_var);
5202 ref = build_fold_addr_expr (ref);
5205 else
5207 gcc_assert (orig_var == var);
5208 if (omp_is_reference (var))
5209 ref = build_fold_addr_expr (ref);
5211 if (DECL_P (v))
5213 tree t = maybe_lookup_decl (v, ctx);
5214 if (t)
5215 v = t;
5216 else
5217 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5218 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5220 if (!integer_zerop (bias))
5222 bias = fold_convert_loc (clause_loc, sizetype, bias);
5223 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5224 TREE_TYPE (new_var), new_var,
5225 unshare_expr (bias));
5226 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5227 TREE_TYPE (ref), ref, bias);
5229 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5230 ref = fold_convert_loc (clause_loc, ptype, ref);
5231 tree m = create_tmp_var (ptype, NULL);
5232 gimplify_assign (m, new_var, stmt_seqp);
5233 new_var = m;
5234 m = create_tmp_var (ptype, NULL);
5235 gimplify_assign (m, ref, stmt_seqp);
5236 ref = m;
5237 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5238 tree body = create_artificial_label (UNKNOWN_LOCATION);
5239 tree end = create_artificial_label (UNKNOWN_LOCATION);
5240 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5241 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5242 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5243 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5245 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5246 tree decl_placeholder
5247 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5248 SET_DECL_VALUE_EXPR (placeholder, out);
5249 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5250 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5251 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5252 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5253 gimple_seq_add_seq (&sub_seq,
5254 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5255 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5256 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5257 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5259 else
5261 x = build2 (code, TREE_TYPE (out), out, priv);
5262 out = unshare_expr (out);
5263 gimplify_assign (out, x, &sub_seq);
5265 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5266 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5267 gimple_seq_add_stmt (&sub_seq, g);
5268 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5269 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5270 gimple_seq_add_stmt (&sub_seq, g);
5271 g = gimple_build_assign (i, PLUS_EXPR, i,
5272 build_int_cst (TREE_TYPE (i), 1));
5273 gimple_seq_add_stmt (&sub_seq, g);
5274 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5275 gimple_seq_add_stmt (&sub_seq, g);
5276 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5278 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5280 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5282 if (omp_is_reference (var)
5283 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5284 TREE_TYPE (ref)))
5285 ref = build_fold_addr_expr_loc (clause_loc, ref);
5286 SET_DECL_VALUE_EXPR (placeholder, ref);
5287 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5288 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5289 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5290 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5291 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5293 else
5295 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5296 ref = build_outer_var_ref (var, ctx);
5297 gimplify_assign (ref, x, &sub_seq);
5301 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5302 0);
5303 gimple_seq_add_stmt (stmt_seqp, stmt);
5305 gimple_seq_add_seq (stmt_seqp, sub_seq);
5307 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5308 0);
5309 gimple_seq_add_stmt (stmt_seqp, stmt);
5313 /* Generate code to implement the COPYPRIVATE clauses. */
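/* Sketch for "#pragma omp single copyprivate(x)" (pseudo-source;
   "copy_rec" is an invented shorthand for the sender/receiver record):

     SLIST, executing thread:  copy_rec.x = x;   (or &x when by_ref)
     RLIST, other threads:     x = copy_rec.x;   (dereferenced when
						   by_ref)  */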
5315 static void
5316 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5317 omp_context *ctx)
5319 tree c;
5321 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5323 tree var, new_var, ref, x;
5324 bool by_ref;
5325 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5327 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5328 continue;
5330 var = OMP_CLAUSE_DECL (c);
5331 by_ref = use_pointer_for_field (var, NULL);
5333 ref = build_sender_ref (var, ctx);
5334 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5335 if (by_ref)
5337 x = build_fold_addr_expr_loc (clause_loc, new_var);
5338 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5340 gimplify_assign (ref, x, slist);
5342 ref = build_receiver_ref (var, false, ctx);
5343 if (by_ref)
5345 ref = fold_convert_loc (clause_loc,
5346 build_pointer_type (TREE_TYPE (new_var)),
5347 ref);
5348 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5350 if (omp_is_reference (var))
5352 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5353 ref = build_simple_mem_ref_loc (clause_loc, ref);
5354 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5356 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5357 gimplify_and_add (x, rlist);
5362 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5363 and REDUCTION from the sender (aka parent) side. */
5365 static void
5366 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5367 omp_context *ctx)
5369 tree c, t;
5370 int ignored_looptemp = 0;
5371 bool is_taskloop = false;
5373 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5374 by GOMP_taskloop. */
5375 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5377 ignored_looptemp = 2;
5378 is_taskloop = true;
5381 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5383 tree val, ref, x, var;
5384 bool by_ref, do_in = false, do_out = false;
5385 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5387 switch (OMP_CLAUSE_CODE (c))
5389 case OMP_CLAUSE_PRIVATE:
5390 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5391 break;
5392 continue;
5393 case OMP_CLAUSE_FIRSTPRIVATE:
5394 case OMP_CLAUSE_COPYIN:
5395 case OMP_CLAUSE_LASTPRIVATE:
5396 case OMP_CLAUSE_REDUCTION:
5397 break;
5398 case OMP_CLAUSE_SHARED:
5399 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5400 break;
5401 continue;
5402 case OMP_CLAUSE__LOOPTEMP_:
5403 if (ignored_looptemp)
5405 ignored_looptemp--;
5406 continue;
5408 break;
5409 default:
5410 continue;
5413 val = OMP_CLAUSE_DECL (c);
5414 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5415 && TREE_CODE (val) == MEM_REF)
5417 val = TREE_OPERAND (val, 0);
5418 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5419 val = TREE_OPERAND (val, 0);
5420 if (TREE_CODE (val) == INDIRECT_REF
5421 || TREE_CODE (val) == ADDR_EXPR)
5422 val = TREE_OPERAND (val, 0);
5423 if (is_variable_sized (val))
5424 continue;
5427 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5428 outer taskloop region. */
5429 omp_context *ctx_for_o = ctx;
5430 if (is_taskloop
5431 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5432 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5433 ctx_for_o = ctx->outer;
5435 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5437 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5438 && is_global_var (var))
5439 continue;
5441 t = omp_member_access_dummy_var (var);
5442 if (t)
5444 var = DECL_VALUE_EXPR (var);
5445 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5446 if (o != t)
5447 var = unshare_and_remap (var, t, o);
5448 else
5449 var = unshare_expr (var);
5452 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5454 /* Handle taskloop firstprivate/lastprivate, where the
5455 lastprivate on GIMPLE_OMP_TASK is represented as
5456 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5457 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5458 x = omp_build_component_ref (ctx->sender_decl, f);
5459 if (use_pointer_for_field (val, ctx))
5460 var = build_fold_addr_expr (var);
5461 gimplify_assign (x, var, ilist);
5462 DECL_ABSTRACT_ORIGIN (f) = NULL;
5463 continue;
5466 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5467 || val == OMP_CLAUSE_DECL (c))
5468 && is_variable_sized (val))
5469 continue;
5470 by_ref = use_pointer_for_field (val, NULL);
5472 switch (OMP_CLAUSE_CODE (c))
5474 case OMP_CLAUSE_FIRSTPRIVATE:
5475 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5476 && !by_ref
5477 && is_task_ctx (ctx))
5478 TREE_NO_WARNING (var) = 1;
5479 do_in = true;
5480 break;
5482 case OMP_CLAUSE_PRIVATE:
5483 case OMP_CLAUSE_COPYIN:
5484 case OMP_CLAUSE__LOOPTEMP_:
5485 do_in = true;
5486 break;
5488 case OMP_CLAUSE_LASTPRIVATE:
5489 if (by_ref || omp_is_reference (val))
5491 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5492 continue;
5493 do_in = true;
5495 else
5497 do_out = true;
5498 if (lang_hooks.decls.omp_private_outer_ref (val))
5499 do_in = true;
5501 break;
5503 case OMP_CLAUSE_REDUCTION:
5504 do_in = true;
5505 if (val == OMP_CLAUSE_DECL (c))
5506 do_out = !(by_ref || omp_is_reference (val));
5507 else
5508 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5509 break;
5511 default:
5512 gcc_unreachable ();
5515 if (do_in)
5517 ref = build_sender_ref (val, ctx);
5518 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5519 gimplify_assign (ref, x, ilist);
5520 if (is_task_ctx (ctx))
5521 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5524 if (do_out)
5526 ref = build_sender_ref (val, ctx);
5527 gimplify_assign (var, ref, olist);
5532 /* Generate code to implement SHARED from the sender (aka parent)
5533 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5534 list things that got automatically shared. */
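/* Sketch (pseudo-source) for an automatically shared "int x":

     before the region:  .omp_data_o.x = x;    (or &x when
						use_pointer_for_field)
     after the region:   x = .omp_data_o.x;    (unless read-only or
						passed by pointer)  */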
5536 static void
5537 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5539 tree var, ovar, nvar, t, f, x, record_type;
5541 if (ctx->record_type == NULL)
5542 return;
5544 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5545 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5547 ovar = DECL_ABSTRACT_ORIGIN (f);
5548 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5549 continue;
5551 nvar = maybe_lookup_decl (ovar, ctx);
5552 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5553 continue;
5555 /* If CTX is a nested parallel directive, find the immediately
5556 enclosing parallel or workshare construct that contains a
5557 mapping for OVAR. */
5558 var = lookup_decl_in_outer_ctx (ovar, ctx);
5560 t = omp_member_access_dummy_var (var);
5561 if (t)
5563 var = DECL_VALUE_EXPR (var);
5564 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5565 if (o != t)
5566 var = unshare_and_remap (var, t, o);
5567 else
5568 var = unshare_expr (var);
5571 if (use_pointer_for_field (ovar, ctx))
5573 x = build_sender_ref (ovar, ctx);
5574 var = build_fold_addr_expr (var);
5575 gimplify_assign (x, var, ilist);
5577 else
5579 x = build_sender_ref (ovar, ctx);
5580 gimplify_assign (x, var, ilist);
5582 if (!TREE_READONLY (var)
5583 /* We don't need to receive a new reference to a result
5584 or parm decl. In fact we must not store to it, as that would
5585 invalidate any pending RSO and generate wrong gimple
5586 during inlining. */
5587 && !((TREE_CODE (var) == RESULT_DECL
5588 || TREE_CODE (var) == PARM_DECL)
5589 && DECL_BY_REFERENCE (var)))
5591 x = build_sender_ref (ovar, ctx);
5592 gimplify_assign (var, x, olist);
5598 /* Emit an OpenACC head marker call, encapsulating the partitioning
5599 other information that must be processed by the target compiler.
5600 Return the maximum number of dimensions the associated loop might
5601 be partitioned over. */
5603 static unsigned
5604 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5605 gimple_seq *seq, omp_context *ctx)
5607 unsigned levels = 0;
5608 unsigned tag = 0;
5609 tree gang_static = NULL_TREE;
5610 auto_vec<tree, 5> args;
5612 args.quick_push (build_int_cst
5613 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5614 args.quick_push (ddvar);
5615 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5617 switch (OMP_CLAUSE_CODE (c))
5619 case OMP_CLAUSE_GANG:
5620 tag |= OLF_DIM_GANG;
5621 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5622 /* static:* is represented by -1, and we can ignore it, as
5623 scheduling is always static. */
5624 if (gang_static && integer_minus_onep (gang_static))
5625 gang_static = NULL_TREE;
5626 levels++;
5627 break;
5629 case OMP_CLAUSE_WORKER:
5630 tag |= OLF_DIM_WORKER;
5631 levels++;
5632 break;
5634 case OMP_CLAUSE_VECTOR:
5635 tag |= OLF_DIM_VECTOR;
5636 levels++;
5637 break;
5639 case OMP_CLAUSE_SEQ:
5640 tag |= OLF_SEQ;
5641 break;
5643 case OMP_CLAUSE_AUTO:
5644 tag |= OLF_AUTO;
5645 break;
5647 case OMP_CLAUSE_INDEPENDENT:
5648 tag |= OLF_INDEPENDENT;
5649 break;
5651 case OMP_CLAUSE_TILE:
5652 tag |= OLF_TILE;
5653 break;
5655 default:
5656 continue;
5660 if (gang_static)
5662 if (DECL_P (gang_static))
5663 gang_static = build_outer_var_ref (gang_static, ctx);
5664 tag |= OLF_GANG_STATIC;
5667 /* In a parallel region, loops are implicitly INDEPENDENT. */
5668 omp_context *tgt = enclosing_target_ctx (ctx);
5669 if (!tgt || is_oacc_parallel (tgt))
5670 tag |= OLF_INDEPENDENT;
5672 if (tag & OLF_TILE)
5673 /* Tiling could use all 3 levels. */
5674 levels = 3;
5675 else
5677 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5678 Ensure at least one level, or two for possible auto
5679 partitioning. */
5680 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5681 << OLF_DIM_BASE) | OLF_SEQ));
5683 if (levels < 1u + maybe_auto)
5684 levels = 1u + maybe_auto;
5687 args.quick_push (build_int_cst (integer_type_node, levels));
5688 args.quick_push (build_int_cst (integer_type_node, tag));
5689 if (gang_static)
5690 args.quick_push (gang_static);
5692 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5693 gimple_set_location (call, loc);
5694 gimple_set_lhs (call, ddvar);
5695 gimple_seq_add_stmt (seq, call);
5697 return levels;
5700 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
5701 partitioning level of the enclosed region. */
5703 static void
5704 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5705 tree tofollow, gimple_seq *seq)
5707 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5708 : IFN_UNIQUE_OACC_TAIL_MARK);
5709 tree marker = build_int_cst (integer_type_node, marker_kind);
5710 int nargs = 2 + (tofollow != NULL_TREE);
5711 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5712 marker, ddvar, tofollow);
5713 gimple_set_location (call, loc);
5714 gimple_set_lhs (call, ddvar);
5715 gimple_seq_add_stmt (seq, call);
5718 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5719 the loop clauses, from which we extract reductions. Initialize
5720 HEAD and TAIL. */
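/* Rough shape for two partitioning levels: HEAD becomes
   <outer fork sequence> <inner fork sequence>, and TAIL the mirrored
   <inner join sequence> <outer join sequence>, each level carrying
   its IFN_UNIQUE markers and GOACC_REDUCTION calls; the loop below
   appends to HEAD and prepends to TAIL.  */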
5722 static void
5723 lower_oacc_head_tail (location_t loc, tree clauses,
5724 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5726 bool inner = false;
5727 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5728 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5730 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5731 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5732 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5734 gcc_assert (count);
5735 for (unsigned done = 1; count; count--, done++)
5737 gimple_seq fork_seq = NULL;
5738 gimple_seq join_seq = NULL;
5740 tree place = build_int_cst (integer_type_node, -1);
5741 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5742 fork_kind, ddvar, place);
5743 gimple_set_location (fork, loc);
5744 gimple_set_lhs (fork, ddvar);
5746 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5747 join_kind, ddvar, place);
5748 gimple_set_location (join, loc);
5749 gimple_set_lhs (join, ddvar);
5751 /* Mark the beginning of this level sequence. */
5752 if (inner)
5753 lower_oacc_loop_marker (loc, ddvar, true,
5754 build_int_cst (integer_type_node, count),
5755 &fork_seq);
5756 lower_oacc_loop_marker (loc, ddvar, false,
5757 build_int_cst (integer_type_node, done),
5758 &join_seq);
5760 lower_oacc_reductions (loc, clauses, place, inner,
5761 fork, join, &fork_seq, &join_seq, ctx);
5763 /* Append this level to head. */
5764 gimple_seq_add_seq (head, fork_seq);
5765 /* Prepend it to tail. */
5766 gimple_seq_add_seq (&join_seq, *tail);
5767 *tail = join_seq;
5769 inner = true;
5772 /* Mark the end of the sequence. */
5773 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5774 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5777 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5778 catch handler and return it. This prevents programs from violating the
5779 structured block semantics with throws. */
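/* Roughly, the wrapped result is equivalent to
     try { BODY } catch { <MUST_NOT_THROW handler> }
   where the handler decl comes from the language's
   eh_protect_cleanup_actions hook when provided and otherwise falls
   back to __builtin_trap.  */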
5781 static gimple_seq
5782 maybe_catch_exception (gimple_seq body)
5784 gimple *g;
5785 tree decl;
5787 if (!flag_exceptions)
5788 return body;
5790 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5791 decl = lang_hooks.eh_protect_cleanup_actions ();
5792 else
5793 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5795 g = gimple_build_eh_must_not_throw (decl);
5796 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5797 GIMPLE_TRY_CATCH);
5799 return gimple_seq_alloc_with_stmt (g);
5803 /* Routines to lower OMP directives into OMP-GIMPLE. */
 5805 /* If CTX is a worksharing context inside a cancellable parallel
 5806    region and the construct isn't nowait, add an LHS to its
 5807    GIMPLE_OMP_RETURN and a conditional branch to the parallel's
 5808    cancel_label to handle cancellation in the implicit barrier. */
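/* The lowered tail of BODY then looks, roughly, like
     <OMP_RETURN, lhs = %cancelled>
     if (%cancelled != 0) goto <parallel's cancel_label>;
     <fallthru_label>:
   where at expansion time the implicit barrier becomes a
   cancellation-checking call whose result lands in the LHS.  */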
5810 static void
5811 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5813 gimple *omp_return = gimple_seq_last_stmt (*body);
5814 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5815 if (gimple_omp_return_nowait_p (omp_return))
5816 return;
5817 if (ctx->outer
5818 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5819 && ctx->outer->cancellable)
5821 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5822 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5823 tree lhs = create_tmp_var (c_bool_type);
5824 gimple_omp_return_set_lhs (omp_return, lhs);
5825 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5826 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5827 fold_convert (c_bool_type,
5828 boolean_false_node),
5829 ctx->outer->cancel_label, fallthru_label);
5830 gimple_seq_add_stmt (body, g);
5831 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5835 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5836 CTX is the enclosing OMP context for the current statement. */
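/* The directive is rebuilt, roughly, as
     <ilist: private/firstprivate setup>
     GIMPLE_OMP_SECTIONS <control var ".section">
     GIMPLE_OMP_SECTIONS_SWITCH
     bind { <lowered GIMPLE_OMP_SECTION bodies, lastprivate in the last> }
     GIMPLE_OMP_CONTINUE
     <olist: reductions> [cancel label] <dlist: destructors>
     GIMPLE_OMP_RETURN [nowait]  */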
5838 static void
5839 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5841 tree block, control;
5842 gimple_stmt_iterator tgsi;
5843 gomp_sections *stmt;
5844 gimple *t;
5845 gbind *new_stmt, *bind;
5846 gimple_seq ilist, dlist, olist, new_body;
5848 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5850 push_gimplify_context ();
5852 dlist = NULL;
5853 ilist = NULL;
5854 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5855 &ilist, &dlist, ctx, NULL);
5857 new_body = gimple_omp_body (stmt);
5858 gimple_omp_set_body (stmt, NULL);
5859 tgsi = gsi_start (new_body);
5860 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5862 omp_context *sctx;
5863 gimple *sec_start;
5865 sec_start = gsi_stmt (tgsi);
5866 sctx = maybe_lookup_ctx (sec_start);
5867 gcc_assert (sctx);
5869 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5870 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5871 GSI_CONTINUE_LINKING);
5872 gimple_omp_set_body (sec_start, NULL);
5874 if (gsi_one_before_end_p (tgsi))
5876 gimple_seq l = NULL;
5877 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5878 &l, ctx);
5879 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5880 gimple_omp_section_set_last (sec_start);
5883 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5884 GSI_CONTINUE_LINKING);
5887 block = make_node (BLOCK);
5888 bind = gimple_build_bind (NULL, new_body, block);
5890 olist = NULL;
5891 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5893 block = make_node (BLOCK);
5894 new_stmt = gimple_build_bind (NULL, NULL, block);
5895 gsi_replace (gsi_p, new_stmt, true);
5897 pop_gimplify_context (new_stmt);
5898 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5899 BLOCK_VARS (block) = gimple_bind_vars (bind);
5900 if (BLOCK_VARS (block))
5901 TREE_USED (block) = 1;
5903 new_body = NULL;
5904 gimple_seq_add_seq (&new_body, ilist);
5905 gimple_seq_add_stmt (&new_body, stmt);
5906 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5907 gimple_seq_add_stmt (&new_body, bind);
5909 control = create_tmp_var (unsigned_type_node, ".section");
5910 t = gimple_build_omp_continue (control, control);
5911 gimple_omp_sections_set_control (stmt, control);
5912 gimple_seq_add_stmt (&new_body, t);
5914 gimple_seq_add_seq (&new_body, olist);
5915 if (ctx->cancellable)
5916 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5917 gimple_seq_add_seq (&new_body, dlist);
5919 new_body = maybe_catch_exception (new_body);
5921 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5922 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5923 t = gimple_build_omp_return (nowait);
5924 gimple_seq_add_stmt (&new_body, t);
5925 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5927 gimple_bind_set_body (new_stmt, new_body);
5931 /* A subroutine of lower_omp_single. Expand the simple form of
5932 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5934 if (GOMP_single_start ())
5935 BODY;
5936 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5938 FIXME. It may be better to delay expanding the logic of this until
5939 pass_expand_omp. The expanded logic may make the job more difficult
 5940    for a synchronization analysis pass. */
5942 static void
5943 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5945 location_t loc = gimple_location (single_stmt);
5946 tree tlabel = create_artificial_label (loc);
5947 tree flabel = create_artificial_label (loc);
5948 gimple *call, *cond;
5949 tree lhs, decl;
5951 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5952 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5953 call = gimple_build_call (decl, 0);
5954 gimple_call_set_lhs (call, lhs);
5955 gimple_seq_add_stmt (pre_p, call);
5957 cond = gimple_build_cond (EQ_EXPR, lhs,
5958 fold_convert_loc (loc, TREE_TYPE (lhs),
5959 boolean_true_node),
5960 tlabel, flabel);
5961 gimple_seq_add_stmt (pre_p, cond);
5962 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5963 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5964 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
5968 /* A subroutine of lower_omp_single. Expand the simple form of
5969 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
5971 #pragma omp single copyprivate (a, b, c)
5973 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5976 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5978 BODY;
5979 copyout.a = a;
5980 copyout.b = b;
5981 copyout.c = c;
5982 GOMP_single_copy_end (&copyout);
5984 else
5986 a = copyout_p->a;
5987 b = copyout_p->b;
5988 c = copyout_p->c;
5990 GOMP_barrier ();
5993 FIXME. It may be better to delay expanding the logic of this until
5994 pass_expand_omp. The expanded logic may make the job more difficult
 5995    for a synchronization analysis pass. */
5997 static void
5998 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
5999 omp_context *ctx)
6001 tree ptr_type, t, l0, l1, l2, bfn_decl;
6002 gimple_seq copyin_seq;
6003 location_t loc = gimple_location (single_stmt);
6005 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6007 ptr_type = build_pointer_type (ctx->record_type);
6008 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6010 l0 = create_artificial_label (loc);
6011 l1 = create_artificial_label (loc);
6012 l2 = create_artificial_label (loc);
6014 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6015 t = build_call_expr_loc (loc, bfn_decl, 0);
6016 t = fold_convert_loc (loc, ptr_type, t);
6017 gimplify_assign (ctx->receiver_decl, t, pre_p);
6019 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6020 build_int_cst (ptr_type, 0));
6021 t = build3 (COND_EXPR, void_type_node, t,
6022 build_and_jump (&l0), build_and_jump (&l1));
6023 gimplify_and_add (t, pre_p);
6025 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6027 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6029 copyin_seq = NULL;
6030 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6031 &copyin_seq, ctx);
6033 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6034 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6035 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6036 gimplify_and_add (t, pre_p);
6038 t = build_and_jump (&l2);
6039 gimplify_and_add (t, pre_p);
6041 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6043 gimple_seq_add_seq (pre_p, copyin_seq);
6045 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6049 /* Expand code for an OpenMP single directive. */
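/* When a copyprivate clause is present, ctx->record_type is set and
   the copy form below is used; otherwise the simple form is.  In
   either case an implicit barrier follows unless nowait was given,
   and the copy-out record, if any, is clobbered afterwards.  */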
6051 static void
6052 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6054 tree block;
6055 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6056 gbind *bind;
6057 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6059 push_gimplify_context ();
6061 block = make_node (BLOCK);
6062 bind = gimple_build_bind (NULL, NULL, block);
6063 gsi_replace (gsi_p, bind, true);
6064 bind_body = NULL;
6065 dlist = NULL;
6066 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6067 &bind_body, &dlist, ctx, NULL);
6068 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6070 gimple_seq_add_stmt (&bind_body, single_stmt);
6072 if (ctx->record_type)
6073 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6074 else
6075 lower_omp_single_simple (single_stmt, &bind_body);
6077 gimple_omp_set_body (single_stmt, NULL);
6079 gimple_seq_add_seq (&bind_body, dlist);
6081 bind_body = maybe_catch_exception (bind_body);
6083 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6084 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6085 gimple *g = gimple_build_omp_return (nowait);
6086 gimple_seq_add_stmt (&bind_body_tail, g);
6087 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6088 if (ctx->record_type)
6090 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6091 tree clobber = build_constructor (ctx->record_type, NULL);
6092 TREE_THIS_VOLATILE (clobber) = 1;
6093 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6094 clobber), GSI_SAME_STMT);
6096 gimple_seq_add_seq (&bind_body, bind_body_tail);
6097 gimple_bind_set_body (bind, bind_body);
6099 pop_gimplify_context (bind);
6101 gimple_bind_append_vars (bind, ctx->block_vars);
6102 BLOCK_VARS (block) = ctx->block_vars;
6103 if (BLOCK_VARS (block))
6104 TREE_USED (block) = 1;
6108 /* Expand code for an OpenMP master directive. */
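/* The lowering is, in essence,
     if (omp_get_thread_num () != 0) goto lab;
     BODY;
     lab:
   with no implied barrier at the end.  */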
6110 static void
6111 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6113 tree block, lab = NULL, x, bfn_decl;
6114 gimple *stmt = gsi_stmt (*gsi_p);
6115 gbind *bind;
6116 location_t loc = gimple_location (stmt);
6117 gimple_seq tseq;
6119 push_gimplify_context ();
6121 block = make_node (BLOCK);
6122 bind = gimple_build_bind (NULL, NULL, block);
6123 gsi_replace (gsi_p, bind, true);
6124 gimple_bind_add_stmt (bind, stmt);
6126 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6127 x = build_call_expr_loc (loc, bfn_decl, 0);
6128 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6129 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6130 tseq = NULL;
6131 gimplify_and_add (x, &tseq);
6132 gimple_bind_add_seq (bind, tseq);
6134 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6135 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6136 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6137 gimple_omp_set_body (stmt, NULL);
6139 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6141 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6143 pop_gimplify_context (bind);
6145 gimple_bind_append_vars (bind, ctx->block_vars);
6146 BLOCK_VARS (block) = ctx->block_vars;
6150 /* Expand code for an OpenMP taskgroup directive. */
6152 static void
6153 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6155 gimple *stmt = gsi_stmt (*gsi_p);
6156 gcall *x;
6157 gbind *bind;
6158 tree block = make_node (BLOCK);
6160 bind = gimple_build_bind (NULL, NULL, block);
6161 gsi_replace (gsi_p, bind, true);
6162 gimple_bind_add_stmt (bind, stmt);
 6164   x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
 6165 			 0);
6166 gimple_bind_add_stmt (bind, x);
6168 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6169 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6170 gimple_omp_set_body (stmt, NULL);
6172 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6174 gimple_bind_append_vars (bind, ctx->block_vars);
6175 BLOCK_VARS (block) = ctx->block_vars;
6179 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6181 static void
6182 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6183 omp_context *ctx)
6185 struct omp_for_data fd;
6186 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6187 return;
6189 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6190 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6191 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6192 if (!fd.ordered)
6193 return;
6195 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6196 tree c = gimple_omp_ordered_clauses (ord_stmt);
6197 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6198 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6200 /* Merge depend clauses from multiple adjacent
6201 #pragma omp ordered depend(sink:...) constructs
6202 into one #pragma omp ordered depend(sink:...), so that
6203 we can optimize them together. */
6204 gimple_stmt_iterator gsi = *gsi_p;
6205 gsi_next (&gsi);
6206 while (!gsi_end_p (gsi))
6208 gimple *stmt = gsi_stmt (gsi);
6209 if (is_gimple_debug (stmt)
6210 || gimple_code (stmt) == GIMPLE_NOP)
6212 gsi_next (&gsi);
6213 continue;
6215 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6216 break;
6217 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6218 c = gimple_omp_ordered_clauses (ord_stmt2);
6219 if (c == NULL_TREE
6220 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6221 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6222 break;
6223 while (*list_p)
6224 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6225 *list_p = c;
6226 gsi_remove (&gsi, true);
6230 /* Canonicalize sink dependence clauses into one folded clause if
6231 possible.
6233 The basic algorithm is to create a sink vector whose first
6234 element is the GCD of all the first elements, and whose remaining
6235 elements are the minimum of the subsequent columns.
6237 We ignore dependence vectors whose first element is zero because
6238 such dependencies are known to be executed by the same thread.
6240 We take into account the direction of the loop, so a minimum
6241 becomes a maximum if the loop is iterating forwards. We also
6242 ignore sink clauses where the loop direction is unknown, or where
6243 the offsets are clearly invalid because they are not a multiple
6244 of the loop increment.
6246 For example:
6248 #pragma omp for ordered(2)
6249 for (i=0; i < N; ++i)
6250 for (j=0; j < M; ++j)
6252 #pragma omp ordered \
6253 depend(sink:i-8,j-2) \
6254 depend(sink:i,j-1) \ // Completely ignored because i+0.
6255 depend(sink:i-4,j-3) \
6256 depend(sink:i-6,j-4)
6257 #pragma omp ordered depend(source)
6260 Folded clause is:
6262 depend(sink:-gcd(8,4,6),-min(2,3,4))
6263 -or-
6264 depend(sink:-2,-2)
6267 /* FIXME: Computing GCD's where the first element is zero is
6268 non-trivial in the presence of collapsed loops. Do this later. */
6269 if (fd.collapse > 1)
6270 return;
6272 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6274 /* wide_int is not a POD so it must be default-constructed. */
6275 for (unsigned i = 0; i != 2 * len - 1; ++i)
6276 new (static_cast<void*>(folded_deps + i)) wide_int ();
6278 tree folded_dep = NULL_TREE;
6279 /* TRUE if the first dimension's offset is negative. */
6280 bool neg_offset_p = false;
6282 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6283 unsigned int i;
6284 while ((c = *list_p) != NULL)
6286 bool remove = false;
6288 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6289 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6290 goto next_ordered_clause;
6292 tree vec;
6293 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6294 vec && TREE_CODE (vec) == TREE_LIST;
6295 vec = TREE_CHAIN (vec), ++i)
6297 gcc_assert (i < len);
6299 /* omp_extract_for_data has canonicalized the condition. */
6300 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6301 || fd.loops[i].cond_code == GT_EXPR);
6302 bool forward = fd.loops[i].cond_code == LT_EXPR;
6303 bool maybe_lexically_later = true;
6305 /* While the committee makes up its mind, bail if we have any
6306 non-constant steps. */
6307 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6308 goto lower_omp_ordered_ret;
6310 tree itype = TREE_TYPE (TREE_VALUE (vec));
6311 if (POINTER_TYPE_P (itype))
6312 itype = sizetype;
6313 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
6314 TYPE_PRECISION (itype),
6315 TYPE_SIGN (itype));
6317 /* Ignore invalid offsets that are not multiples of the step. */
6318 if (!wi::multiple_of_p (wi::abs (offset),
6319 wi::abs (wi::to_wide (fd.loops[i].step)),
6320 UNSIGNED))
6322 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6323 "ignoring sink clause with offset that is not "
6324 "a multiple of the loop step");
6325 remove = true;
6326 goto next_ordered_clause;
6329 /* Calculate the first dimension. The first dimension of
6330 the folded dependency vector is the GCD of the first
6331 elements, while ignoring any first elements whose offset
6332 is 0. */
6333 if (i == 0)
6335 /* Ignore dependence vectors whose first dimension is 0. */
6336 if (offset == 0)
6338 remove = true;
6339 goto next_ordered_clause;
6341 else
6343 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6345 error_at (OMP_CLAUSE_LOCATION (c),
6346 "first offset must be in opposite direction "
6347 "of loop iterations");
6348 goto lower_omp_ordered_ret;
6350 if (forward)
6351 offset = -offset;
6352 neg_offset_p = forward;
6353 /* Initialize the first time around. */
6354 if (folded_dep == NULL_TREE)
6356 folded_dep = c;
6357 folded_deps[0] = offset;
6359 else
6360 folded_deps[0] = wi::gcd (folded_deps[0],
6361 offset, UNSIGNED);
6364 /* Calculate minimum for the remaining dimensions. */
6365 else
6367 folded_deps[len + i - 1] = offset;
6368 if (folded_dep == c)
6369 folded_deps[i] = offset;
6370 else if (maybe_lexically_later
6371 && !wi::eq_p (folded_deps[i], offset))
6373 if (forward ^ wi::gts_p (folded_deps[i], offset))
6375 unsigned int j;
6376 folded_dep = c;
6377 for (j = 1; j <= i; j++)
6378 folded_deps[j] = folded_deps[len + j - 1];
6380 else
6381 maybe_lexically_later = false;
6385 gcc_assert (i == len);
6387 remove = true;
6389 next_ordered_clause:
6390 if (remove)
6391 *list_p = OMP_CLAUSE_CHAIN (c);
6392 else
6393 list_p = &OMP_CLAUSE_CHAIN (c);
6396 if (folded_dep)
6398 if (neg_offset_p)
6399 folded_deps[0] = -folded_deps[0];
6401 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6402 if (POINTER_TYPE_P (itype))
6403 itype = sizetype;
6405 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6406 = wide_int_to_tree (itype, folded_deps[0]);
6407 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6408 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6411 lower_omp_ordered_ret:
 6413   /* Ordered without clauses is #pragma omp ordered threads, while we want
6414 a nop instead if we remove all clauses. */
6415 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6416 gsi_replace (gsi_p, gimple_build_nop (), true);
6420 /* Expand code for an OpenMP ordered directive. */
6422 static void
6423 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6425 tree block;
6426 gimple *stmt = gsi_stmt (*gsi_p), *g;
6427 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6428 gcall *x;
6429 gbind *bind;
6430 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6431 OMP_CLAUSE_SIMD);
6432 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6433 loop. */
6434 bool maybe_simt
6435 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6436 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6437 OMP_CLAUSE_THREADS);
6439 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6440 OMP_CLAUSE_DEPEND))
 6442       /* FIXME: This needs to be moved to the expansion to verify various
6443 conditions only testable on cfg with dominators computed, and also
6444 all the depend clauses to be merged still might need to be available
6445 for the runtime checks. */
6446 if (0)
6447 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6448 return;
6451 push_gimplify_context ();
6453 block = make_node (BLOCK);
6454 bind = gimple_build_bind (NULL, NULL, block);
6455 gsi_replace (gsi_p, bind, true);
6456 gimple_bind_add_stmt (bind, stmt);
6458 if (simd)
6460 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6461 build_int_cst (NULL_TREE, threads));
6462 cfun->has_simduid_loops = true;
6464 else
 6465     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
 6466 			   0);
6467 gimple_bind_add_stmt (bind, x);
6469 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
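  /* For SIMT targets the ordered body has to execute once per SIMT
     lane, one lane at a time: COUNTER holds this lane's number,
     IFN_GOMP_SIMT_ORDERED_PRED gates whose turn it is on each pass,
     and the loop built below re-runs until IFN_GOMP_SIMT_VOTE_ANY
     reports that no lane's counter is still non-negative.  */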
6470 if (maybe_simt)
6472 counter = create_tmp_var (integer_type_node);
6473 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6474 gimple_call_set_lhs (g, counter);
6475 gimple_bind_add_stmt (bind, g);
6477 body = create_artificial_label (UNKNOWN_LOCATION);
6478 test = create_artificial_label (UNKNOWN_LOCATION);
6479 gimple_bind_add_stmt (bind, gimple_build_label (body));
6481 tree simt_pred = create_tmp_var (integer_type_node);
6482 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6483 gimple_call_set_lhs (g, simt_pred);
6484 gimple_bind_add_stmt (bind, g);
6486 tree t = create_artificial_label (UNKNOWN_LOCATION);
6487 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6488 gimple_bind_add_stmt (bind, g);
6490 gimple_bind_add_stmt (bind, gimple_build_label (t));
6492 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6493 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6494 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6495 gimple_omp_set_body (stmt, NULL);
6497 if (maybe_simt)
6499 gimple_bind_add_stmt (bind, gimple_build_label (test));
6500 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6501 gimple_bind_add_stmt (bind, g);
6503 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6504 tree nonneg = create_tmp_var (integer_type_node);
6505 gimple_seq tseq = NULL;
6506 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6507 gimple_bind_add_seq (bind, tseq);
6509 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6510 gimple_call_set_lhs (g, nonneg);
6511 gimple_bind_add_stmt (bind, g);
6513 tree end = create_artificial_label (UNKNOWN_LOCATION);
6514 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6515 gimple_bind_add_stmt (bind, g);
6517 gimple_bind_add_stmt (bind, gimple_build_label (end));
6519 if (simd)
6520 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6521 build_int_cst (NULL_TREE, threads));
6522 else
 6523     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
 6524 			    0);
6525 gimple_bind_add_stmt (bind, x);
6527 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6529 pop_gimplify_context (bind);
6531 gimple_bind_append_vars (bind, ctx->block_vars);
6532 BLOCK_VARS (block) = gimple_bind_vars (bind);
6536 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
 6537    substitution of a couple of function calls.  But in the NAMED case,
 6538    it requires that languages coordinate a symbol name.  It is therefore
6539 best put here in common code. */
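/* For instance, '#pragma omp critical (foo)' locks through a shared
   symbol named ".gomp_critical_user_foo" via
   GOMP_critical_name_start/GOMP_critical_name_end, whereas the unnamed
   form uses GOMP_critical_start/GOMP_critical_end instead.  */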
6541 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6543 static void
6544 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6546 tree block;
6547 tree name, lock, unlock;
6548 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6549 gbind *bind;
6550 location_t loc = gimple_location (stmt);
6551 gimple_seq tbody;
6553 name = gimple_omp_critical_name (stmt);
6554 if (name)
6556 tree decl;
6558 if (!critical_name_mutexes)
6559 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6561 tree *n = critical_name_mutexes->get (name);
6562 if (n == NULL)
6564 char *new_str;
6566 decl = create_tmp_var_raw (ptr_type_node);
6568 new_str = ACONCAT ((".gomp_critical_user_",
6569 IDENTIFIER_POINTER (name), NULL));
6570 DECL_NAME (decl) = get_identifier (new_str);
6571 TREE_PUBLIC (decl) = 1;
6572 TREE_STATIC (decl) = 1;
6573 DECL_COMMON (decl) = 1;
6574 DECL_ARTIFICIAL (decl) = 1;
6575 DECL_IGNORED_P (decl) = 1;
6577 varpool_node::finalize_decl (decl);
6579 critical_name_mutexes->put (name, decl);
6581 else
6582 decl = *n;
 6584       /* If '#pragma omp critical' is inside an offloaded region or
 6585 	 inside a function marked as offloadable, the symbol must be
6586 marked as offloadable too. */
6587 omp_context *octx;
6588 if (cgraph_node::get (current_function_decl)->offloadable)
6589 varpool_node::get_create (decl)->offloadable = 1;
6590 else
6591 for (octx = ctx->outer; octx; octx = octx->outer)
6592 if (is_gimple_omp_offloaded (octx->stmt))
6594 varpool_node::get_create (decl)->offloadable = 1;
6595 break;
6598 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6599 lock = build_call_expr_loc (loc, lock, 1,
6600 build_fold_addr_expr_loc (loc, decl));
6602 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6603 unlock = build_call_expr_loc (loc, unlock, 1,
6604 build_fold_addr_expr_loc (loc, decl));
6606 else
6608 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6609 lock = build_call_expr_loc (loc, lock, 0);
6611 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6612 unlock = build_call_expr_loc (loc, unlock, 0);
6615 push_gimplify_context ();
6617 block = make_node (BLOCK);
6618 bind = gimple_build_bind (NULL, NULL, block);
6619 gsi_replace (gsi_p, bind, true);
6620 gimple_bind_add_stmt (bind, stmt);
6622 tbody = gimple_bind_body (bind);
6623 gimplify_and_add (lock, &tbody);
6624 gimple_bind_set_body (bind, tbody);
6626 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6627 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6628 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6629 gimple_omp_set_body (stmt, NULL);
6631 tbody = gimple_bind_body (bind);
6632 gimplify_and_add (unlock, &tbody);
6633 gimple_bind_set_body (bind, tbody);
6635 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6637 pop_gimplify_context (bind);
6638 gimple_bind_append_vars (bind, ctx->block_vars);
6639 BLOCK_VARS (block) = gimple_bind_vars (bind);
6642 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6643 for a lastprivate clause. Given a loop control predicate of (V
6644 cond N2), we gate the clause on (!(V cond N2)). The lowered form
 6645    is appended to *DLIST, and the iterator initialization is appended to
 6646    *BODY_P. */
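/* For example, for (V = N1; V < N2; V += STEP) gates the lastprivate
   code on (V >= N2) after the loop, or on (V == N2) when STEP is known
   to be 1 or -1 and the equality test is therefore exact.  */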
6648 static void
6649 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6650 gimple_seq *dlist, struct omp_context *ctx)
6652 tree clauses, cond, vinit;
6653 enum tree_code cond_code;
6654 gimple_seq stmts;
6656 cond_code = fd->loop.cond_code;
6657 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6659 /* When possible, use a strict equality expression. This can let VRP
6660 type optimizations deduce the value and remove a copy. */
6661 if (tree_fits_shwi_p (fd->loop.step))
6663 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6664 if (step == 1 || step == -1)
6665 cond_code = EQ_EXPR;
6668 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6669 || gimple_omp_for_grid_phony (fd->for_stmt))
6670 cond = omp_grid_lastprivate_predicate (fd);
6671 else
6673 tree n2 = fd->loop.n2;
6674 if (fd->collapse > 1
6675 && TREE_CODE (n2) != INTEGER_CST
6676 && gimple_omp_for_combined_into_p (fd->for_stmt))
6678 struct omp_context *taskreg_ctx = NULL;
6679 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6681 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6682 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6683 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6685 if (gimple_omp_for_combined_into_p (gfor))
6687 gcc_assert (ctx->outer->outer
6688 && is_parallel_ctx (ctx->outer->outer));
6689 taskreg_ctx = ctx->outer->outer;
6691 else
6693 struct omp_for_data outer_fd;
6694 omp_extract_for_data (gfor, &outer_fd, NULL);
6695 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6698 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6699 taskreg_ctx = ctx->outer->outer;
6701 else if (is_taskreg_ctx (ctx->outer))
6702 taskreg_ctx = ctx->outer;
6703 if (taskreg_ctx)
6705 int i;
6706 tree taskreg_clauses
6707 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6708 tree innerc = omp_find_clause (taskreg_clauses,
6709 OMP_CLAUSE__LOOPTEMP_);
6710 gcc_assert (innerc);
6711 for (i = 0; i < fd->collapse; i++)
6713 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6714 OMP_CLAUSE__LOOPTEMP_);
6715 gcc_assert (innerc);
6717 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6718 OMP_CLAUSE__LOOPTEMP_);
6719 if (innerc)
6720 n2 = fold_convert (TREE_TYPE (n2),
6721 lookup_decl (OMP_CLAUSE_DECL (innerc),
6722 taskreg_ctx));
6725 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6728 clauses = gimple_omp_for_clauses (fd->for_stmt);
6729 stmts = NULL;
6730 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6731 if (!gimple_seq_empty_p (stmts))
6733 gimple_seq_add_seq (&stmts, *dlist);
6734 *dlist = stmts;
6736 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6737 vinit = fd->loop.n1;
6738 if (cond_code == EQ_EXPR
6739 && tree_fits_shwi_p (fd->loop.n2)
6740 && ! integer_zerop (fd->loop.n2))
6741 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6742 else
6743 vinit = unshare_expr (vinit);
6745 /* Initialize the iterator variable, so that threads that don't execute
6746 any iterations don't execute the lastprivate clauses by accident. */
6747 gimplify_assign (fd->loop.v, vinit, body_p);
6752 /* Lower code for an OMP loop directive. */
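/* The resulting body is assembled, roughly, as
     <input clause setup> <pre-body> <lowered bound temporaries>
     [oacc_head] <lastprivate iterator init>
     GIMPLE_OMP_FOR <lowered loop body>
     GIMPLE_OMP_CONTINUE <reductions> [cancel label] <destructors>
     GIMPLE_OMP_RETURN [oacc_tail]  */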
6754 static void
6755 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6757 tree *rhs_p, block;
6758 struct omp_for_data fd, *fdp = NULL;
6759 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6760 gbind *new_stmt;
6761 gimple_seq omp_for_body, body, dlist;
6762 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6763 size_t i;
6765 push_gimplify_context ();
6767 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6769 block = make_node (BLOCK);
6770 new_stmt = gimple_build_bind (NULL, NULL, block);
 6771   /* Replace at gsi right away, so that 'stmt' is no longer a member
 6772      of a sequence, as we're going to add it to a different
 6773      one below. */
6774 gsi_replace (gsi_p, new_stmt, true);
6776 /* Move declaration of temporaries in the loop body before we make
6777 it go away. */
6778 omp_for_body = gimple_omp_body (stmt);
6779 if (!gimple_seq_empty_p (omp_for_body)
6780 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6782 gbind *inner_bind
6783 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6784 tree vars = gimple_bind_vars (inner_bind);
6785 gimple_bind_append_vars (new_stmt, vars);
6786 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
 6787 	 keep them on the inner_bind and its block. */
6788 gimple_bind_set_vars (inner_bind, NULL_TREE);
6789 if (gimple_bind_block (inner_bind))
6790 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6793 if (gimple_omp_for_combined_into_p (stmt))
6795 omp_extract_for_data (stmt, &fd, NULL);
6796 fdp = &fd;
6798 /* We need two temporaries with fd.loop.v type (istart/iend)
6799 and then (fd.collapse - 1) temporaries with the same
6800 type for count2 ... countN-1 vars if not constant. */
6801 size_t count = 2;
6802 tree type = fd.iter_type;
6803 if (fd.collapse > 1
6804 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6805 count += fd.collapse - 1;
6806 bool taskreg_for
6807 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6808 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6809 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6810 tree simtc = NULL;
6811 tree clauses = *pc;
6812 if (taskreg_for)
6813 outerc
6814 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6815 OMP_CLAUSE__LOOPTEMP_);
6816 if (ctx->simt_stmt)
6817 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6818 OMP_CLAUSE__LOOPTEMP_);
6819 for (i = 0; i < count; i++)
6821 tree temp;
6822 if (taskreg_for)
6824 gcc_assert (outerc);
6825 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6826 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6827 OMP_CLAUSE__LOOPTEMP_);
6829 else
6831 /* If there are 2 adjacent SIMD stmts, one with _simt_
6832 clause, another without, make sure they have the same
6833 decls in _looptemp_ clauses, because the outer stmt
6834 they are combined into will look up just one inner_stmt. */
6835 if (ctx->simt_stmt)
6836 temp = OMP_CLAUSE_DECL (simtc);
6837 else
6838 temp = create_tmp_var (type);
6839 insert_decl_map (&ctx->outer->cb, temp, temp);
6841 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6842 OMP_CLAUSE_DECL (*pc) = temp;
6843 pc = &OMP_CLAUSE_CHAIN (*pc);
6844 if (ctx->simt_stmt)
6845 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6846 OMP_CLAUSE__LOOPTEMP_);
6848 *pc = clauses;
6851 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6852 dlist = NULL;
6853 body = NULL;
6854 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6855 fdp);
6856 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6858 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6860 /* Lower the header expressions. At this point, we can assume that
6861 the header is of the form:
6863 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6865 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6866 using the .omp_data_s mapping, if needed. */
6867 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6869 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6870 if (!is_gimple_min_invariant (*rhs_p))
6871 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6872 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6873 recompute_tree_invariant_for_addr_expr (*rhs_p);
6875 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6876 if (!is_gimple_min_invariant (*rhs_p))
6877 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6878 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6879 recompute_tree_invariant_for_addr_expr (*rhs_p);
6881 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6882 if (!is_gimple_min_invariant (*rhs_p))
6883 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6886 /* Once lowered, extract the bounds and clauses. */
6887 omp_extract_for_data (stmt, &fd, NULL);
6889 if (is_gimple_omp_oacc (ctx->stmt)
6890 && !ctx_in_oacc_kernels_region (ctx))
6891 lower_oacc_head_tail (gimple_location (stmt),
6892 gimple_omp_for_clauses (stmt),
6893 &oacc_head, &oacc_tail, ctx);
6895 /* Add OpenACC partitioning and reduction markers just before the loop. */
6896 if (oacc_head)
6897 gimple_seq_add_seq (&body, oacc_head);
6899 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6901 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6902 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6903 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6904 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6906 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6907 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6908 OMP_CLAUSE_LINEAR_STEP (c)
6909 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6910 ctx);
6913 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6914 && gimple_omp_for_grid_phony (stmt));
6915 if (!phony_loop)
6916 gimple_seq_add_stmt (&body, stmt);
6917 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6919 if (!phony_loop)
6920 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6921 fd.loop.v));
6923 /* After the loop, add exit clauses. */
6924 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6926 if (ctx->cancellable)
6927 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6929 gimple_seq_add_seq (&body, dlist);
6931 body = maybe_catch_exception (body);
6933 if (!phony_loop)
6935 /* Region exit marker goes at the end of the loop body. */
6936 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6937 maybe_add_implicit_barrier_cancel (ctx, &body);
6940 /* Add OpenACC joining and reduction markers just after the loop. */
6941 if (oacc_tail)
6942 gimple_seq_add_seq (&body, oacc_tail);
6944 pop_gimplify_context (new_stmt);
6946 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6947 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6948 if (BLOCK_VARS (block))
6949 TREE_USED (block) = 1;
6951 gimple_bind_set_body (new_stmt, body);
6952 gimple_omp_set_body (stmt, NULL);
6953 gimple_omp_for_set_pre_body (stmt, NULL);
6956 /* Callback for walk_stmts. Check if the current statement only contains
6957 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
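/* WI->INFO points at a counter: 0 means nothing seen yet, 1 means
   exactly one worksharing construct so far, and -1 means the parallel
   body cannot be treated as combined.  The caller only marks the
   parallel combined when the final value is 1.  */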
6959 static tree
6960 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6961 bool *handled_ops_p,
6962 struct walk_stmt_info *wi)
6964 int *info = (int *) wi->info;
6965 gimple *stmt = gsi_stmt (*gsi_p);
6967 *handled_ops_p = true;
6968 switch (gimple_code (stmt))
6970 WALK_SUBSTMTS;
6972 case GIMPLE_DEBUG:
6973 break;
6974 case GIMPLE_OMP_FOR:
6975 case GIMPLE_OMP_SECTIONS:
6976 *info = *info == 0 ? 1 : -1;
6977 break;
6978 default:
6979 *info = -1;
6980 break;
6982 return NULL;
6985 struct omp_taskcopy_context
6987 /* This field must be at the beginning, as we do "inheritance": Some
6988 callback functions for tree-inline.c (e.g., omp_copy_decl)
6989 receive a copy_body_data pointer that is up-casted to an
6990 omp_context pointer. */
6991 copy_body_data cb;
6992 omp_context *ctx;
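/* copy_body_data callback for create_task_copyfn: variables that have
   a field in the task's shared record (sfield_map) get a fresh
   temporary in the copy function; everything else is left as-is.  */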
6995 static tree
6996 task_copyfn_copy_decl (tree var, copy_body_data *cb)
6998 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7000 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7001 return create_tmp_var (TREE_TYPE (var));
7003 return var;
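/* Build a variant of ORIG_TYPE whose field sizes and offsets are
   remapped through TCCTX, for records that contain variably-modified
   (VLA) members.  */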
7006 static tree
7007 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7009 tree name, new_fields = NULL, type, f;
7011 type = lang_hooks.types.make_type (RECORD_TYPE);
7012 name = DECL_NAME (TYPE_NAME (orig_type));
7013 name = build_decl (gimple_location (tcctx->ctx->stmt),
7014 TYPE_DECL, name, type);
7015 TYPE_NAME (type) = name;
7017 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7019 tree new_f = copy_node (f);
7020 DECL_CONTEXT (new_f) = type;
7021 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7022 TREE_CHAIN (new_f) = new_fields;
7023 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7024 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7025 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7026 &tcctx->cb, NULL);
7027 new_fields = new_f;
7028 tcctx->cb.decl_map->put (f, new_f);
7030 TYPE_FIELDS (type) = nreverse (new_fields);
7031 layout_type (type);
7032 return type;
 7035 /* Create task copyfn: the function that initializes a new task's data
      record from the parent's (shared-variable pointers, firstprivate
      copies, VLA fixups). */
7037 static void
7038 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7040 struct function *child_cfun;
7041 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7042 tree record_type, srecord_type, bind, list;
7043 bool record_needs_remap = false, srecord_needs_remap = false;
7044 splay_tree_node n;
7045 struct omp_taskcopy_context tcctx;
7046 location_t loc = gimple_location (task_stmt);
7048 child_fn = gimple_omp_task_copy_fn (task_stmt);
7049 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7050 gcc_assert (child_cfun->cfg == NULL);
7051 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7053 /* Reset DECL_CONTEXT on function arguments. */
7054 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7055 DECL_CONTEXT (t) = child_fn;
7057 /* Populate the function. */
7058 push_gimplify_context ();
7059 push_cfun (child_cfun);
7061 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7062 TREE_SIDE_EFFECTS (bind) = 1;
7063 list = NULL;
7064 DECL_SAVED_TREE (child_fn) = bind;
7065 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7067 /* Remap src and dst argument types if needed. */
7068 record_type = ctx->record_type;
7069 srecord_type = ctx->srecord_type;
7070 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7071 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7073 record_needs_remap = true;
7074 break;
7076 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7077 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7079 srecord_needs_remap = true;
7080 break;
7083 if (record_needs_remap || srecord_needs_remap)
7085 memset (&tcctx, '\0', sizeof (tcctx));
7086 tcctx.cb.src_fn = ctx->cb.src_fn;
7087 tcctx.cb.dst_fn = child_fn;
7088 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7089 gcc_checking_assert (tcctx.cb.src_node);
7090 tcctx.cb.dst_node = tcctx.cb.src_node;
7091 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7092 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7093 tcctx.cb.eh_lp_nr = 0;
7094 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7095 tcctx.cb.decl_map = new hash_map<tree, tree>;
7096 tcctx.ctx = ctx;
7098 if (record_needs_remap)
7099 record_type = task_copyfn_remap_type (&tcctx, record_type);
7100 if (srecord_needs_remap)
7101 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7103 else
7104 tcctx.cb.decl_map = NULL;
7106 arg = DECL_ARGUMENTS (child_fn);
7107 TREE_TYPE (arg) = build_pointer_type (record_type);
7108 sarg = DECL_CHAIN (arg);
7109 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7111 /* First pass: initialize temporaries used in record_type and srecord_type
7112 sizes and field offsets. */
7113 if (tcctx.cb.decl_map)
7114 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7115 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7117 tree *p;
7119 decl = OMP_CLAUSE_DECL (c);
7120 p = tcctx.cb.decl_map->get (decl);
7121 if (p == NULL)
7122 continue;
7123 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7124 sf = (tree) n->value;
7125 sf = *tcctx.cb.decl_map->get (sf);
7126 src = build_simple_mem_ref_loc (loc, sarg);
7127 src = omp_build_component_ref (src, sf);
7128 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7129 append_to_statement_list (t, &list);
7132 /* Second pass: copy shared var pointers and copy construct non-VLA
7133 firstprivate vars. */
7134 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7135 switch (OMP_CLAUSE_CODE (c))
7137 splay_tree_key key;
7138 case OMP_CLAUSE_SHARED:
7139 decl = OMP_CLAUSE_DECL (c);
7140 key = (splay_tree_key) decl;
7141 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7142 key = (splay_tree_key) &DECL_UID (decl);
7143 n = splay_tree_lookup (ctx->field_map, key);
7144 if (n == NULL)
7145 break;
7146 f = (tree) n->value;
7147 if (tcctx.cb.decl_map)
7148 f = *tcctx.cb.decl_map->get (f);
7149 n = splay_tree_lookup (ctx->sfield_map, key);
7150 sf = (tree) n->value;
7151 if (tcctx.cb.decl_map)
7152 sf = *tcctx.cb.decl_map->get (sf);
7153 src = build_simple_mem_ref_loc (loc, sarg);
7154 src = omp_build_component_ref (src, sf);
7155 dst = build_simple_mem_ref_loc (loc, arg);
7156 dst = omp_build_component_ref (dst, f);
7157 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7158 append_to_statement_list (t, &list);
7159 break;
7160 case OMP_CLAUSE_FIRSTPRIVATE:
7161 decl = OMP_CLAUSE_DECL (c);
7162 if (is_variable_sized (decl))
7163 break;
7164 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7165 if (n == NULL)
7166 break;
7167 f = (tree) n->value;
7168 if (tcctx.cb.decl_map)
7169 f = *tcctx.cb.decl_map->get (f);
7170 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7171 if (n != NULL)
7173 sf = (tree) n->value;
7174 if (tcctx.cb.decl_map)
7175 sf = *tcctx.cb.decl_map->get (sf);
7176 src = build_simple_mem_ref_loc (loc, sarg);
7177 src = omp_build_component_ref (src, sf);
7178 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7179 src = build_simple_mem_ref_loc (loc, src);
7181 else
7182 src = decl;
7183 dst = build_simple_mem_ref_loc (loc, arg);
7184 dst = omp_build_component_ref (dst, f);
7185 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7186 append_to_statement_list (t, &list);
7187 break;
7188 case OMP_CLAUSE_PRIVATE:
7189 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7190 break;
7191 decl = OMP_CLAUSE_DECL (c);
7192 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7193 f = (tree) n->value;
7194 if (tcctx.cb.decl_map)
7195 f = *tcctx.cb.decl_map->get (f);
7196 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7197 if (n != NULL)
7199 sf = (tree) n->value;
7200 if (tcctx.cb.decl_map)
7201 sf = *tcctx.cb.decl_map->get (sf);
7202 src = build_simple_mem_ref_loc (loc, sarg);
7203 src = omp_build_component_ref (src, sf);
7204 if (use_pointer_for_field (decl, NULL))
7205 src = build_simple_mem_ref_loc (loc, src);
7207 else
7208 src = decl;
7209 dst = build_simple_mem_ref_loc (loc, arg);
7210 dst = omp_build_component_ref (dst, f);
7211 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7212 append_to_statement_list (t, &list);
7213 break;
7214 default:
7215 break;
7218 /* Last pass: handle VLA firstprivates. */
7219 if (tcctx.cb.decl_map)
7220 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7221 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7223 tree ind, ptr, df;
7225 decl = OMP_CLAUSE_DECL (c);
7226 if (!is_variable_sized (decl))
7227 continue;
7228 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7229 if (n == NULL)
7230 continue;
7231 f = (tree) n->value;
7232 f = *tcctx.cb.decl_map->get (f);
7233 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7234 ind = DECL_VALUE_EXPR (decl);
7235 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7236 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7237 n = splay_tree_lookup (ctx->sfield_map,
7238 (splay_tree_key) TREE_OPERAND (ind, 0));
7239 sf = (tree) n->value;
7240 sf = *tcctx.cb.decl_map->get (sf);
7241 src = build_simple_mem_ref_loc (loc, sarg);
7242 src = omp_build_component_ref (src, sf);
7243 src = build_simple_mem_ref_loc (loc, src);
7244 dst = build_simple_mem_ref_loc (loc, arg);
7245 dst = omp_build_component_ref (dst, f);
7246 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7247 append_to_statement_list (t, &list);
7248 n = splay_tree_lookup (ctx->field_map,
7249 (splay_tree_key) TREE_OPERAND (ind, 0));
7250 df = (tree) n->value;
7251 df = *tcctx.cb.decl_map->get (df);
7252 ptr = build_simple_mem_ref_loc (loc, arg);
7253 ptr = omp_build_component_ref (ptr, df);
7254 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7255 build_fold_addr_expr_loc (loc, dst));
7256 append_to_statement_list (t, &list);
7259 t = build1 (RETURN_EXPR, void_type_node, NULL);
7260 append_to_statement_list (t, &list);
7262 if (tcctx.cb.decl_map)
7263 delete tcctx.cb.decl_map;
7264 pop_gimplify_context (NULL);
7265 BIND_EXPR_BODY (bind) = list;
7266 pop_cfun ();
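/* Lower the OMP_CLAUSE_DEPEND clauses in *PCLAUSES into the runtime's
   depend array: element [0] holds the total number of dependences and
   [1] the number of out/inout ones, followed by the out/inout
   addresses and then the in addresses.  Initialization goes to ISEQ,
   the array is clobbered in OSEQ once the construct is done, and a
   single new OMP_CLAUSE_DEPEND holding the array's address is
   prepended to *PCLAUSES.  */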
7269 static void
7270 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7272 tree c, clauses;
7273 gimple *g;
7274 size_t n_in = 0, n_out = 0, idx = 2, i;
7276 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7277 gcc_assert (clauses);
7278 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7279 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7280 switch (OMP_CLAUSE_DEPEND_KIND (c))
7282 case OMP_CLAUSE_DEPEND_IN:
7283 n_in++;
7284 break;
7285 case OMP_CLAUSE_DEPEND_OUT:
7286 case OMP_CLAUSE_DEPEND_INOUT:
7287 n_out++;
7288 break;
7289 case OMP_CLAUSE_DEPEND_SOURCE:
7290 case OMP_CLAUSE_DEPEND_SINK:
7291 /* FALLTHRU */
7292 default:
7293 gcc_unreachable ();
7295 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7296 tree array = create_tmp_var (type);
7297 TREE_ADDRESSABLE (array) = 1;
7298 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7299 NULL_TREE);
7300 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7301 gimple_seq_add_stmt (iseq, g);
7302 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7303 NULL_TREE);
7304 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7305 gimple_seq_add_stmt (iseq, g);
7306 for (i = 0; i < 2; i++)
7308 if ((i ? n_in : n_out) == 0)
7309 continue;
7310 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7311 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7312 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7314 tree t = OMP_CLAUSE_DECL (c);
7315 t = fold_convert (ptr_type_node, t);
7316 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7317 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7318 NULL_TREE, NULL_TREE);
7319 g = gimple_build_assign (r, t);
7320 gimple_seq_add_stmt (iseq, g);
7323 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7324 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7325 OMP_CLAUSE_CHAIN (c) = *pclauses;
7326 *pclauses = c;
7327 tree clobber = build_constructor (type, NULL);
7328 TREE_THIS_VOLATILE (clobber) = 1;
7329 g = gimple_build_assign (array, clobber);
7330 gimple_seq_add_stmt (oseq, g);
7333 /* Lower the OpenMP parallel or task directive in the current statement
7334 in GSI_P. CTX holds context information for the directive. */
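/* The construct's new body becomes, roughly,
     .omp_data_i = &.omp_data_o;
     <par_ilist> <par_body> <par_rlist> [cancel label] <par_olist>
     [GIMPLE_OMP_CONTINUE for tasks] GIMPLE_OMP_RETURN
   wrapped in a bind together with the send clauses (ilist/olist) and,
   for tasks with depend clauses, the depend array setup/teardown.  */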
7336 static void
7337 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7339 tree clauses;
7340 tree child_fn, t;
7341 gimple *stmt = gsi_stmt (*gsi_p);
7342 gbind *par_bind, *bind, *dep_bind = NULL;
7343 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7344 location_t loc = gimple_location (stmt);
7346 clauses = gimple_omp_taskreg_clauses (stmt);
7347 par_bind
7348 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7349 par_body = gimple_bind_body (par_bind);
7350 child_fn = ctx->cb.dst_fn;
7351 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7352 && !gimple_omp_parallel_combined_p (stmt))
7354 struct walk_stmt_info wi;
7355 int ws_num = 0;
7357 memset (&wi, 0, sizeof (wi));
7358 wi.info = &ws_num;
7359 wi.val_only = true;
7360 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7361 if (ws_num == 1)
7362 gimple_omp_parallel_set_combined_p (stmt, true);
7364 gimple_seq dep_ilist = NULL;
7365 gimple_seq dep_olist = NULL;
7366 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7367 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7369 push_gimplify_context ();
7370 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7371 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7372 &dep_ilist, &dep_olist);
7375 if (ctx->srecord_type)
7376 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7378 push_gimplify_context ();
7380 par_olist = NULL;
7381 par_ilist = NULL;
7382 par_rlist = NULL;
7383 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7384 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7385 if (phony_construct && ctx->record_type)
7387 gcc_checking_assert (!ctx->receiver_decl);
7388 ctx->receiver_decl = create_tmp_var
7389 (build_reference_type (ctx->record_type), ".omp_rec");
7391 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7392 lower_omp (&par_body, ctx);
7393 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7394 lower_reduction_clauses (clauses, &par_rlist, ctx);
7396 /* Declare all the variables created by mapping and the variables
7397 declared in the scope of the parallel body. */
7398 record_vars_into (ctx->block_vars, child_fn);
7399 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7401 if (ctx->record_type)
7403 ctx->sender_decl
7404 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7405 : ctx->record_type, ".omp_data_o");
7406 DECL_NAMELESS (ctx->sender_decl) = 1;
7407 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7408 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7411 olist = NULL;
7412 ilist = NULL;
7413 lower_send_clauses (clauses, &ilist, &olist, ctx);
7414 lower_send_shared_vars (&ilist, &olist, ctx);
7416 if (ctx->record_type)
7418 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7419 TREE_THIS_VOLATILE (clobber) = 1;
7420 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7421 clobber));
7424 /* Once all the expansions are done, sequence all the different
7425 fragments inside gimple_omp_body. */
7427 new_body = NULL;
7429 if (ctx->record_type)
7431 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7432 /* fixup_child_record_type might have changed receiver_decl's type. */
7433 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7434 gimple_seq_add_stmt (&new_body,
7435 gimple_build_assign (ctx->receiver_decl, t));
7438 gimple_seq_add_seq (&new_body, par_ilist);
7439 gimple_seq_add_seq (&new_body, par_body);
7440 gimple_seq_add_seq (&new_body, par_rlist);
7441 if (ctx->cancellable)
7442 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7443 gimple_seq_add_seq (&new_body, par_olist);
7444 new_body = maybe_catch_exception (new_body);
7445 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7446 gimple_seq_add_stmt (&new_body,
7447 gimple_build_omp_continue (integer_zero_node,
7448 integer_zero_node));
7449 if (!phony_construct)
7451 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7452 gimple_omp_set_body (stmt, new_body);
7455 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7456 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7457 gimple_bind_add_seq (bind, ilist);
7458 if (!phony_construct)
7459 gimple_bind_add_stmt (bind, stmt);
7460 else
7461 gimple_bind_add_seq (bind, new_body);
7462 gimple_bind_add_seq (bind, olist);
7464 pop_gimplify_context (NULL);
7466 if (dep_bind)
7468 gimple_bind_add_seq (dep_bind, dep_ilist);
7469 gimple_bind_add_stmt (dep_bind, bind);
7470 gimple_bind_add_seq (dep_bind, dep_olist);
7471 pop_gimplify_context (dep_bind);
7475 /* Lower the GIMPLE_OMP_TARGET in the current statement
7476 in GSI_P. CTX holds context information for the directive. */
7478 static void
7479 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7481 tree clauses;
7482 tree child_fn, t, c;
7483 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7484 gbind *tgt_bind, *bind, *dep_bind = NULL;
7485 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7486 location_t loc = gimple_location (stmt);
7487 bool offloaded, data_region;
7488 unsigned int map_cnt = 0;
7490 offloaded = is_gimple_omp_offloaded (stmt);
7491 switch (gimple_omp_target_kind (stmt))
7493 case GF_OMP_TARGET_KIND_REGION:
7494 case GF_OMP_TARGET_KIND_UPDATE:
7495 case GF_OMP_TARGET_KIND_ENTER_DATA:
7496 case GF_OMP_TARGET_KIND_EXIT_DATA:
7497 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7498 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7499 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7500 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7501 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7502 data_region = false;
7503 break;
7504 case GF_OMP_TARGET_KIND_DATA:
7505 case GF_OMP_TARGET_KIND_OACC_DATA:
7506 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7507 data_region = true;
7508 break;
7509 default:
7510 gcc_unreachable ();
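/* E.g. "#pragma omp target" becomes GF_OMP_TARGET_KIND_REGION
   (DATA_REGION false, OFFLOADED true), whereas "#pragma omp target
   data" becomes GF_OMP_TARGET_KIND_DATA (DATA_REGION true,
   OFFLOADED false).  */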
7513 clauses = gimple_omp_target_clauses (stmt);
7515 gimple_seq dep_ilist = NULL;
7516 gimple_seq dep_olist = NULL;
7517 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7519 push_gimplify_context ();
7520 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7521 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7522 &dep_ilist, &dep_olist);
7525 tgt_bind = NULL;
7526 tgt_body = NULL;
7527 if (offloaded)
7529 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7530 tgt_body = gimple_bind_body (tgt_bind);
7532 else if (data_region)
7533 tgt_body = gimple_omp_body (stmt);
7534 child_fn = ctx->cb.dst_fn;
7536 push_gimplify_context ();
7537 fplist = NULL;
7539 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7540 switch (OMP_CLAUSE_CODE (c))
7542 tree var, x;
7544 default:
7545 break;
7546 case OMP_CLAUSE_MAP:
7547 #if CHECKING_P
7548 /* First check what we're prepared to handle in the following. */
7549 switch (OMP_CLAUSE_MAP_KIND (c))
7551 case GOMP_MAP_ALLOC:
7552 case GOMP_MAP_TO:
7553 case GOMP_MAP_FROM:
7554 case GOMP_MAP_TOFROM:
7555 case GOMP_MAP_POINTER:
7556 case GOMP_MAP_TO_PSET:
7557 case GOMP_MAP_DELETE:
7558 case GOMP_MAP_RELEASE:
7559 case GOMP_MAP_ALWAYS_TO:
7560 case GOMP_MAP_ALWAYS_FROM:
7561 case GOMP_MAP_ALWAYS_TOFROM:
7562 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7563 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7564 case GOMP_MAP_STRUCT:
7565 case GOMP_MAP_ALWAYS_POINTER:
7566 break;
7567 case GOMP_MAP_FORCE_ALLOC:
7568 case GOMP_MAP_FORCE_TO:
7569 case GOMP_MAP_FORCE_FROM:
7570 case GOMP_MAP_FORCE_TOFROM:
7571 case GOMP_MAP_FORCE_PRESENT:
7572 case GOMP_MAP_FORCE_DEVICEPTR:
7573 case GOMP_MAP_DEVICE_RESIDENT:
7574 case GOMP_MAP_LINK:
7575 gcc_assert (is_gimple_omp_oacc (stmt));
7576 break;
7577 default:
7578 gcc_unreachable ();
7580 #endif
7581 /* FALLTHRU */
7582 case OMP_CLAUSE_TO:
7583 case OMP_CLAUSE_FROM:
7584 oacc_firstprivate:
7585 var = OMP_CLAUSE_DECL (c);
7586 if (!DECL_P (var))
7588 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7589 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7590 && (OMP_CLAUSE_MAP_KIND (c)
7591 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7592 map_cnt++;
7593 continue;
7596 if (DECL_SIZE (var)
7597 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7599 tree var2 = DECL_VALUE_EXPR (var);
7600 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7601 var2 = TREE_OPERAND (var2, 0);
7602 gcc_assert (DECL_P (var2));
7603 var = var2;
7606 if (offloaded
7607 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7608 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7609 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7611 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7613 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7614 && varpool_node::get_create (var)->offloadable)
7615 continue;
7617 tree type = build_pointer_type (TREE_TYPE (var));
7618 tree new_var = lookup_decl (var, ctx);
7619 x = create_tmp_var_raw (type, get_name (new_var));
7620 gimple_add_tmp_var (x);
7621 x = build_simple_mem_ref (x);
7622 SET_DECL_VALUE_EXPR (new_var, x);
7623 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7625 continue;
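/* From this point on, references to VAR in the region body resolve
   through the DECL_VALUE_EXPR "*X", i.e. through a pointer temporary
   that the second lowering pass below initializes from the data
   received for this clause (sketch of the mechanism).  */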
7628 if (!maybe_lookup_field (var, ctx))
7629 continue;
7631 /* Don't remap oacc parallel reduction variables, because the
7632 intermediate result must be local to each gang. */
7633 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7634 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7636 x = build_receiver_ref (var, true, ctx);
7637 tree new_var = lookup_decl (var, ctx);
7639 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7640 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7641 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7642 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7643 x = build_simple_mem_ref (x);
7644 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7646 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7647 if (omp_is_reference (new_var))
7649 /* Create a local object to hold the instance
7650 value. */
7651 tree type = TREE_TYPE (TREE_TYPE (new_var));
7652 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7653 tree inst = create_tmp_var (type, id);
7654 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7655 x = build_fold_addr_expr (inst);
7657 gimplify_assign (new_var, x, &fplist);
7659 else if (DECL_P (new_var))
7661 SET_DECL_VALUE_EXPR (new_var, x);
7662 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7664 else
7665 gcc_unreachable ();
7667 map_cnt++;
7668 break;
7670 case OMP_CLAUSE_FIRSTPRIVATE:
7671 if (is_oacc_parallel (ctx))
7672 goto oacc_firstprivate;
7673 map_cnt++;
7674 var = OMP_CLAUSE_DECL (c);
7675 if (!omp_is_reference (var)
7676 && !is_gimple_reg_type (TREE_TYPE (var)))
7678 tree new_var = lookup_decl (var, ctx);
7679 if (is_variable_sized (var))
7681 tree pvar = DECL_VALUE_EXPR (var);
7682 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7683 pvar = TREE_OPERAND (pvar, 0);
7684 gcc_assert (DECL_P (pvar));
7685 tree new_pvar = lookup_decl (pvar, ctx);
7686 x = build_fold_indirect_ref (new_pvar);
7687 TREE_THIS_NOTRAP (x) = 1;
7689 else
7690 x = build_receiver_ref (var, true, ctx);
7691 SET_DECL_VALUE_EXPR (new_var, x);
7692 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7694 break;
7696 case OMP_CLAUSE_PRIVATE:
7697 if (is_gimple_omp_oacc (ctx->stmt))
7698 break;
7699 var = OMP_CLAUSE_DECL (c);
7700 if (is_variable_sized (var))
7702 tree new_var = lookup_decl (var, ctx);
7703 tree pvar = DECL_VALUE_EXPR (var);
7704 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7705 pvar = TREE_OPERAND (pvar, 0);
7706 gcc_assert (DECL_P (pvar));
7707 tree new_pvar = lookup_decl (pvar, ctx);
7708 x = build_fold_indirect_ref (new_pvar);
7709 TREE_THIS_NOTRAP (x) = 1;
7710 SET_DECL_VALUE_EXPR (new_var, x);
7711 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7713 break;
7715 case OMP_CLAUSE_USE_DEVICE_PTR:
7716 case OMP_CLAUSE_IS_DEVICE_PTR:
7717 var = OMP_CLAUSE_DECL (c);
7718 map_cnt++;
7719 if (is_variable_sized (var))
7721 tree new_var = lookup_decl (var, ctx);
7722 tree pvar = DECL_VALUE_EXPR (var);
7723 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7724 pvar = TREE_OPERAND (pvar, 0);
7725 gcc_assert (DECL_P (pvar));
7726 tree new_pvar = lookup_decl (pvar, ctx);
7727 x = build_fold_indirect_ref (new_pvar);
7728 TREE_THIS_NOTRAP (x) = 1;
7729 SET_DECL_VALUE_EXPR (new_var, x);
7730 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7732 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7734 tree new_var = lookup_decl (var, ctx);
7735 tree type = build_pointer_type (TREE_TYPE (var));
7736 x = create_tmp_var_raw (type, get_name (new_var));
7737 gimple_add_tmp_var (x);
7738 x = build_simple_mem_ref (x);
7739 SET_DECL_VALUE_EXPR (new_var, x);
7740 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7742 else
7744 tree new_var = lookup_decl (var, ctx);
7745 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7746 gimple_add_tmp_var (x);
7747 SET_DECL_VALUE_EXPR (new_var, x);
7748 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7750 break;
7753 if (offloaded)
7755 target_nesting_level++;
7756 lower_omp (&tgt_body, ctx);
7757 target_nesting_level--;
7759 else if (data_region)
7760 lower_omp (&tgt_body, ctx);
7762 if (offloaded)
7764 /* Declare all the variables created by mapping and the variables
7765 declared in the scope of the target body. */
7766 record_vars_into (ctx->block_vars, child_fn);
7767 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7770 olist = NULL;
7771 ilist = NULL;
7772 if (ctx->record_type)
7774 ctx->sender_decl
7775 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7776 DECL_NAMELESS (ctx->sender_decl) = 1;
7777 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7778 t = make_tree_vec (3);
7779 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7780 TREE_VEC_ELT (t, 1)
7781 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7782 ".omp_data_sizes");
7783 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7784 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7785 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7786 tree tkind_type = short_unsigned_type_node;
7787 int talign_shift = 8;
7788 TREE_VEC_ELT (t, 2)
7789 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7790 ".omp_data_kinds");
7791 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7792 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7793 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7794 gimple_omp_target_set_data_arg (stmt, t);
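/* Sketch of the result: for

     #pragma omp target map(to: a) map(from: b)

   the runtime is handed three parallel operands, roughly

     .omp_data_arr   = { &a, &b };
     .omp_data_sizes = { sizeof (a), sizeof (b) };
     .omp_data_kinds = { GOMP_MAP_TO, GOMP_MAP_FROM };

   where the kinds additionally carry the alignment in their high
   bits, see TALIGN_SHIFT below.  */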
7796 vec<constructor_elt, va_gc> *vsize;
7797 vec<constructor_elt, va_gc> *vkind;
7798 vec_alloc (vsize, map_cnt);
7799 vec_alloc (vkind, map_cnt);
7800 unsigned int map_idx = 0;
7802 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7803 switch (OMP_CLAUSE_CODE (c))
7805 tree ovar, nc, s, purpose, var, x, type;
7806 unsigned int talign;
7808 default:
7809 break;
7811 case OMP_CLAUSE_MAP:
7812 case OMP_CLAUSE_TO:
7813 case OMP_CLAUSE_FROM:
7814 oacc_firstprivate_map:
7815 nc = c;
7816 ovar = OMP_CLAUSE_DECL (c);
7817 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7818 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7819 || (OMP_CLAUSE_MAP_KIND (c)
7820 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7821 break;
7822 if (!DECL_P (ovar))
7824 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7825 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7827 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7828 == get_base_address (ovar));
7829 nc = OMP_CLAUSE_CHAIN (c);
7830 ovar = OMP_CLAUSE_DECL (nc);
7832 else
7834 tree x = build_sender_ref (ovar, ctx);
7835 tree v
7836 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7837 gimplify_assign (x, v, &ilist);
7838 nc = NULL_TREE;
7841 else
7843 if (DECL_SIZE (ovar)
7844 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7846 tree ovar2 = DECL_VALUE_EXPR (ovar);
7847 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7848 ovar2 = TREE_OPERAND (ovar2, 0);
7849 gcc_assert (DECL_P (ovar2));
7850 ovar = ovar2;
7852 if (!maybe_lookup_field (ovar, ctx))
7853 continue;
7856 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7857 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7858 talign = DECL_ALIGN_UNIT (ovar);
7859 if (nc)
7861 var = lookup_decl_in_outer_ctx (ovar, ctx);
7862 x = build_sender_ref (ovar, ctx);
7864 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7865 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7866 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7867 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7869 gcc_assert (offloaded);
7870 tree avar
7871 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7872 mark_addressable (avar);
7873 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7874 talign = DECL_ALIGN_UNIT (avar);
7875 avar = build_fold_addr_expr (avar);
7876 gimplify_assign (x, avar, &ilist);
7878 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7880 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7881 if (!omp_is_reference (var))
7883 if (is_gimple_reg (var)
7884 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7885 TREE_NO_WARNING (var) = 1;
7886 var = build_fold_addr_expr (var);
7888 else
7889 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7890 gimplify_assign (x, var, &ilist);
7892 else if (is_gimple_reg (var))
7894 gcc_assert (offloaded);
7895 tree avar = create_tmp_var (TREE_TYPE (var));
7896 mark_addressable (avar);
7897 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7898 if (GOMP_MAP_COPY_TO_P (map_kind)
7899 || map_kind == GOMP_MAP_POINTER
7900 || map_kind == GOMP_MAP_TO_PSET
7901 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7903 /* If we need to initialize a temporary
7904 with VAR because it is not addressable, and
7905 the variable hasn't been initialized yet, then
7906 we'll get a warning for the store to avar.
7907 Don't warn in that case; the mapping might be
7908 be implicit. */
7909 TREE_NO_WARNING (var) = 1;
7910 gimplify_assign (avar, var, &ilist);
7912 avar = build_fold_addr_expr (avar);
7913 gimplify_assign (x, avar, &ilist);
7914 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7915 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7916 && !TYPE_READONLY (TREE_TYPE (var)))
7918 x = unshare_expr (x);
7919 x = build_simple_mem_ref (x);
7920 gimplify_assign (var, x, &olist);
7923 else
7925 var = build_fold_addr_expr (var);
7926 gimplify_assign (x, var, &ilist);
7929 s = NULL_TREE;
7930 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7932 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7933 s = TREE_TYPE (ovar);
7934 if (TREE_CODE (s) == REFERENCE_TYPE)
7935 s = TREE_TYPE (s);
7936 s = TYPE_SIZE_UNIT (s);
7938 else
7939 s = OMP_CLAUSE_SIZE (c);
7940 if (s == NULL_TREE)
7941 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7942 s = fold_convert (size_type_node, s);
7943 purpose = size_int (map_idx++);
7944 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7945 if (TREE_CODE (s) != INTEGER_CST)
7946 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7948 unsigned HOST_WIDE_INT tkind, tkind_zero;
7949 switch (OMP_CLAUSE_CODE (c))
7951 case OMP_CLAUSE_MAP:
7952 tkind = OMP_CLAUSE_MAP_KIND (c);
7953 tkind_zero = tkind;
7954 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7955 switch (tkind)
7957 case GOMP_MAP_ALLOC:
7958 case GOMP_MAP_TO:
7959 case GOMP_MAP_FROM:
7960 case GOMP_MAP_TOFROM:
7961 case GOMP_MAP_ALWAYS_TO:
7962 case GOMP_MAP_ALWAYS_FROM:
7963 case GOMP_MAP_ALWAYS_TOFROM:
7964 case GOMP_MAP_RELEASE:
7965 case GOMP_MAP_FORCE_TO:
7966 case GOMP_MAP_FORCE_FROM:
7967 case GOMP_MAP_FORCE_TOFROM:
7968 case GOMP_MAP_FORCE_PRESENT:
7969 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7970 break;
7971 case GOMP_MAP_DELETE:
7972 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7973 default:
7974 break;
7976 if (tkind_zero != tkind)
7978 if (integer_zerop (s))
7979 tkind = tkind_zero;
7980 else if (integer_nonzerop (s))
7981 tkind_zero = tkind;
7983 break;
7984 case OMP_CLAUSE_FIRSTPRIVATE:
7985 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7986 tkind = GOMP_MAP_TO;
7987 tkind_zero = tkind;
7988 break;
7989 case OMP_CLAUSE_TO:
7990 tkind = GOMP_MAP_TO;
7991 tkind_zero = tkind;
7992 break;
7993 case OMP_CLAUSE_FROM:
7994 tkind = GOMP_MAP_FROM;
7995 tkind_zero = tkind;
7996 break;
7997 default:
7998 gcc_unreachable ();
8000 gcc_checking_assert (tkind
8001 < (HOST_WIDE_INT_C (1U) << talign_shift));
8002 gcc_checking_assert (tkind_zero
8003 < (HOST_WIDE_INT_C (1U) << talign_shift));
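/* Encode the entry: the low TALIGN_SHIFT (8) bits hold the map kind
   and the remaining high bits hold ceil_log2 of the alignment, so
   e.g. an 8-byte-aligned GOMP_MAP_TO entry becomes
   (3 << 8) | GOMP_MAP_TO.  */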
8004 talign = ceil_log2 (talign);
8005 tkind |= talign << talign_shift;
8006 tkind_zero |= talign << talign_shift;
8007 gcc_checking_assert (tkind
8008 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8009 gcc_checking_assert (tkind_zero
8010 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8011 if (tkind == tkind_zero)
8012 x = build_int_cstu (tkind_type, tkind);
8013 else
8015 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8016 x = build3 (COND_EXPR, tkind_type,
8017 fold_build2 (EQ_EXPR, boolean_type_node,
8018 unshare_expr (s), size_zero_node),
8019 build_int_cstu (tkind_type, tkind_zero),
8020 build_int_cstu (tkind_type, tkind));
8022 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8023 if (nc && nc != c)
8024 c = nc;
8025 break;
8027 case OMP_CLAUSE_FIRSTPRIVATE:
8028 if (is_oacc_parallel (ctx))
8029 goto oacc_firstprivate_map;
8030 ovar = OMP_CLAUSE_DECL (c);
8031 if (omp_is_reference (ovar))
8032 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8033 else
8034 talign = DECL_ALIGN_UNIT (ovar);
8035 var = lookup_decl_in_outer_ctx (ovar, ctx);
8036 x = build_sender_ref (ovar, ctx);
8037 tkind = GOMP_MAP_FIRSTPRIVATE;
8038 type = TREE_TYPE (ovar);
8039 if (omp_is_reference (ovar))
8040 type = TREE_TYPE (type);
8041 if ((INTEGRAL_TYPE_P (type)
8042 && TYPE_PRECISION (type) <= POINTER_SIZE)
8043 || TREE_CODE (type) == POINTER_TYPE)
8045 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
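/* Small scalars are passed by value: the value itself travels in the
   pointer-sized slot and the size entry is 0, so e.g.
   "firstprivate (n)" with "int n" never needs a host address at
   all.  */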
8046 tree t = var;
8047 if (omp_is_reference (var))
8048 t = build_simple_mem_ref (var);
8049 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8050 TREE_NO_WARNING (var) = 1;
8051 if (TREE_CODE (type) != POINTER_TYPE)
8052 t = fold_convert (pointer_sized_int_node, t);
8053 t = fold_convert (TREE_TYPE (x), t);
8054 gimplify_assign (x, t, &ilist);
8056 else if (omp_is_reference (var))
8057 gimplify_assign (x, var, &ilist);
8058 else if (is_gimple_reg (var))
8060 tree avar = create_tmp_var (TREE_TYPE (var));
8061 mark_addressable (avar);
8062 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8063 TREE_NO_WARNING (var) = 1;
8064 gimplify_assign (avar, var, &ilist);
8065 avar = build_fold_addr_expr (avar);
8066 gimplify_assign (x, avar, &ilist);
8068 else
8070 var = build_fold_addr_expr (var);
8071 gimplify_assign (x, var, &ilist);
8073 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8074 s = size_int (0);
8075 else if (omp_is_reference (ovar))
8076 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8077 else
8078 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8079 s = fold_convert (size_type_node, s);
8080 purpose = size_int (map_idx++);
8081 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8082 if (TREE_CODE (s) != INTEGER_CST)
8083 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8085 gcc_checking_assert (tkind
8086 < (HOST_WIDE_INT_C (1U) << talign_shift));
8087 talign = ceil_log2 (talign);
8088 tkind |= talign << talign_shift;
8089 gcc_checking_assert (tkind
8090 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8091 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8092 build_int_cstu (tkind_type, tkind));
8093 break;
8095 case OMP_CLAUSE_USE_DEVICE_PTR:
8096 case OMP_CLAUSE_IS_DEVICE_PTR:
8097 ovar = OMP_CLAUSE_DECL (c);
8098 var = lookup_decl_in_outer_ctx (ovar, ctx);
8099 x = build_sender_ref (ovar, ctx);
8100 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8101 tkind = GOMP_MAP_USE_DEVICE_PTR;
8102 else
8103 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8104 type = TREE_TYPE (ovar);
8105 if (TREE_CODE (type) == ARRAY_TYPE)
8106 var = build_fold_addr_expr (var);
8107 else
8109 if (omp_is_reference (ovar))
8111 type = TREE_TYPE (type);
8112 if (TREE_CODE (type) != ARRAY_TYPE)
8113 var = build_simple_mem_ref (var);
8114 var = fold_convert (TREE_TYPE (x), var);
8117 gimplify_assign (x, var, &ilist);
8118 s = size_int (0);
8119 purpose = size_int (map_idx++);
8120 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8121 gcc_checking_assert (tkind
8122 < (HOST_WIDE_INT_C (1U) << talign_shift));
8123 gcc_checking_assert (tkind
8124 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8125 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8126 build_int_cstu (tkind_type, tkind));
8127 break;
8130 gcc_assert (map_idx == map_cnt);
8132 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8133 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8134 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8135 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8136 for (int i = 1; i <= 2; i++)
8137 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8139 gimple_seq initlist = NULL;
8140 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8141 TREE_VEC_ELT (t, i)),
8142 &initlist, true, NULL_TREE);
8143 gimple_seq_add_seq (&ilist, initlist);
8145 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8146 NULL);
8147 TREE_THIS_VOLATILE (clobber) = 1;
8148 gimple_seq_add_stmt (&olist,
8149 gimple_build_assign (TREE_VEC_ELT (t, i),
8150 clobber));
8153 tree clobber = build_constructor (ctx->record_type, NULL);
8154 TREE_THIS_VOLATILE (clobber) = 1;
8155 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8156 clobber));
8159 /* Once all the expansions are done, sequence all the different
8160 fragments inside gimple_omp_body. */
8162 new_body = NULL;
8164 if (offloaded
8165 && ctx->record_type)
8167 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8168 /* fixup_child_record_type might have changed receiver_decl's type. */
8169 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8170 gimple_seq_add_stmt (&new_body,
8171 gimple_build_assign (ctx->receiver_decl, t));
8173 gimple_seq_add_seq (&new_body, fplist);
8175 if (offloaded || data_region)
8177 tree prev = NULL_TREE;
8178 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8179 switch (OMP_CLAUSE_CODE (c))
8181 tree var, x;
8182 default:
8183 break;
8184 case OMP_CLAUSE_FIRSTPRIVATE:
8185 if (is_gimple_omp_oacc (ctx->stmt))
8186 break;
8187 var = OMP_CLAUSE_DECL (c);
8188 if (omp_is_reference (var)
8189 || is_gimple_reg_type (TREE_TYPE (var)))
8191 tree new_var = lookup_decl (var, ctx);
8192 tree type;
8193 type = TREE_TYPE (var);
8194 if (omp_is_reference (var))
8195 type = TREE_TYPE (type);
8196 if ((INTEGRAL_TYPE_P (type)
8197 && TYPE_PRECISION (type) <= POINTER_SIZE)
8198 || TREE_CODE (type) == POINTER_TYPE)
8200 x = build_receiver_ref (var, false, ctx);
8201 if (TREE_CODE (type) != POINTER_TYPE)
8202 x = fold_convert (pointer_sized_int_node, x);
8203 x = fold_convert (type, x);
8204 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8205 fb_rvalue);
8206 if (omp_is_reference (var))
8208 tree v = create_tmp_var_raw (type, get_name (var));
8209 gimple_add_tmp_var (v);
8210 TREE_ADDRESSABLE (v) = 1;
8211 gimple_seq_add_stmt (&new_body,
8212 gimple_build_assign (v, x));
8213 x = build_fold_addr_expr (v);
8215 gimple_seq_add_stmt (&new_body,
8216 gimple_build_assign (new_var, x));
8218 else
8220 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8221 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8222 fb_rvalue);
8223 gimple_seq_add_stmt (&new_body,
8224 gimple_build_assign (new_var, x));
8227 else if (is_variable_sized (var))
8229 tree pvar = DECL_VALUE_EXPR (var);
8230 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8231 pvar = TREE_OPERAND (pvar, 0);
8232 gcc_assert (DECL_P (pvar));
8233 tree new_var = lookup_decl (pvar, ctx);
8234 x = build_receiver_ref (var, false, ctx);
8235 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8236 gimple_seq_add_stmt (&new_body,
8237 gimple_build_assign (new_var, x));
8239 break;
8240 case OMP_CLAUSE_PRIVATE:
8241 if (is_gimple_omp_oacc (ctx->stmt))
8242 break;
8243 var = OMP_CLAUSE_DECL (c);
8244 if (omp_is_reference (var))
8246 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8247 tree new_var = lookup_decl (var, ctx);
8248 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8249 if (TREE_CONSTANT (x))
8251 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8252 get_name (var));
8253 gimple_add_tmp_var (x);
8254 TREE_ADDRESSABLE (x) = 1;
8255 x = build_fold_addr_expr_loc (clause_loc, x);
8257 else
8258 break;
8260 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8261 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8262 gimple_seq_add_stmt (&new_body,
8263 gimple_build_assign (new_var, x));
8265 break;
8266 case OMP_CLAUSE_USE_DEVICE_PTR:
8267 case OMP_CLAUSE_IS_DEVICE_PTR:
8268 var = OMP_CLAUSE_DECL (c);
8269 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8270 x = build_sender_ref (var, ctx);
8271 else
8272 x = build_receiver_ref (var, false, ctx);
8273 if (is_variable_sized (var))
8275 tree pvar = DECL_VALUE_EXPR (var);
8276 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8277 pvar = TREE_OPERAND (pvar, 0);
8278 gcc_assert (DECL_P (pvar));
8279 tree new_var = lookup_decl (pvar, ctx);
8280 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8281 gimple_seq_add_stmt (&new_body,
8282 gimple_build_assign (new_var, x));
8284 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8286 tree new_var = lookup_decl (var, ctx);
8287 new_var = DECL_VALUE_EXPR (new_var);
8288 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8289 new_var = TREE_OPERAND (new_var, 0);
8290 gcc_assert (DECL_P (new_var));
8291 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8292 gimple_seq_add_stmt (&new_body,
8293 gimple_build_assign (new_var, x));
8295 else
8297 tree type = TREE_TYPE (var);
8298 tree new_var = lookup_decl (var, ctx);
8299 if (omp_is_reference (var))
8301 type = TREE_TYPE (type);
8302 if (TREE_CODE (type) != ARRAY_TYPE)
8304 tree v = create_tmp_var_raw (type, get_name (var));
8305 gimple_add_tmp_var (v);
8306 TREE_ADDRESSABLE (v) = 1;
8307 x = fold_convert (type, x);
8308 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8309 fb_rvalue);
8310 gimple_seq_add_stmt (&new_body,
8311 gimple_build_assign (v, x));
8312 x = build_fold_addr_expr (v);
8315 new_var = DECL_VALUE_EXPR (new_var);
8316 x = fold_convert (TREE_TYPE (new_var), x);
8317 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8318 gimple_seq_add_stmt (&new_body,
8319 gimple_build_assign (new_var, x));
8321 break;
8323 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8324 so that any firstprivate vars holding the OMP_CLAUSE_SIZE values
8325 are already handled by then. Similarly OMP_CLAUSE_PRIVATE for VLAs
8326 or references to VLAs. */
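/* For instance, "map (a[10:20])" arrives as a pair of clauses: a MAP
   for the array section followed by MAP(FIRSTPRIVATE_POINTER, a)
   whose OMP_CLAUSE_SIZE is the bias.  PREV below remembers the first
   clause of such a pair so that the private pointer can be computed
   as the received section address minus the bias (sketch).  */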
8327 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8328 switch (OMP_CLAUSE_CODE (c))
8330 tree var;
8331 default:
8332 break;
8333 case OMP_CLAUSE_MAP:
8334 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8335 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8337 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8338 poly_int64 offset = 0;
8339 gcc_assert (prev);
8340 var = OMP_CLAUSE_DECL (c);
8341 if (DECL_P (var)
8342 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8343 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8344 ctx))
8345 && varpool_node::get_create (var)->offloadable)
8346 break;
8347 if (TREE_CODE (var) == INDIRECT_REF
8348 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8349 var = TREE_OPERAND (var, 0);
8350 if (TREE_CODE (var) == COMPONENT_REF)
8352 var = get_addr_base_and_unit_offset (var, &offset);
8353 gcc_assert (var != NULL_TREE && DECL_P (var));
8355 else if (DECL_SIZE (var)
8356 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8358 tree var2 = DECL_VALUE_EXPR (var);
8359 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8360 var2 = TREE_OPERAND (var2, 0);
8361 gcc_assert (DECL_P (var2));
8362 var = var2;
8364 tree new_var = lookup_decl (var, ctx), x;
8365 tree type = TREE_TYPE (new_var);
8366 bool is_ref;
8367 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8368 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8369 == COMPONENT_REF))
8371 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8372 is_ref = true;
8373 new_var = build2 (MEM_REF, type,
8374 build_fold_addr_expr (new_var),
8375 build_int_cst (build_pointer_type (type),
8376 offset));
8378 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8380 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8381 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8382 new_var = build2 (MEM_REF, type,
8383 build_fold_addr_expr (new_var),
8384 build_int_cst (build_pointer_type (type),
8385 offset));
8387 else
8388 is_ref = omp_is_reference (var);
8389 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8390 is_ref = false;
8391 bool ref_to_array = false;
8392 if (is_ref)
8394 type = TREE_TYPE (type);
8395 if (TREE_CODE (type) == ARRAY_TYPE)
8397 type = build_pointer_type (type);
8398 ref_to_array = true;
8401 else if (TREE_CODE (type) == ARRAY_TYPE)
8403 tree decl2 = DECL_VALUE_EXPR (new_var);
8404 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8405 decl2 = TREE_OPERAND (decl2, 0);
8406 gcc_assert (DECL_P (decl2));
8407 new_var = decl2;
8408 type = TREE_TYPE (new_var);
8410 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8411 x = fold_convert_loc (clause_loc, type, x);
8412 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8414 tree bias = OMP_CLAUSE_SIZE (c);
8415 if (DECL_P (bias))
8416 bias = lookup_decl (bias, ctx);
8417 bias = fold_convert_loc (clause_loc, sizetype, bias);
8418 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8419 bias);
8420 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8421 TREE_TYPE (x), x, bias);
8423 if (ref_to_array)
8424 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8425 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8426 if (is_ref && !ref_to_array)
8428 tree t = create_tmp_var_raw (type, get_name (var));
8429 gimple_add_tmp_var (t);
8430 TREE_ADDRESSABLE (t) = 1;
8431 gimple_seq_add_stmt (&new_body,
8432 gimple_build_assign (t, x));
8433 x = build_fold_addr_expr_loc (clause_loc, t);
8435 gimple_seq_add_stmt (&new_body,
8436 gimple_build_assign (new_var, x));
8437 prev = NULL_TREE;
8439 else if (OMP_CLAUSE_CHAIN (c)
8440 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8441 == OMP_CLAUSE_MAP
8442 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8443 == GOMP_MAP_FIRSTPRIVATE_POINTER
8444 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8445 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8446 prev = c;
8447 break;
8448 case OMP_CLAUSE_PRIVATE:
8449 var = OMP_CLAUSE_DECL (c);
8450 if (is_variable_sized (var))
8452 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8453 tree new_var = lookup_decl (var, ctx);
8454 tree pvar = DECL_VALUE_EXPR (var);
8455 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8456 pvar = TREE_OPERAND (pvar, 0);
8457 gcc_assert (DECL_P (pvar));
8458 tree new_pvar = lookup_decl (pvar, ctx);
8459 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8460 tree al = size_int (DECL_ALIGN (var));
8461 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8462 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8463 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8464 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8465 gimple_seq_add_stmt (&new_body,
8466 gimple_build_assign (new_pvar, x));
8468 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8470 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8471 tree new_var = lookup_decl (var, ctx);
8472 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8473 if (TREE_CONSTANT (x))
8474 break;
8475 else
8477 tree atmp
8478 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8479 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8480 tree al = size_int (TYPE_ALIGN (rtype));
8481 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8484 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8485 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8486 gimple_seq_add_stmt (&new_body,
8487 gimple_build_assign (new_var, x));
8489 break;
8492 gimple_seq fork_seq = NULL;
8493 gimple_seq join_seq = NULL;
8495 if (is_oacc_parallel (ctx))
8497 /* If there are reductions on the offloaded region itself, treat
8498 them as a dummy GANG loop. */
8499 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8501 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8502 false, NULL, NULL, &fork_seq, &join_seq, ctx);
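/* E.g. "#pragma acc parallel reduction (+:sum)" gets the same
   fork/join reduction sequence here that a gang-partitioned loop
   would get, wrapped around the offloaded body added below.  */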
8505 gimple_seq_add_seq (&new_body, fork_seq);
8506 gimple_seq_add_seq (&new_body, tgt_body);
8507 gimple_seq_add_seq (&new_body, join_seq);
8509 if (offloaded)
8510 new_body = maybe_catch_exception (new_body);
8512 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8513 gimple_omp_set_body (stmt, new_body);
8516 bind = gimple_build_bind (NULL, NULL,
8517 tgt_bind ? gimple_bind_block (tgt_bind)
8518 : NULL_TREE);
8519 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8520 gimple_bind_add_seq (bind, ilist);
8521 gimple_bind_add_stmt (bind, stmt);
8522 gimple_bind_add_seq (bind, olist);
8524 pop_gimplify_context (NULL);
8526 if (dep_bind)
8528 gimple_bind_add_seq (dep_bind, dep_ilist);
8529 gimple_bind_add_stmt (dep_bind, bind);
8530 gimple_bind_add_seq (dep_bind, dep_olist);
8531 pop_gimplify_context (dep_bind);
8535 /* Expand code for an OpenMP teams directive. */
8537 static void
8538 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8540 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8541 push_gimplify_context ();
8543 tree block = make_node (BLOCK);
8544 gbind *bind = gimple_build_bind (NULL, NULL, block);
8545 gsi_replace (gsi_p, bind, true);
8546 gimple_seq bind_body = NULL;
8547 gimple_seq dlist = NULL;
8548 gimple_seq olist = NULL;
8550 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8551 OMP_CLAUSE_NUM_TEAMS);
8552 if (num_teams == NULL_TREE)
8553 num_teams = build_int_cst (unsigned_type_node, 0);
8554 else
8556 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8557 num_teams = fold_convert (unsigned_type_node, num_teams);
8558 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8560 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8561 OMP_CLAUSE_THREAD_LIMIT);
8562 if (thread_limit == NULL_TREE)
8563 thread_limit = build_int_cst (unsigned_type_node, 0);
8564 else
8566 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8567 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8568 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8569 fb_rvalue);
8572 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8573 &bind_body, &dlist, ctx, NULL);
8574 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8575 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8576 if (!gimple_omp_teams_grid_phony (teams_stmt))
8578 gimple_seq_add_stmt (&bind_body, teams_stmt);
8579 location_t loc = gimple_location (teams_stmt);
8580 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8581 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8582 gimple_set_location (call, loc);
8583 gimple_seq_add_stmt (&bind_body, call);
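/* I.e. the teams body is preceded by roughly

     __builtin_GOMP_teams (num_teams, thread_limit);

   with either argument 0 when the corresponding clause is absent, in
   which case the runtime library picks the defaults (sketch).  */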
8586 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8587 gimple_omp_set_body (teams_stmt, NULL);
8588 gimple_seq_add_seq (&bind_body, olist);
8589 gimple_seq_add_seq (&bind_body, dlist);
8590 if (!gimple_omp_teams_grid_phony (teams_stmt))
8591 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8592 gimple_bind_set_body (bind, bind_body);
8594 pop_gimplify_context (bind);
8596 gimple_bind_append_vars (bind, ctx->block_vars);
8597 BLOCK_VARS (block) = ctx->block_vars;
8598 if (BLOCK_VARS (block))
8599 TREE_USED (block) = 1;
8602 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8604 static void
8605 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8607 gimple *stmt = gsi_stmt (*gsi_p);
8608 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8609 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8610 gimple_build_omp_return (false));
8614 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8615 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8616 of an OMP context, but with task_shared_vars set. */
8618 static tree
8619 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8620 void *data)
8622 tree t = *tp;
8624 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8625 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8626 return t;
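/* E.g. the target lowering above installs DECL_VALUE_EXPRs of the
   form "*tmp" for mapped arrays; a statement that still mentions the
   bare decl must be regimplified so that the value expression gets
   substituted.  */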
8628 if (task_shared_vars
8629 && DECL_P (t)
8630 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8631 return t;
8633 /* If a global variable has been privatized, TREE_CONSTANT on
8634 ADDR_EXPR might be wrong. */
8635 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8636 recompute_tree_invariant_for_addr_expr (t);
8638 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8639 return NULL_TREE;
8642 /* Data to be communicated between lower_omp_regimplify_operands and
8643 lower_omp_regimplify_operands_p. */
8645 struct lower_omp_regimplify_operands_data
8647 omp_context *ctx;
8648 vec<tree> *decls;
8651 /* Helper function for lower_omp_regimplify_operands. Find
8652 omp_member_access_dummy_var vars and adjust temporarily their
8653 DECL_VALUE_EXPRs if needed. */
8655 static tree
8656 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8657 void *data)
8659 tree t = omp_member_access_dummy_var (*tp);
8660 if (t)
8662 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8663 lower_omp_regimplify_operands_data *ldata
8664 = (lower_omp_regimplify_operands_data *) wi->info;
8665 tree o = maybe_lookup_decl (t, ldata->ctx);
8666 if (o != t)
8668 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8669 ldata->decls->safe_push (*tp);
8670 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8671 SET_DECL_VALUE_EXPR (*tp, v);
8674 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8675 return NULL_TREE;
8678 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8679 of omp_member_access_dummy_var vars during regimplification. */
8681 static void
8682 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8683 gimple_stmt_iterator *gsi_p)
8685 auto_vec<tree, 10> decls;
8686 if (ctx)
8688 struct walk_stmt_info wi;
8689 memset (&wi, '\0', sizeof (wi));
8690 struct lower_omp_regimplify_operands_data data;
8691 data.ctx = ctx;
8692 data.decls = &decls;
8693 wi.info = &data;
8694 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8696 gimple_regimplify_operands (stmt, gsi_p);
8697 while (!decls.is_empty ())
8699 tree t = decls.pop ();
8700 tree v = decls.pop ();
8701 SET_DECL_VALUE_EXPR (t, v);
8705 static void
8706 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8708 gimple *stmt = gsi_stmt (*gsi_p);
8709 struct walk_stmt_info wi;
8710 gcall *call_stmt;
8712 if (gimple_has_location (stmt))
8713 input_location = gimple_location (stmt);
8715 if (task_shared_vars)
8716 memset (&wi, '\0', sizeof (wi));
8718 /* If we have issued syntax errors, avoid doing any heavy lifting.
8719 Just replace the OMP directives with a NOP to avoid
8720 confusing RTL expansion. */
8721 if (seen_error () && is_gimple_omp (stmt))
8723 gsi_replace (gsi_p, gimple_build_nop (), true);
8724 return;
8727 switch (gimple_code (stmt))
8729 case GIMPLE_COND:
8731 gcond *cond_stmt = as_a <gcond *> (stmt);
8732 if ((ctx || task_shared_vars)
8733 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8734 lower_omp_regimplify_p,
8735 ctx ? NULL : &wi, NULL)
8736 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8737 lower_omp_regimplify_p,
8738 ctx ? NULL : &wi, NULL)))
8739 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8741 break;
8742 case GIMPLE_CATCH:
8743 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8744 break;
8745 case GIMPLE_EH_FILTER:
8746 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8747 break;
8748 case GIMPLE_TRY:
8749 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8750 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8751 break;
8752 case GIMPLE_TRANSACTION:
8753 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8754 ctx);
8755 break;
8756 case GIMPLE_BIND:
8757 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8758 break;
8759 case GIMPLE_OMP_PARALLEL:
8760 case GIMPLE_OMP_TASK:
8761 ctx = maybe_lookup_ctx (stmt);
8762 gcc_assert (ctx);
8763 if (ctx->cancellable)
8764 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8765 lower_omp_taskreg (gsi_p, ctx);
8766 break;
8767 case GIMPLE_OMP_FOR:
8768 ctx = maybe_lookup_ctx (stmt);
8769 gcc_assert (ctx);
8770 if (ctx->cancellable)
8771 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8772 lower_omp_for (gsi_p, ctx);
8773 break;
8774 case GIMPLE_OMP_SECTIONS:
8775 ctx = maybe_lookup_ctx (stmt);
8776 gcc_assert (ctx);
8777 if (ctx->cancellable)
8778 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8779 lower_omp_sections (gsi_p, ctx);
8780 break;
8781 case GIMPLE_OMP_SINGLE:
8782 ctx = maybe_lookup_ctx (stmt);
8783 gcc_assert (ctx);
8784 lower_omp_single (gsi_p, ctx);
8785 break;
8786 case GIMPLE_OMP_MASTER:
8787 ctx = maybe_lookup_ctx (stmt);
8788 gcc_assert (ctx);
8789 lower_omp_master (gsi_p, ctx);
8790 break;
8791 case GIMPLE_OMP_TASKGROUP:
8792 ctx = maybe_lookup_ctx (stmt);
8793 gcc_assert (ctx);
8794 lower_omp_taskgroup (gsi_p, ctx);
8795 break;
8796 case GIMPLE_OMP_ORDERED:
8797 ctx = maybe_lookup_ctx (stmt);
8798 gcc_assert (ctx);
8799 lower_omp_ordered (gsi_p, ctx);
8800 break;
8801 case GIMPLE_OMP_CRITICAL:
8802 ctx = maybe_lookup_ctx (stmt);
8803 gcc_assert (ctx);
8804 lower_omp_critical (gsi_p, ctx);
8805 break;
8806 case GIMPLE_OMP_ATOMIC_LOAD:
8807 if ((ctx || task_shared_vars)
8808 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8809 as_a <gomp_atomic_load *> (stmt)),
8810 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8811 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8812 break;
8813 case GIMPLE_OMP_TARGET:
8814 ctx = maybe_lookup_ctx (stmt);
8815 gcc_assert (ctx);
8816 lower_omp_target (gsi_p, ctx);
8817 break;
8818 case GIMPLE_OMP_TEAMS:
8819 ctx = maybe_lookup_ctx (stmt);
8820 gcc_assert (ctx);
8821 lower_omp_teams (gsi_p, ctx);
8822 break;
8823 case GIMPLE_OMP_GRID_BODY:
8824 ctx = maybe_lookup_ctx (stmt);
8825 gcc_assert (ctx);
8826 lower_omp_grid_body (gsi_p, ctx);
8827 break;
8828 case GIMPLE_CALL:
8829 tree fndecl;
8830 call_stmt = as_a <gcall *> (stmt);
8831 fndecl = gimple_call_fndecl (call_stmt);
8832 if (fndecl
8833 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8834 switch (DECL_FUNCTION_CODE (fndecl))
8836 case BUILT_IN_GOMP_BARRIER:
8837 if (ctx == NULL)
8838 break;
8839 /* FALLTHRU */
8840 case BUILT_IN_GOMP_CANCEL:
8841 case BUILT_IN_GOMP_CANCELLATION_POINT:
8842 omp_context *cctx;
8843 cctx = ctx;
8844 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8845 cctx = cctx->outer;
8846 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8847 if (!cctx->cancellable)
8849 if (DECL_FUNCTION_CODE (fndecl)
8850 == BUILT_IN_GOMP_CANCELLATION_POINT)
8852 stmt = gimple_build_nop ();
8853 gsi_replace (gsi_p, stmt, false);
8855 break;
8857 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8859 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8860 gimple_call_set_fndecl (call_stmt, fndecl);
8861 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8863 tree lhs;
8864 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8865 gimple_call_set_lhs (call_stmt, lhs);
8866 tree fallthru_label;
8867 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8868 gimple *g;
8869 g = gimple_build_label (fallthru_label);
8870 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8871 g = gimple_build_cond (NE_EXPR, lhs,
8872 fold_convert (TREE_TYPE (lhs),
8873 boolean_false_node),
8874 cctx->cancel_label, fallthru_label);
8875 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
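/* I.e. a cancellable barrier is rewritten (sketch) from

     GOMP_barrier ();

   into

     retval = GOMP_barrier_cancel ();
     if (retval != 0) goto <cancel_label>;
     <fallthru_label>:

   so that a pending cancellation transfers control to the region's
   cancellation label.  */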
8876 break;
8877 default:
8878 break;
8880 /* FALLTHRU */
8881 default:
8882 if ((ctx || task_shared_vars)
8883 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8884 ctx ? NULL : &wi))
8886 /* Just remove clobbers; this should happen only if we have
8887 "privatized" local addressable variables in SIMD regions.
8888 The clobber isn't needed in that case, and gimplifying the
8889 address of the ARRAY_REF into a pointer and creating a
8890 MEM_REF-based clobber would create worse code than we get
8891 with the clobber dropped. */
8892 if (gimple_clobber_p (stmt))
8894 gsi_replace (gsi_p, gimple_build_nop (), true);
8895 break;
8897 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8899 break;
8903 static void
8904 lower_omp (gimple_seq *body, omp_context *ctx)
8906 location_t saved_location = input_location;
8907 gimple_stmt_iterator gsi;
8908 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8909 lower_omp_1 (&gsi, ctx);
8910 /* During gimplification, we haven't folded statements inside offloading
8911 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8912 if (target_nesting_level || taskreg_nesting_level)
8913 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8914 fold_stmt (&gsi);
8915 input_location = saved_location;
8918 /* Main entry point. */
8920 static unsigned int
8921 execute_lower_omp (void)
8923 gimple_seq body;
8924 int i;
8925 omp_context *ctx;
8927 /* This pass always runs, to provide PROP_gimple_lomp.
8928 But often, there is nothing to do. */
8929 if (flag_openacc == 0 && flag_openmp == 0
8930 && flag_openmp_simd == 0)
8931 return 0;
8933 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8934 delete_omp_context);
8936 body = gimple_body (current_function_decl);
8938 if (hsa_gen_requested_p ())
8939 omp_grid_gridify_all_targets (&body);
8941 scan_omp (&body, NULL);
8942 gcc_assert (taskreg_nesting_level == 0);
8943 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8944 finish_taskreg_scan (ctx);
8945 taskreg_contexts.release ();
8947 if (all_contexts->root)
8949 if (task_shared_vars)
8950 push_gimplify_context ();
8951 lower_omp (&body, NULL);
8952 if (task_shared_vars)
8953 pop_gimplify_context (NULL);
8956 if (all_contexts)
8958 splay_tree_delete (all_contexts);
8959 all_contexts = NULL;
8961 BITMAP_FREE (task_shared_vars);
8962 return 0;
8965 namespace {
8967 const pass_data pass_data_lower_omp =
8969 GIMPLE_PASS, /* type */
8970 "omplower", /* name */
8971 OPTGROUP_OMP, /* optinfo_flags */
8972 TV_NONE, /* tv_id */
8973 PROP_gimple_any, /* properties_required */
8974 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8975 0, /* properties_destroyed */
8976 0, /* todo_flags_start */
8977 0, /* todo_flags_finish */
8980 class pass_lower_omp : public gimple_opt_pass
8982 public:
8983 pass_lower_omp (gcc::context *ctxt)
8984 : gimple_opt_pass (pass_data_lower_omp, ctxt)
8987 /* opt_pass methods: */
8988 virtual unsigned int execute (function *) { return execute_lower_omp (); }
8990 }; // class pass_lower_omp
8992 } // anon namespace
8994 gimple_opt_pass *
8995 make_pass_lower_omp (gcc::context *ctxt)
8997 return new pass_lower_omp (ctxt);
9000 /* The following is a utility to diagnose structured block violations.
9001 It is not part of the "omplower" pass, as that's invoked too late. It
9002 should be invoked by the respective front ends after gimplification. */
9004 static splay_tree all_labels;
9006 /* Check for mismatched contexts and generate an error if needed. Return
9007 true if an error is detected. */
9009 static bool
9010 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9011 gimple *branch_ctx, gimple *label_ctx)
9013 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9014 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9016 if (label_ctx == branch_ctx)
9017 return false;
9019 const char* kind = NULL;
9021 if (flag_openacc)
9023 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9024 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9026 gcc_checking_assert (kind == NULL);
9027 kind = "OpenACC";
9030 if (kind == NULL)
9032 gcc_checking_assert (flag_openmp || flag_openmp_simd);
9033 kind = "OpenMP";
9036 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9037 so we could traverse it and issue a correct "exit" or "enter" error
9038 message upon a structured block violation.
9040 We built that context by tree_cons'ing up a list, but there is no easy
9041 counterpart in gimple tuples. It seems like far too much work
9042 for issuing exit/enter error messages. If someone really misses the
9043 distinct error message... patches welcome. */
9045 #if 0
9046 /* Try to avoid confusing the user by producing an error message
9047 with correct "exit" or "enter" verbiage. We prefer "exit"
9048 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9049 if (branch_ctx == NULL)
9050 exit_p = false;
9051 else
9053 while (label_ctx)
9055 if (TREE_VALUE (label_ctx) == branch_ctx)
9057 exit_p = false;
9058 break;
9060 label_ctx = TREE_CHAIN (label_ctx);
9064 if (exit_p)
9065 error ("invalid exit from %s structured block", kind);
9066 else
9067 error ("invalid entry to %s structured block", kind);
9068 #endif
9070 /* If it's obvious we have an invalid entry, be specific about the error. */
9071 if (branch_ctx == NULL)
9072 error ("invalid entry to %s structured block", kind);
9073 else
9075 /* Otherwise, be vague and lazy, but efficient. */
9076 error ("invalid branch to/from %s structured block", kind);
9079 gsi_replace (gsi_p, gimple_build_nop (), false);
9080 return true;
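/* For example (sketch), compiling with -fopenmp:

     #pragma omp parallel
     {
       lab: ;
     }
     goto lab;

   reaches this function with BRANCH_CTX == NULL and LABEL_CTX set to
   the parallel statement, so "invalid entry to OpenMP structured
   block" is reported and the branch is replaced by a nop.  */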
9083 /* Pass 1: Create a minimal tree of structured blocks, and record
9084 where each label is found. */
9086 static tree
9087 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9088 struct walk_stmt_info *wi)
9090 gimple *context = (gimple *) wi->info;
9091 gimple *inner_context;
9092 gimple *stmt = gsi_stmt (*gsi_p);
9094 *handled_ops_p = true;
9096 switch (gimple_code (stmt))
9098 WALK_SUBSTMTS;
9100 case GIMPLE_OMP_PARALLEL:
9101 case GIMPLE_OMP_TASK:
9102 case GIMPLE_OMP_SECTIONS:
9103 case GIMPLE_OMP_SINGLE:
9104 case GIMPLE_OMP_SECTION:
9105 case GIMPLE_OMP_MASTER:
9106 case GIMPLE_OMP_ORDERED:
9107 case GIMPLE_OMP_CRITICAL:
9108 case GIMPLE_OMP_TARGET:
9109 case GIMPLE_OMP_TEAMS:
9110 case GIMPLE_OMP_TASKGROUP:
9111 /* The minimal context here is just the current OMP construct. */
9112 inner_context = stmt;
9113 wi->info = inner_context;
9114 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9115 wi->info = context;
9116 break;
9118 case GIMPLE_OMP_FOR:
9119 inner_context = stmt;
9120 wi->info = inner_context;
9121 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9122 walk them. */
9123 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9124 diagnose_sb_1, NULL, wi);
9125 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9126 wi->info = context;
9127 break;
9129 case GIMPLE_LABEL:
9130 splay_tree_insert (all_labels,
9131 (splay_tree_key) gimple_label_label (
9132 as_a <glabel *> (stmt)),
9133 (splay_tree_value) context);
9134 break;
9136 default:
9137 break;
9140 return NULL_TREE;
9143 /* Pass 2: Check each branch and see if its context differs from that of
9144 the destination label's context. */
9146 static tree
9147 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9148 struct walk_stmt_info *wi)
9150 gimple *context = (gimple *) wi->info;
9151 splay_tree_node n;
9152 gimple *stmt = gsi_stmt (*gsi_p);
9154 *handled_ops_p = true;
9156 switch (gimple_code (stmt))
9158 WALK_SUBSTMTS;
9160 case GIMPLE_OMP_PARALLEL:
9161 case GIMPLE_OMP_TASK:
9162 case GIMPLE_OMP_SECTIONS:
9163 case GIMPLE_OMP_SINGLE:
9164 case GIMPLE_OMP_SECTION:
9165 case GIMPLE_OMP_MASTER:
9166 case GIMPLE_OMP_ORDERED:
9167 case GIMPLE_OMP_CRITICAL:
9168 case GIMPLE_OMP_TARGET:
9169 case GIMPLE_OMP_TEAMS:
9170 case GIMPLE_OMP_TASKGROUP:
9171 wi->info = stmt;
9172 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9173 wi->info = context;
9174 break;
9176 case GIMPLE_OMP_FOR:
9177 wi->info = stmt;
9178 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9179 walk them. */
9180 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9181 diagnose_sb_2, NULL, wi);
9182 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9183 wi->info = context;
9184 break;
9186 case GIMPLE_COND:
9188 gcond *cond_stmt = as_a <gcond *> (stmt);
9189 tree lab = gimple_cond_true_label (cond_stmt);
9190 if (lab)
9192 n = splay_tree_lookup (all_labels,
9193 (splay_tree_key) lab);
9194 diagnose_sb_0 (gsi_p, context,
9195 n ? (gimple *) n->value : NULL);
9197 lab = gimple_cond_false_label (cond_stmt);
9198 if (lab)
9200 n = splay_tree_lookup (all_labels,
9201 (splay_tree_key) lab);
9202 diagnose_sb_0 (gsi_p, context,
9203 n ? (gimple *) n->value : NULL);
9206 break;
9208 case GIMPLE_GOTO:
9210 tree lab = gimple_goto_dest (stmt);
9211 if (TREE_CODE (lab) != LABEL_DECL)
9212 break;
9214 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9215 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9217 break;
9219 case GIMPLE_SWITCH:
9221 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9222 unsigned int i;
9223 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9225 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9226 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9227 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9228 break;
9231 break;
9233 case GIMPLE_RETURN:
9234 diagnose_sb_0 (gsi_p, context, NULL);
9235 break;
9237 default:
9238 break;
9241 return NULL_TREE;
9244 static unsigned int
9245 diagnose_omp_structured_block_errors (void)
9247 struct walk_stmt_info wi;
9248 gimple_seq body = gimple_body (current_function_decl);
9250 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9252 memset (&wi, 0, sizeof (wi));
9253 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9255 memset (&wi, 0, sizeof (wi));
9256 wi.want_locations = true;
9257 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9259 gimple_set_body (current_function_decl, body);
9261 splay_tree_delete (all_labels);
9262 all_labels = NULL;
9264 return 0;
9267 namespace {
9269 const pass_data pass_data_diagnose_omp_blocks =
9271 GIMPLE_PASS, /* type */
9272 "*diagnose_omp_blocks", /* name */
9273 OPTGROUP_OMP, /* optinfo_flags */
9274 TV_NONE, /* tv_id */
9275 PROP_gimple_any, /* properties_required */
9276 0, /* properties_provided */
9277 0, /* properties_destroyed */
9278 0, /* todo_flags_start */
9279 0, /* todo_flags_finish */
9282 class pass_diagnose_omp_blocks : public gimple_opt_pass
9284 public:
9285 pass_diagnose_omp_blocks (gcc::context *ctxt)
9286 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9289 /* opt_pass methods: */
9290 virtual bool gate (function *)
9292 return flag_openacc || flag_openmp || flag_openmp_simd;
9294 virtual unsigned int execute (function *)
9296 return diagnose_omp_structured_block_errors ();
9299 }; // class pass_diagnose_omp_blocks
9301 } // anon namespace
9303 gimple_opt_pass *
9304 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9306 return new pass_diagnose_omp_blocks (ctxt);
9310 #include "gt-omp-low.h"