gcc/omp-low.c
/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2017 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
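/* Editorial illustration (not from the original sources): given a
   construct such as

       #pragma omp parallel shared(x)
       x++;

   the scan phase below builds a record type (.omp_data_s) with a field
   for X, the lowering phase rewrites the body to access X through the
   received pointer (.omp_data_i->x), and pass_expand_omp finally
   outlines the body into a child function that the runtime invokes in
   each thread.  */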
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
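/* Editorial note: WALK_SUBSTMTS is meant to be expanded inside the
   switch over gimple_code in a walk_gimple_seq callback, e.g.

       switch (gimple_code (stmt))
	 {
	 WALK_SUBSTMTS;
	 case GIMPLE_OMP_FOR:
	   ...
	 default:
	   break;
	 }

   so that container statements such as GIMPLE_BIND and GIMPLE_TRY are
   recursed into; omp_find_combined_for below is a real use.  */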
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
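/* Editorial note: for example, if X is the expression A + B, then
   unshare_and_remap (X, B, C) returns a fresh, unshared tree for A + C
   and leaves the original expression untouched.  */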
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that it must have been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
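/* Editorial summary of the rules above (not from the original sources):
   aggregates, atomics, statics, addressable variables and variables
   with value-exprs are always passed by pointer; read-only scalars and
   by-reference PARM/RESULT_DECLs get copy-in only; a scalar shared both
   here and in an outer parallel, or shared into a task, is forced
   addressable and passed by pointer so that every thread sees the one
   object.  */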
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable just because task needs
     to take its address.  But we don't need to take address of
     privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is a reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */
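/* Editorial note on MASK (inferred from the code below): bit 0 installs
   the field into CTX->FIELD_MAP/RECORD_TYPE and bit 1 into
   CTX->SFIELD_MAP/SRECORD_TYPE (so 3 means both); bit 2 requests a
   pointer-to-pointer field for an array; bit 3 keys the map entries on
   &DECL_UID (VAR) rather than on VAR itself.  */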
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);
/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */
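/* Editorial note: scan_sharing_clauses below makes two passes over
   CLAUSES.  The first pass installs record fields and local replacement
   decls; the second pass, once all fields exist, fixes up remapped decls
   (types, sizes, value-exprs) and flags any reduction/lastprivate/linear
   GIMPLE sequences that must be scanned afterwards.  */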
static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}
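/* Editorial note: clone_function_name appends the given suffix plus a
   counter, so for a function foo this typically produces names such as
   foo._omp_fn.0 or foo._omp_cpyfn.1.  */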
/* Return true if CTX may belong to offloaded code: either if the current
   function is offloaded, or any enclosing context corresponds to a target
   region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
/* Callback for walk_gimple_seq.  Check if a combined parallel
   contains a gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}

/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
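/* Editorial note: the _looptemp_ temporaries added above carry the
   computed iteration bounds (and, for collapsed loops, the per-loop
   trip counts) from the combined parallel/taskloop construct to the
   inner GIMPLE_OMP_FOR during expansion.  */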
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have a depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_ctx returns non-NULL for some
   tree, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
	{
	  if (DECL_HAS_VALUE_EXPR_P (t))
	    t = unshare_expr (DECL_VALUE_EXPR (t));
	  *tp = t;
	}
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
1873 /* If any decls have been made addressable during scan_omp,
1874 adjust their fields if needed, and layout record types
1875 of parallel/task constructs. */
1877 static void
1878 finish_taskreg_scan (omp_context *ctx)
1880 if (ctx->record_type == NULL_TREE)
1881 return;
1883 /* If any task_shared_vars were needed, verify all
1884 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1885 statements if use_pointer_for_field hasn't changed
1886 because of that. If it did, update field types now. */
1887 if (task_shared_vars)
1889 tree c;
1891 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1892 c; c = OMP_CLAUSE_CHAIN (c))
1893 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1894 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1896 tree decl = OMP_CLAUSE_DECL (c);
1898 /* Global variables don't need to be copied,
1899 the receiver side will use them directly. */
1900 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1901 continue;
1902 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1903 || !use_pointer_for_field (decl, ctx))
1904 continue;
1905 tree field = lookup_field (decl, ctx);
1906 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1907 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1908 continue;
1909 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1910 TREE_THIS_VOLATILE (field) = 0;
1911 DECL_USER_ALIGN (field) = 0;
1912 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1913 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1914 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1915 if (ctx->srecord_type)
1917 tree sfield = lookup_sfield (decl, ctx);
1918 TREE_TYPE (sfield) = TREE_TYPE (field);
1919 TREE_THIS_VOLATILE (sfield) = 0;
1920 DECL_USER_ALIGN (sfield) = 0;
1921 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1922 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1923 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1928 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1930 layout_type (ctx->record_type);
1931 fixup_child_record_type (ctx);
1933 else
1935 location_t loc = gimple_location (ctx->stmt);
1936 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1937 /* Move VLA fields to the end. */
1938 p = &TYPE_FIELDS (ctx->record_type);
1939 while (*p)
1940 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1941 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1943 *q = *p;
1944 *p = TREE_CHAIN (*p);
1945 TREE_CHAIN (*q) = NULL_TREE;
1946 q = &TREE_CHAIN (*q);
1948 else
1949 p = &DECL_CHAIN (*p);
1950 *p = vla_fields;
1951 if (gimple_omp_task_taskloop_p (ctx->stmt))
1953 /* Move the fields corresponding to the first and second _looptemp_
1954 clauses to the front. These are filled in by GOMP_taskloop
1955 and thus need to be at specific positions. */
1956 tree c1 = gimple_omp_task_clauses (ctx->stmt);
1957 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1958 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
1959 OMP_CLAUSE__LOOPTEMP_);
1960 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1961 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
1962 p = &TYPE_FIELDS (ctx->record_type);
1963 while (*p)
1964 if (*p == f1 || *p == f2)
1965 *p = DECL_CHAIN (*p);
1966 else
1967 p = &DECL_CHAIN (*p);
1968 DECL_CHAIN (f1) = f2;
1969 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
1970 TYPE_FIELDS (ctx->record_type) = f1;
1971 if (ctx->srecord_type)
1973 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
1974 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
1975 p = &TYPE_FIELDS (ctx->srecord_type);
1976 while (*p)
1977 if (*p == f1 || *p == f2)
1978 *p = DECL_CHAIN (*p);
1979 else
1980 p = &DECL_CHAIN (*p);
1981 DECL_CHAIN (f1) = f2;
1982 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
1983 TYPE_FIELDS (ctx->srecord_type) = f1;
1986 layout_type (ctx->record_type);
1987 fixup_child_record_type (ctx);
1988 if (ctx->srecord_type)
1989 layout_type (ctx->srecord_type);
1990 tree t = fold_convert_loc (loc, long_integer_type_node,
1991 TYPE_SIZE_UNIT (ctx->record_type));
1992 if (TREE_CODE (t) != INTEGER_CST)
1994 t = unshare_expr (t);
1995 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
1997 gimple_omp_task_set_arg_size (ctx->stmt, t);
1998 t = build_int_cst (long_integer_type_node,
1999 TYPE_ALIGN_UNIT (ctx->record_type));
2000 gimple_omp_task_set_arg_align (ctx->stmt, t);
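/* Illustrative sketch of the taskloop reordering above (assumed
   layout, written out here only for clarity): GOMP_taskloop stores
   the start and end iteration values into the first two members of
   the argument block, so after finish_taskreg_scan the record is
   expected to look like

     struct .omp_data_s { long _looptemp_1; long _looptemp_2; ...; };

   with all remaining fields, VLA fields last, following the two
   _looptemp_ fields.  */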
2004 /* Find the enclosing offload context. */
2006 static omp_context *
2007 enclosing_target_ctx (omp_context *ctx)
2009 for (; ctx; ctx = ctx->outer)
2010 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2011 break;
2013 return ctx;
2016 /* Return true if ctx is part of an oacc kernels region. */
2018 static bool
2019 ctx_in_oacc_kernels_region (omp_context *ctx)
2021 for (;ctx != NULL; ctx = ctx->outer)
2023 gimple *stmt = ctx->stmt;
2024 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2025 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2026 return true;
2029 return false;
2032 /* Check the parallelism clauses inside a kernels region.
2033 Until kernels handling moves to use the same loop indirection
2034 scheme as parallel, we need to do this checking early. */
2036 static unsigned
2037 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2039 bool checking = true;
2040 unsigned outer_mask = 0;
2041 unsigned this_mask = 0;
2042 bool has_seq = false, has_auto = false;
2044 if (ctx->outer)
2045 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2046 if (!stmt)
2048 checking = false;
2049 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2050 return outer_mask;
2051 stmt = as_a <gomp_for *> (ctx->stmt);
2054 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2056 switch (OMP_CLAUSE_CODE (c))
2058 case OMP_CLAUSE_GANG:
2059 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2060 break;
2061 case OMP_CLAUSE_WORKER:
2062 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2063 break;
2064 case OMP_CLAUSE_VECTOR:
2065 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2066 break;
2067 case OMP_CLAUSE_SEQ:
2068 has_seq = true;
2069 break;
2070 case OMP_CLAUSE_AUTO:
2071 has_auto = true;
2072 break;
2073 default:
2074 break;
2078 if (checking)
2080 if (has_seq && (this_mask || has_auto))
2081 error_at (gimple_location (stmt), "%<seq%> overrides other"
2082 " OpenACC loop specifiers");
2083 else if (has_auto && this_mask)
2084 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2085 " OpenACC loop specifiers");
2087 if (this_mask & outer_mask)
2088 error_at (gimple_location (stmt), "inner loop uses same"
2089 " OpenACC parallelism as containing loop");
2092 return outer_mask | this_mask;
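/* As an illustrative example, the early checking above rejects

     #pragma acc kernels
     #pragma acc loop gang
     for (i = 0; i < n; i++)
       {
         #pragma acc loop gang
         for (j = 0; j < n; j++)
           ;
       }

   with "inner loop uses same OpenACC parallelism as containing loop",
   because GOMP_DIM_MASK (GOMP_DIM_GANG) is set in both this_mask and
   outer_mask when the inner loop is checked.  */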
2095 /* Scan a GIMPLE_OMP_FOR. */
2097 static omp_context *
2098 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2100 omp_context *ctx;
2101 size_t i;
2102 tree clauses = gimple_omp_for_clauses (stmt);
2104 ctx = new_omp_context (stmt, outer_ctx);
2106 if (is_gimple_omp_oacc (stmt))
2108 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2110 if (!tgt || is_oacc_parallel (tgt))
2111 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2113 char const *check = NULL;
2115 switch (OMP_CLAUSE_CODE (c))
2117 case OMP_CLAUSE_GANG:
2118 check = "gang";
2119 break;
2121 case OMP_CLAUSE_WORKER:
2122 check = "worker";
2123 break;
2125 case OMP_CLAUSE_VECTOR:
2126 check = "vector";
2127 break;
2129 default:
2130 break;
2133 if (check && OMP_CLAUSE_OPERAND (c, 0))
2134 error_at (gimple_location (stmt),
2135 "argument not permitted on %qs clause in"
2136 " OpenACC %<parallel%>", check);
2139 if (tgt && is_oacc_kernels (tgt))
2141 /* Strip out reductions, as they are not handled yet. */
2142 tree *prev_ptr = &clauses;
2144 while (tree probe = *prev_ptr)
2146 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2148 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2149 *prev_ptr = *next_ptr;
2150 else
2151 prev_ptr = next_ptr;
2154 gimple_omp_for_set_clauses (stmt, clauses);
2155 check_oacc_kernel_gwv (stmt, ctx);
2159 scan_sharing_clauses (clauses, ctx);
2161 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2162 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2164 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2165 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2166 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2167 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2169 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2170 return ctx;
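/* For illustration: inside an OpenACC parallel region the loop
   parallelism clauses may not take arguments, so

     #pragma acc parallel
     #pragma acc loop gang (num: 32)
     for (i = 0; i < n; i++)
       ;

   is rejected above with "argument not permitted on 'gang' clause in
   OpenACC 'parallel'".  */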
2173 /* Duplicate a #pragma omp simd loop: one copy for SIMT, another for SIMD. */
2175 static void
2176 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2177 omp_context *outer_ctx)
2179 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2180 gsi_replace (gsi, bind, false);
2181 gimple_seq seq = NULL;
2182 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2183 tree cond = create_tmp_var_raw (integer_type_node);
2184 DECL_CONTEXT (cond) = current_function_decl;
2185 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2186 gimple_bind_set_vars (bind, cond);
2187 gimple_call_set_lhs (g, cond);
2188 gimple_seq_add_stmt (&seq, g);
2189 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2190 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2191 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2192 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2193 gimple_seq_add_stmt (&seq, g);
2194 g = gimple_build_label (lab1);
2195 gimple_seq_add_stmt (&seq, g);
2196 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2197 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2198 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2199 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2200 gimple_omp_for_set_clauses (new_stmt, clause);
2201 gimple_seq_add_stmt (&seq, new_stmt);
2202 g = gimple_build_goto (lab3);
2203 gimple_seq_add_stmt (&seq, g);
2204 g = gimple_build_label (lab2);
2205 gimple_seq_add_stmt (&seq, g);
2206 gimple_seq_add_stmt (&seq, stmt);
2207 g = gimple_build_label (lab3);
2208 gimple_seq_add_stmt (&seq, g);
2209 gimple_bind_set_body (bind, seq);
2210 update_stmt (bind);
2211 scan_omp_for (new_stmt, outer_ctx);
2212 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
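/* Roughly, the duplication above produces (illustrative shape only,
   using GIMPLE-like pseudocode):

     D.cond = IFN_GOMP_USE_SIMT ();
     if (D.cond != 0) goto lab1; else goto lab2;
     lab1:
       #pragma omp simd _simt_ ...   (copy, scanned for SIMT)
       goto lab3;
     lab2:
       #pragma omp simd ...          (original, scanned for SIMD)
     lab3:

   wrapped in a GIMPLE_BIND that owns the condition temporary.  */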
2215 /* Scan an OpenMP sections directive. */
2217 static void
2218 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2220 omp_context *ctx;
2222 ctx = new_omp_context (stmt, outer_ctx);
2223 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2224 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2227 /* Scan an OpenMP single directive. */
2229 static void
2230 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2232 omp_context *ctx;
2233 tree name;
2235 ctx = new_omp_context (stmt, outer_ctx);
2236 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2237 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2238 name = create_tmp_var_name (".omp_copy_s");
2239 name = build_decl (gimple_location (stmt),
2240 TYPE_DECL, name, ctx->record_type);
2241 TYPE_NAME (ctx->record_type) = name;
2243 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2244 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2246 if (TYPE_FIELDS (ctx->record_type) == NULL)
2247 ctx->record_type = NULL;
2248 else
2249 layout_type (ctx->record_type);
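/* For illustration: the .omp_copy_s record built above only gets
   fields from copyprivate clauses, e.g.

     #pragma omp single copyprivate (x)

   adds a field for x; with no such clause the record stays empty and
   is dropped again.  */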
2252 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2253 used in the corresponding offloaded function are restrict. */
2255 static bool
2256 omp_target_base_pointers_restrict_p (tree clauses)
2258 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2259 used by OpenACC. */
2260 if (flag_openacc == 0)
2261 return false;
2263 /* I. Basic example:
2265 void foo (void)
2267 unsigned int a[2], b[2];
2269 #pragma acc kernels \
2270 copyout (a) \
2271 copyout (b)
2273 a[0] = 0;
2274 b[0] = 1;
2278 After gimplification, we have:
2280 #pragma omp target oacc_kernels \
2281 map(force_from:a [len: 8]) \
2282 map(force_from:b [len: 8])
2284 a[0] = 0;
2285 b[0] = 1;
2288 Because both mappings have the force prefix, we know that they will be
2289 allocated when calling the corresponding offloaded function, which means we
2290 can mark the base pointers for a and b in the offloaded function as
2291 restrict. */
2293 tree c;
2294 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2296 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2297 return false;
2299 switch (OMP_CLAUSE_MAP_KIND (c))
2301 case GOMP_MAP_FORCE_ALLOC:
2302 case GOMP_MAP_FORCE_TO:
2303 case GOMP_MAP_FORCE_FROM:
2304 case GOMP_MAP_FORCE_TOFROM:
2305 break;
2306 default:
2307 return false;
2311 return true;
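/* Conversely (an illustrative counter-example), a mapping kind that
   may reuse an existing device allocation defeats the analysis:

     #pragma acc kernels copyout (a) present_or_copy (b)

   maps b without a GOMP_MAP_FORCE_* kind, so the loop above returns
   false and the base pointers are not marked restrict.  */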
2314 /* Scan a GIMPLE_OMP_TARGET. */
2316 static void
2317 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2319 omp_context *ctx;
2320 tree name;
2321 bool offloaded = is_gimple_omp_offloaded (stmt);
2322 tree clauses = gimple_omp_target_clauses (stmt);
2324 ctx = new_omp_context (stmt, outer_ctx);
2325 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2326 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2327 name = create_tmp_var_name (".omp_data_t");
2328 name = build_decl (gimple_location (stmt),
2329 TYPE_DECL, name, ctx->record_type);
2330 DECL_ARTIFICIAL (name) = 1;
2331 DECL_NAMELESS (name) = 1;
2332 TYPE_NAME (ctx->record_type) = name;
2333 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2335 bool base_pointers_restrict = false;
2336 if (offloaded)
2338 create_omp_child_function (ctx, false);
2339 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2341 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2342 if (base_pointers_restrict
2343 && dump_file && (dump_flags & TDF_DETAILS))
2344 fprintf (dump_file,
2345 "Base pointers in offloaded function are restrict\n");
2348 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2349 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2351 if (TYPE_FIELDS (ctx->record_type) == NULL)
2352 ctx->record_type = ctx->receiver_decl = NULL;
2353 else
2355 TYPE_FIELDS (ctx->record_type)
2356 = nreverse (TYPE_FIELDS (ctx->record_type));
2357 if (flag_checking)
2359 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2360 for (tree field = TYPE_FIELDS (ctx->record_type);
2361 field;
2362 field = DECL_CHAIN (field))
2363 gcc_assert (DECL_ALIGN (field) == align);
2365 layout_type (ctx->record_type);
2366 if (offloaded)
2367 fixup_child_record_type (ctx);
2371 /* Scan an OpenMP teams directive. */
2373 static void
2374 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2376 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2377 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2378 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2381 /* Check nesting restrictions. */
2382 static bool
2383 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2385 tree c;
2387 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2388 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2389 the original copy of its contents. */
2390 return true;
2392 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2393 inside an OpenACC CTX. */
2394 if (!(is_gimple_omp (stmt)
2395 && is_gimple_omp_oacc (stmt))
2396 /* Except for atomic codes that we share with OpenMP. */
2397 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2398 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2400 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2402 error_at (gimple_location (stmt),
2403 "non-OpenACC construct inside of OpenACC routine");
2404 return false;
2406 else
2407 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2408 if (is_gimple_omp (octx->stmt)
2409 && is_gimple_omp_oacc (octx->stmt))
2411 error_at (gimple_location (stmt),
2412 "non-OpenACC construct inside of OpenACC region");
2413 return false;
2417 if (ctx != NULL)
2419 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2420 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2422 c = NULL_TREE;
2423 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2425 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2426 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2428 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2429 && (ctx->outer == NULL
2430 || !gimple_omp_for_combined_into_p (ctx->stmt)
2431 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2432 || (gimple_omp_for_kind (ctx->outer->stmt)
2433 != GF_OMP_FOR_KIND_FOR)
2434 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2436 error_at (gimple_location (stmt),
2437 "%<ordered simd threads%> must be closely "
2438 "nested inside of %<for simd%> region");
2439 return false;
2441 return true;
2444 error_at (gimple_location (stmt),
2445 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2446 " may not be nested inside %<simd%> region");
2447 return false;
2449 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2451 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2452 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2453 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2454 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2456 error_at (gimple_location (stmt),
2457 "only %<distribute%> or %<parallel%> regions are "
2458 "allowed to be strictly nested inside %<teams%> "
2459 "region");
2460 return false;
2464 switch (gimple_code (stmt))
2466 case GIMPLE_OMP_FOR:
2467 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2468 return true;
2469 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2471 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2473 error_at (gimple_location (stmt),
2474 "%<distribute%> region must be strictly nested "
2475 "inside %<teams%> construct");
2476 return false;
2478 return true;
2480 /* We split a taskloop into a task with a nested taskloop inside it. */
2481 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2482 return true;
2483 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2485 bool ok = false;
2487 if (ctx)
2488 switch (gimple_code (ctx->stmt))
2490 case GIMPLE_OMP_FOR:
2491 ok = (gimple_omp_for_kind (ctx->stmt)
2492 == GF_OMP_FOR_KIND_OACC_LOOP);
2493 break;
2495 case GIMPLE_OMP_TARGET:
2496 switch (gimple_omp_target_kind (ctx->stmt))
2498 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2499 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2500 ok = true;
2501 break;
2503 default:
2504 break;
2507 default:
2508 break;
2510 else if (oacc_get_fn_attrib (current_function_decl))
2511 ok = true;
2512 if (!ok)
2514 error_at (gimple_location (stmt),
2515 "OpenACC loop directive must be associated with"
2516 " an OpenACC compute region");
2517 return false;
2520 /* FALLTHRU */
2521 case GIMPLE_CALL:
2522 if (is_gimple_call (stmt)
2523 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2524 == BUILT_IN_GOMP_CANCEL
2525 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2526 == BUILT_IN_GOMP_CANCELLATION_POINT))
2528 const char *bad = NULL;
2529 const char *kind = NULL;
2530 const char *construct
2531 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2532 == BUILT_IN_GOMP_CANCEL)
2533 ? "#pragma omp cancel"
2534 : "#pragma omp cancellation point";
2535 if (ctx == NULL)
2537 error_at (gimple_location (stmt), "orphaned %qs construct",
2538 construct);
2539 return false;
2541 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2542 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2543 : 0)
2545 case 1:
2546 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2547 bad = "#pragma omp parallel";
2548 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2549 == BUILT_IN_GOMP_CANCEL
2550 && !integer_zerop (gimple_call_arg (stmt, 1)))
2551 ctx->cancellable = true;
2552 kind = "parallel";
2553 break;
2554 case 2:
2555 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2556 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2557 bad = "#pragma omp for";
2558 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2559 == BUILT_IN_GOMP_CANCEL
2560 && !integer_zerop (gimple_call_arg (stmt, 1)))
2562 ctx->cancellable = true;
2563 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2564 OMP_CLAUSE_NOWAIT))
2565 warning_at (gimple_location (stmt), 0,
2566 "%<#pragma omp cancel for%> inside "
2567 "%<nowait%> for construct");
2568 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2569 OMP_CLAUSE_ORDERED))
2570 warning_at (gimple_location (stmt), 0,
2571 "%<#pragma omp cancel for%> inside "
2572 "%<ordered%> for construct");
2574 kind = "for";
2575 break;
2576 case 4:
2577 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2578 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2579 bad = "#pragma omp sections";
2580 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2581 == BUILT_IN_GOMP_CANCEL
2582 && !integer_zerop (gimple_call_arg (stmt, 1)))
2584 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2586 ctx->cancellable = true;
2587 if (omp_find_clause (gimple_omp_sections_clauses
2588 (ctx->stmt),
2589 OMP_CLAUSE_NOWAIT))
2590 warning_at (gimple_location (stmt), 0,
2591 "%<#pragma omp cancel sections%> inside "
2592 "%<nowait%> sections construct");
2594 else
2596 gcc_assert (ctx->outer
2597 && gimple_code (ctx->outer->stmt)
2598 == GIMPLE_OMP_SECTIONS);
2599 ctx->outer->cancellable = true;
2600 if (omp_find_clause (gimple_omp_sections_clauses
2601 (ctx->outer->stmt),
2602 OMP_CLAUSE_NOWAIT))
2603 warning_at (gimple_location (stmt), 0,
2604 "%<#pragma omp cancel sections%> inside "
2605 "%<nowait%> sections construct");
2608 kind = "sections";
2609 break;
2610 case 8:
2611 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2612 bad = "#pragma omp task";
2613 else
2615 for (omp_context *octx = ctx->outer;
2616 octx; octx = octx->outer)
2618 switch (gimple_code (octx->stmt))
2620 case GIMPLE_OMP_TASKGROUP:
2621 break;
2622 case GIMPLE_OMP_TARGET:
2623 if (gimple_omp_target_kind (octx->stmt)
2624 != GF_OMP_TARGET_KIND_REGION)
2625 continue;
2626 /* FALLTHRU */
2627 case GIMPLE_OMP_PARALLEL:
2628 case GIMPLE_OMP_TEAMS:
2629 error_at (gimple_location (stmt),
2630 "%<%s taskgroup%> construct not closely "
2631 "nested inside of %<taskgroup%> region",
2632 construct);
2633 return false;
2634 default:
2635 continue;
2637 break;
2639 ctx->cancellable = true;
2641 kind = "taskgroup";
2642 break;
2643 default:
2644 error_at (gimple_location (stmt), "invalid arguments");
2645 return false;
2647 if (bad)
2649 error_at (gimple_location (stmt),
2650 "%<%s %s%> construct not closely nested inside of %qs",
2651 construct, kind, bad);
2652 return false;
2655 /* FALLTHRU */
2656 case GIMPLE_OMP_SECTIONS:
2657 case GIMPLE_OMP_SINGLE:
2658 for (; ctx != NULL; ctx = ctx->outer)
2659 switch (gimple_code (ctx->stmt))
2661 case GIMPLE_OMP_FOR:
2662 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2663 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2664 break;
2665 /* FALLTHRU */
2666 case GIMPLE_OMP_SECTIONS:
2667 case GIMPLE_OMP_SINGLE:
2668 case GIMPLE_OMP_ORDERED:
2669 case GIMPLE_OMP_MASTER:
2670 case GIMPLE_OMP_TASK:
2671 case GIMPLE_OMP_CRITICAL:
2672 if (is_gimple_call (stmt))
2674 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2675 != BUILT_IN_GOMP_BARRIER)
2676 return true;
2677 error_at (gimple_location (stmt),
2678 "barrier region may not be closely nested inside "
2679 "of work-sharing, %<critical%>, %<ordered%>, "
2680 "%<master%>, explicit %<task%> or %<taskloop%> "
2681 "region");
2682 return false;
2684 error_at (gimple_location (stmt),
2685 "work-sharing region may not be closely nested inside "
2686 "of work-sharing, %<critical%>, %<ordered%>, "
2687 "%<master%>, explicit %<task%> or %<taskloop%> region");
2688 return false;
2689 case GIMPLE_OMP_PARALLEL:
2690 case GIMPLE_OMP_TEAMS:
2691 return true;
2692 case GIMPLE_OMP_TARGET:
2693 if (gimple_omp_target_kind (ctx->stmt)
2694 == GF_OMP_TARGET_KIND_REGION)
2695 return true;
2696 break;
2697 default:
2698 break;
2700 break;
2701 case GIMPLE_OMP_MASTER:
2702 for (; ctx != NULL; ctx = ctx->outer)
2703 switch (gimple_code (ctx->stmt))
2705 case GIMPLE_OMP_FOR:
2706 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2707 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2708 break;
2709 /* FALLTHRU */
2710 case GIMPLE_OMP_SECTIONS:
2711 case GIMPLE_OMP_SINGLE:
2712 case GIMPLE_OMP_TASK:
2713 error_at (gimple_location (stmt),
2714 "%<master%> region may not be closely nested inside "
2715 "of work-sharing, explicit %<task%> or %<taskloop%> "
2716 "region");
2717 return false;
2718 case GIMPLE_OMP_PARALLEL:
2719 case GIMPLE_OMP_TEAMS:
2720 return true;
2721 case GIMPLE_OMP_TARGET:
2722 if (gimple_omp_target_kind (ctx->stmt)
2723 == GF_OMP_TARGET_KIND_REGION)
2724 return true;
2725 break;
2726 default:
2727 break;
2729 break;
2730 case GIMPLE_OMP_TASK:
2731 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2732 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2733 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2734 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2736 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2737 error_at (OMP_CLAUSE_LOCATION (c),
2738 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2739 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2740 return false;
2742 break;
2743 case GIMPLE_OMP_ORDERED:
2744 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2745 c; c = OMP_CLAUSE_CHAIN (c))
2747 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2749 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2750 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2751 continue;
2753 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2754 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2755 || kind == OMP_CLAUSE_DEPEND_SINK)
2757 tree oclause;
2758 /* Look for containing ordered(N) loop. */
2759 if (ctx == NULL
2760 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2761 || (oclause
2762 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2763 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2765 error_at (OMP_CLAUSE_LOCATION (c),
2766 "%<ordered%> construct with %<depend%> clause "
2767 "must be closely nested inside an %<ordered%> "
2768 "loop");
2769 return false;
2771 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2773 error_at (OMP_CLAUSE_LOCATION (c),
2774 "%<ordered%> construct with %<depend%> clause "
2775 "must be closely nested inside a loop with "
2776 "%<ordered%> clause with a parameter");
2777 return false;
2780 else
2782 error_at (OMP_CLAUSE_LOCATION (c),
2783 "invalid depend kind in omp %<ordered%> %<depend%>");
2784 return false;
2787 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2788 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2790 /* An ordered simd must be closely nested inside a simd region,
2791 and a simd region must not contain constructs other than
2792 ordered simd, so an ordered simd is either orphaned, or
2793 ctx->stmt must be a simd region. The latter case has already
2794 been handled earlier. */
2795 if (ctx != NULL)
2797 error_at (gimple_location (stmt),
2798 "%<ordered%> %<simd%> must be closely nested inside "
2799 "%<simd%> region");
2800 return false;
2803 for (; ctx != NULL; ctx = ctx->outer)
2804 switch (gimple_code (ctx->stmt))
2806 case GIMPLE_OMP_CRITICAL:
2807 case GIMPLE_OMP_TASK:
2808 case GIMPLE_OMP_ORDERED:
2809 ordered_in_taskloop:
2810 error_at (gimple_location (stmt),
2811 "%<ordered%> region may not be closely nested inside "
2812 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2813 "%<taskloop%> region");
2814 return false;
2815 case GIMPLE_OMP_FOR:
2816 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2817 goto ordered_in_taskloop;
2818 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2819 OMP_CLAUSE_ORDERED) == NULL)
2821 error_at (gimple_location (stmt),
2822 "%<ordered%> region must be closely nested inside "
2823 "a loop region with an %<ordered%> clause");
2824 return false;
2826 return true;
2827 case GIMPLE_OMP_TARGET:
2828 if (gimple_omp_target_kind (ctx->stmt)
2829 != GF_OMP_TARGET_KIND_REGION)
2830 break;
2831 /* FALLTHRU */
2832 case GIMPLE_OMP_PARALLEL:
2833 case GIMPLE_OMP_TEAMS:
2834 error_at (gimple_location (stmt),
2835 "%<ordered%> region must be closely nested inside "
2836 "a loop region with an %<ordered%> clause");
2837 return false;
2838 default:
2839 break;
2841 break;
2842 case GIMPLE_OMP_CRITICAL:
2844 tree this_stmt_name
2845 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2846 for (; ctx != NULL; ctx = ctx->outer)
2847 if (gomp_critical *other_crit
2848 = dyn_cast <gomp_critical *> (ctx->stmt))
2849 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2851 error_at (gimple_location (stmt),
2852 "%<critical%> region may not be nested inside "
2853 "a %<critical%> region with the same name");
2854 return false;
2857 break;
2858 case GIMPLE_OMP_TEAMS:
2859 if (ctx == NULL
2860 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2861 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2863 error_at (gimple_location (stmt),
2864 "%<teams%> construct not closely nested inside of "
2865 "%<target%> construct");
2866 return false;
2868 break;
2869 case GIMPLE_OMP_TARGET:
2870 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2871 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2872 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2873 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2875 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2876 error_at (OMP_CLAUSE_LOCATION (c),
2877 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2878 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2879 return false;
2881 if (is_gimple_omp_offloaded (stmt)
2882 && oacc_get_fn_attrib (cfun->decl) != NULL)
2884 error_at (gimple_location (stmt),
2885 "OpenACC region inside of OpenACC routine, nested "
2886 "parallelism not supported yet");
2887 return false;
2889 for (; ctx != NULL; ctx = ctx->outer)
2891 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2893 if (is_gimple_omp (stmt)
2894 && is_gimple_omp_oacc (stmt)
2895 && is_gimple_omp (ctx->stmt))
2897 error_at (gimple_location (stmt),
2898 "OpenACC construct inside of non-OpenACC region");
2899 return false;
2901 continue;
2904 const char *stmt_name, *ctx_stmt_name;
2905 switch (gimple_omp_target_kind (stmt))
2907 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2908 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2909 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2910 case GF_OMP_TARGET_KIND_ENTER_DATA:
2911 stmt_name = "target enter data"; break;
2912 case GF_OMP_TARGET_KIND_EXIT_DATA:
2913 stmt_name = "target exit data"; break;
2914 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2915 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2916 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2917 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2918 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2919 stmt_name = "enter/exit data"; break;
2920 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2921 break;
2922 default: gcc_unreachable ();
2924 switch (gimple_omp_target_kind (ctx->stmt))
2926 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2927 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2928 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2929 ctx_stmt_name = "parallel"; break;
2930 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2931 ctx_stmt_name = "kernels"; break;
2932 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2933 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2934 ctx_stmt_name = "host_data"; break;
2935 default: gcc_unreachable ();
2938 /* OpenACC/OpenMP mismatch? */
2939 if (is_gimple_omp_oacc (stmt)
2940 != is_gimple_omp_oacc (ctx->stmt))
2942 error_at (gimple_location (stmt),
2943 "%s %qs construct inside of %s %qs region",
2944 (is_gimple_omp_oacc (stmt)
2945 ? "OpenACC" : "OpenMP"), stmt_name,
2946 (is_gimple_omp_oacc (ctx->stmt)
2947 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2948 return false;
2950 if (is_gimple_omp_offloaded (ctx->stmt))
2952 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2953 if (is_gimple_omp_oacc (ctx->stmt))
2955 error_at (gimple_location (stmt),
2956 "%qs construct inside of %qs region",
2957 stmt_name, ctx_stmt_name);
2958 return false;
2960 else
2962 warning_at (gimple_location (stmt), 0,
2963 "%qs construct inside of %qs region",
2964 stmt_name, ctx_stmt_name);
2968 break;
2969 default:
2970 break;
2972 return true;
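/* An illustrative example of a violation diagnosed above:

     #pragma omp target
     #pragma omp teams
     {
       #pragma omp single
         ;
     }

   is rejected with "only 'distribute' or 'parallel' regions are
   allowed to be strictly nested inside 'teams' region".  */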
2976 /* Helper function for scan_omp.
2978 Callback for walk_tree or operators in walk_gimple_stmt used to
2979 scan for OMP directives in TP. */
2981 static tree
2982 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
2984 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2985 omp_context *ctx = (omp_context *) wi->info;
2986 tree t = *tp;
2988 switch (TREE_CODE (t))
2990 case VAR_DECL:
2991 case PARM_DECL:
2992 case LABEL_DECL:
2993 case RESULT_DECL:
2994 if (ctx)
2996 tree repl = remap_decl (t, &ctx->cb);
2997 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
2998 *tp = repl;
3000 break;
3002 default:
3003 if (ctx && TYPE_P (t))
3004 *tp = remap_type (t, &ctx->cb);
3005 else if (!DECL_P (t))
3007 *walk_subtrees = 1;
3008 if (ctx)
3010 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3011 if (tem != TREE_TYPE (t))
3013 if (TREE_CODE (t) == INTEGER_CST)
3014 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3015 else
3016 TREE_TYPE (t) = tem;
3020 break;
3023 return NULL_TREE;
3026 /* Return true if FNDECL is a setjmp or a longjmp. */
3028 static bool
3029 setjmp_or_longjmp_p (const_tree fndecl)
3031 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3032 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3033 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3034 return true;
3036 tree declname = DECL_NAME (fndecl);
3037 if (!declname)
3038 return false;
3039 const char *name = IDENTIFIER_POINTER (declname);
3040 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3044 /* Helper function for scan_omp.
3046 Callback for walk_gimple_stmt used to scan for OMP directives in
3047 the current statement in GSI. */
3049 static tree
3050 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3051 struct walk_stmt_info *wi)
3053 gimple *stmt = gsi_stmt (*gsi);
3054 omp_context *ctx = (omp_context *) wi->info;
3056 if (gimple_has_location (stmt))
3057 input_location = gimple_location (stmt);
3059 /* Check the nesting restrictions. */
3060 bool remove = false;
3061 if (is_gimple_omp (stmt))
3062 remove = !check_omp_nesting_restrictions (stmt, ctx);
3063 else if (is_gimple_call (stmt))
3065 tree fndecl = gimple_call_fndecl (stmt);
3066 if (fndecl)
3068 if (setjmp_or_longjmp_p (fndecl)
3069 && ctx
3070 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3071 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3073 remove = true;
3074 error_at (gimple_location (stmt),
3075 "setjmp/longjmp inside simd construct");
3077 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3078 switch (DECL_FUNCTION_CODE (fndecl))
3080 case BUILT_IN_GOMP_BARRIER:
3081 case BUILT_IN_GOMP_CANCEL:
3082 case BUILT_IN_GOMP_CANCELLATION_POINT:
3083 case BUILT_IN_GOMP_TASKYIELD:
3084 case BUILT_IN_GOMP_TASKWAIT:
3085 case BUILT_IN_GOMP_TASKGROUP_START:
3086 case BUILT_IN_GOMP_TASKGROUP_END:
3087 remove = !check_omp_nesting_restrictions (stmt, ctx);
3088 break;
3089 default:
3090 break;
3094 if (remove)
3096 stmt = gimple_build_nop ();
3097 gsi_replace (gsi, stmt, false);
3100 *handled_ops_p = true;
3102 switch (gimple_code (stmt))
3104 case GIMPLE_OMP_PARALLEL:
3105 taskreg_nesting_level++;
3106 scan_omp_parallel (gsi, ctx);
3107 taskreg_nesting_level--;
3108 break;
3110 case GIMPLE_OMP_TASK:
3111 taskreg_nesting_level++;
3112 scan_omp_task (gsi, ctx);
3113 taskreg_nesting_level--;
3114 break;
3116 case GIMPLE_OMP_FOR:
3117 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3118 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3119 && omp_maybe_offloaded_ctx (ctx)
3120 && omp_max_simt_vf ())
3121 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3122 else
3123 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3124 break;
3126 case GIMPLE_OMP_SECTIONS:
3127 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3128 break;
3130 case GIMPLE_OMP_SINGLE:
3131 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3132 break;
3134 case GIMPLE_OMP_SECTION:
3135 case GIMPLE_OMP_MASTER:
3136 case GIMPLE_OMP_TASKGROUP:
3137 case GIMPLE_OMP_ORDERED:
3138 case GIMPLE_OMP_CRITICAL:
3139 case GIMPLE_OMP_GRID_BODY:
3140 ctx = new_omp_context (stmt, ctx);
3141 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3142 break;
3144 case GIMPLE_OMP_TARGET:
3145 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3146 break;
3148 case GIMPLE_OMP_TEAMS:
3149 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3150 break;
3152 case GIMPLE_BIND:
3154 tree var;
3156 *handled_ops_p = false;
3157 if (ctx)
3158 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3159 var ;
3160 var = DECL_CHAIN (var))
3161 insert_decl_map (&ctx->cb, var, var);
3163 break;
3164 default:
3165 *handled_ops_p = false;
3166 break;
3169 return NULL_TREE;
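/* For illustration, the setjmp/longjmp check above rejects

     #pragma omp simd
     for (i = 0; i < n; i++)
       if (setjmp (buf))
         abort ();

   with "setjmp/longjmp inside simd construct", and the offending
   call is replaced by a GIMPLE_NOP so later passes never see it.  */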
3173 /* Scan all the statements starting at the current statement. CTX
3174 contains context information about the OMP directives and
3175 clauses found during the scan. */
3177 static void
3178 scan_omp (gimple_seq *body_p, omp_context *ctx)
3180 location_t saved_location;
3181 struct walk_stmt_info wi;
3183 memset (&wi, 0, sizeof (wi));
3184 wi.info = ctx;
3185 wi.want_locations = true;
3187 saved_location = input_location;
3188 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3189 input_location = saved_location;
3192 /* Re-gimplification and code generation routines. */
3194 /* If a context was created for STMT when it was scanned, return it. */
3196 static omp_context *
3197 maybe_lookup_ctx (gimple *stmt)
3199 splay_tree_node n;
3200 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3201 return n ? (omp_context *) n->value : NULL;
3205 /* Find the mapping for DECL in CTX or the immediately enclosing
3206 context that has a mapping for DECL.
3208 If CTX is a nested parallel directive, we may have to use the decl
3209 mappings created in CTX's parent context. Suppose that we have the
3210 following parallel nesting (variable UIDs shown for clarity):
3212 iD.1562 = 0;
3213 #omp parallel shared(iD.1562) -> outer parallel
3214 iD.1562 = iD.1562 + 1;
3216 #omp parallel shared (iD.1562) -> inner parallel
3217 iD.1562 = iD.1562 - 1;
3219 Each parallel structure will create a distinct .omp_data_s structure
3220 for copying iD.1562 in/out of the directive:
3222 outer parallel .omp_data_s.1.i -> iD.1562
3223 inner parallel .omp_data_s.2.i -> iD.1562
3225 A shared variable mapping will produce a copy-out operation before
3226 the parallel directive and a copy-in operation after it. So, in
3227 this case we would have:
3229 iD.1562 = 0;
3230 .omp_data_o.1.i = iD.1562;
3231 #omp parallel shared(iD.1562) -> outer parallel
3232 .omp_data_i.1 = &.omp_data_o.1
3233 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3235 .omp_data_o.2.i = iD.1562; -> **
3236 #omp parallel shared(iD.1562) -> inner parallel
3237 .omp_data_i.2 = &.omp_data_o.2
3238 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3241 ** This is a problem. The symbol iD.1562 cannot be referenced
3242 inside the body of the outer parallel region. But since we are
3243 emitting this copy operation while expanding the inner parallel
3244 directive, we need to access the CTX structure of the outer
3245 parallel directive to get the correct mapping:
3247 .omp_data_o.2.i = .omp_data_i.1->i
3249 Since there may be other workshare or parallel directives enclosing
3250 the parallel directive, it may be necessary to walk up the context
3251 parent chain. This is not a problem in general because nested
3252 parallelism happens only rarely. */
3254 static tree
3255 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3257 tree t;
3258 omp_context *up;
3260 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3261 t = maybe_lookup_decl (decl, up);
3263 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3265 return t ? t : decl;
3269 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3270 in outer contexts. */
3272 static tree
3273 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3275 tree t = NULL;
3276 omp_context *up;
3278 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3279 t = maybe_lookup_decl (decl, up);
3281 return t ? t : decl;
3285 /* Construct the initialization value for reduction operation OP. */
3287 tree
3288 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3290 switch (op)
3292 case PLUS_EXPR:
3293 case MINUS_EXPR:
3294 case BIT_IOR_EXPR:
3295 case BIT_XOR_EXPR:
3296 case TRUTH_OR_EXPR:
3297 case TRUTH_ORIF_EXPR:
3298 case TRUTH_XOR_EXPR:
3299 case NE_EXPR:
3300 return build_zero_cst (type);
3302 case MULT_EXPR:
3303 case TRUTH_AND_EXPR:
3304 case TRUTH_ANDIF_EXPR:
3305 case EQ_EXPR:
3306 return fold_convert_loc (loc, type, integer_one_node);
3308 case BIT_AND_EXPR:
3309 return fold_convert_loc (loc, type, integer_minus_one_node);
3311 case MAX_EXPR:
3312 if (SCALAR_FLOAT_TYPE_P (type))
3314 REAL_VALUE_TYPE max, min;
3315 if (HONOR_INFINITIES (type))
3317 real_inf (&max);
3318 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3320 else
3321 real_maxval (&min, 1, TYPE_MODE (type));
3322 return build_real (type, min);
3324 else if (POINTER_TYPE_P (type))
3326 wide_int min
3327 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3328 return wide_int_to_tree (type, min);
3330 else
3332 gcc_assert (INTEGRAL_TYPE_P (type));
3333 return TYPE_MIN_VALUE (type);
3336 case MIN_EXPR:
3337 if (SCALAR_FLOAT_TYPE_P (type))
3339 REAL_VALUE_TYPE max;
3340 if (HONOR_INFINITIES (type))
3341 real_inf (&max);
3342 else
3343 real_maxval (&max, 0, TYPE_MODE (type));
3344 return build_real (type, max);
3346 else if (POINTER_TYPE_P (type))
3348 wide_int max
3349 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3350 return wide_int_to_tree (type, max);
3352 else
3354 gcc_assert (INTEGRAL_TYPE_P (type));
3355 return TYPE_MAX_VALUE (type);
3358 default:
3359 gcc_unreachable ();
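/* Illustrative summary of the initializer values chosen above:

     reduction (+:x)    x = 0
     reduction (*:x)    x = 1
     reduction (&:x)    x = ~0
     reduction (max:x)  x = minimum of x's type (-inf if infinities
                            are honored)
     reduction (min:x)  x = maximum of x's type (+inf if infinities
                            are honored)  */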
3363 /* Construct the initialization value for reduction CLAUSE. */
3365 tree
3366 omp_reduction_init (tree clause, tree type)
3368 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3369 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3372 /* Return alignment to be assumed for var in CLAUSE, which should be
3373 OMP_CLAUSE_ALIGNED. */
3375 static tree
3376 omp_clause_aligned_alignment (tree clause)
3378 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3379 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3381 /* Otherwise return the implementation-defined alignment. */
3382 unsigned int al = 1;
3383 opt_scalar_mode mode_iter;
3384 auto_vector_sizes sizes;
3385 targetm.vectorize.autovectorize_vector_sizes (&sizes);
3386 poly_uint64 vs = 0;
3387 for (unsigned int i = 0; i < sizes.length (); ++i)
3388 vs = ordered_max (vs, sizes[i]);
3389 static enum mode_class classes[]
3390 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3391 for (int i = 0; i < 4; i += 2)
3392 /* The for loop above dictates that we only walk through scalar classes. */
3393 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3395 scalar_mode mode = mode_iter.require ();
3396 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3397 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3398 continue;
3399 while (maybe_ne (vs, 0U)
3400 && known_lt (GET_MODE_SIZE (vmode), vs)
3401 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3402 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3404 tree type = lang_hooks.types.type_for_mode (mode, 1);
3405 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3406 continue;
3407 unsigned int nelts = GET_MODE_SIZE (vmode) / GET_MODE_SIZE (mode);
3408 type = build_vector_type (type, nelts);
3409 if (TYPE_MODE (type) != vmode)
3410 continue;
3411 if (TYPE_ALIGN_UNIT (type) > al)
3412 al = TYPE_ALIGN_UNIT (type);
3414 return build_int_cst (integer_type_node, al);
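/* For illustration: with

     #pragma omp simd aligned (p)

   no explicit alignment is given, so the search above settles on the
   alignment of the widest supported vector type for each scalar mode;
   the resulting value is what lower_rec_input_clauses later passes to
   __builtin_assume_aligned for p.  */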
3418 /* This structure is part of the interface between lower_rec_simd_input_clauses
3419 and lower_rec_input_clauses. */
3421 struct omplow_simd_context {
3422 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3423 tree idx;
3424 tree lane;
3425 vec<tree, va_heap> simt_eargs;
3426 gimple_seq simt_dlist;
3427 poly_uint64_pod max_vf;
3428 bool is_simt;
3431 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3432 privatization. */
3434 static bool
3435 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3436 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3438 if (known_eq (sctx->max_vf, 0U))
3440 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3441 if (maybe_gt (sctx->max_vf, 1U))
3443 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3444 OMP_CLAUSE_SAFELEN);
3445 if (c)
3447 poly_uint64 safe_len;
3448 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3449 || maybe_lt (safe_len, 1U))
3450 sctx->max_vf = 1;
3451 else
3452 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3455 if (maybe_gt (sctx->max_vf, 1U))
3457 sctx->idx = create_tmp_var (unsigned_type_node);
3458 sctx->lane = create_tmp_var (unsigned_type_node);
3461 if (known_eq (sctx->max_vf, 1U))
3462 return false;
3464 if (sctx->is_simt)
3466 if (is_gimple_reg (new_var))
3468 ivar = lvar = new_var;
3469 return true;
3471 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3472 ivar = lvar = create_tmp_var (type);
3473 TREE_ADDRESSABLE (ivar) = 1;
3474 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3475 NULL, DECL_ATTRIBUTES (ivar));
3476 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3477 tree clobber = build_constructor (type, NULL);
3478 TREE_THIS_VOLATILE (clobber) = 1;
3479 gimple *g = gimple_build_assign (ivar, clobber);
3480 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3482 else
3484 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3485 tree avar = create_tmp_var_raw (atype);
3486 if (TREE_ADDRESSABLE (new_var))
3487 TREE_ADDRESSABLE (avar) = 1;
3488 DECL_ATTRIBUTES (avar)
3489 = tree_cons (get_identifier ("omp simd array"), NULL,
3490 DECL_ATTRIBUTES (avar));
3491 gimple_add_tmp_var (avar);
3492 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3493 NULL_TREE, NULL_TREE);
3494 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3495 NULL_TREE, NULL_TREE);
3497 if (DECL_P (new_var))
3499 SET_DECL_VALUE_EXPR (new_var, lvar);
3500 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3502 return true;
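/* Illustrative shape of the non-SIMT case above: privatizing a scalar

     #pragma omp simd private (x)

   creates an "omp simd array"

     T x_array[max_vf];

   where IVAR = x_array[idx] addresses the per-lane copy in the loop
   body and LVAR = x_array[lane] becomes x's DECL_VALUE_EXPR.  (T and
   x_array are stand-in names used only in this sketch.)  */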
3505 /* Helper function of lower_rec_input_clauses. For a reference
3506 in a simd reduction, add an underlying variable that it will reference. */
3508 static void
3509 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3511 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3512 if (TREE_CONSTANT (z))
3514 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3515 get_name (new_vard));
3516 gimple_add_tmp_var (z);
3517 TREE_ADDRESSABLE (z) = 1;
3518 z = build_fold_addr_expr_loc (loc, z);
3519 gimplify_assign (new_vard, z, ilist);
3523 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3524 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3525 private variables. Initialization statements go in ILIST, while calls
3526 to destructors go in DLIST. */
3528 static void
3529 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3530 omp_context *ctx, struct omp_for_data *fd)
3532 tree c, dtor, copyin_seq, x, ptr;
3533 bool copyin_by_ref = false;
3534 bool lastprivate_firstprivate = false;
3535 bool reduction_omp_orig_ref = false;
3536 int pass;
3537 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3538 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3539 omplow_simd_context sctx = omplow_simd_context ();
3540 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3541 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3542 gimple_seq llist[3] = { };
3544 copyin_seq = NULL;
3545 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3547 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3548 with data sharing clauses referencing variable sized vars. That
3549 is unnecessarily hard to support and very unlikely to result in
3550 vectorized code anyway. */
3551 if (is_simd)
3552 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3553 switch (OMP_CLAUSE_CODE (c))
3555 case OMP_CLAUSE_LINEAR:
3556 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3557 sctx.max_vf = 1;
3558 /* FALLTHRU */
3559 case OMP_CLAUSE_PRIVATE:
3560 case OMP_CLAUSE_FIRSTPRIVATE:
3561 case OMP_CLAUSE_LASTPRIVATE:
3562 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3563 sctx.max_vf = 1;
3564 break;
3565 case OMP_CLAUSE_REDUCTION:
3566 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3567 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3568 sctx.max_vf = 1;
3569 break;
3570 default:
3571 continue;
3574 /* Add a placeholder for simduid. */
3575 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3576 sctx.simt_eargs.safe_push (NULL_TREE);
3578 /* Do all the fixed sized types in the first pass, and the variable sized
3579 types in the second pass. This makes sure that the scalar arguments to
3580 the variable sized types are processed before we use them in the
3581 variable sized operations. */
3582 for (pass = 0; pass < 2; ++pass)
3584 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3586 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3587 tree var, new_var;
3588 bool by_ref;
3589 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3591 switch (c_kind)
3593 case OMP_CLAUSE_PRIVATE:
3594 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3595 continue;
3596 break;
3597 case OMP_CLAUSE_SHARED:
3598 /* Ignore shared directives in teams construct. */
3599 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3600 continue;
3601 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3603 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3604 || is_global_var (OMP_CLAUSE_DECL (c)));
3605 continue;
3607 case OMP_CLAUSE_FIRSTPRIVATE:
3608 case OMP_CLAUSE_COPYIN:
3609 break;
3610 case OMP_CLAUSE_LINEAR:
3611 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3612 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3613 lastprivate_firstprivate = true;
3614 break;
3615 case OMP_CLAUSE_REDUCTION:
3616 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3617 reduction_omp_orig_ref = true;
3618 break;
3619 case OMP_CLAUSE__LOOPTEMP_:
3620 /* Handle _looptemp_ clauses only on parallel/task. */
3621 if (fd)
3622 continue;
3623 break;
3624 case OMP_CLAUSE_LASTPRIVATE:
3625 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3627 lastprivate_firstprivate = true;
3628 if (pass != 0 || is_taskloop_ctx (ctx))
3629 continue;
3631 /* Even without corresponding firstprivate, if
3632 decl is Fortran allocatable, it needs outer var
3633 reference. */
3634 else if (pass == 0
3635 && lang_hooks.decls.omp_private_outer_ref
3636 (OMP_CLAUSE_DECL (c)))
3637 lastprivate_firstprivate = true;
3638 break;
3639 case OMP_CLAUSE_ALIGNED:
3640 if (pass == 0)
3641 continue;
3642 var = OMP_CLAUSE_DECL (c);
3643 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3644 && !is_global_var (var))
3646 new_var = maybe_lookup_decl (var, ctx);
3647 if (new_var == NULL_TREE)
3648 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3649 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3650 tree alarg = omp_clause_aligned_alignment (c);
3651 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3652 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3653 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3654 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3655 gimplify_and_add (x, ilist);
3657 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3658 && is_global_var (var))
3660 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3661 new_var = lookup_decl (var, ctx);
3662 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3663 t = build_fold_addr_expr_loc (clause_loc, t);
3664 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3665 tree alarg = omp_clause_aligned_alignment (c);
3666 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3667 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3668 t = fold_convert_loc (clause_loc, ptype, t);
3669 x = create_tmp_var (ptype);
3670 t = build2 (MODIFY_EXPR, ptype, x, t);
3671 gimplify_and_add (t, ilist);
3672 t = build_simple_mem_ref_loc (clause_loc, x);
3673 SET_DECL_VALUE_EXPR (new_var, t);
3674 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3676 continue;
3677 default:
3678 continue;
3681 new_var = var = OMP_CLAUSE_DECL (c);
3682 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3684 var = TREE_OPERAND (var, 0);
3685 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3686 var = TREE_OPERAND (var, 0);
3687 if (TREE_CODE (var) == INDIRECT_REF
3688 || TREE_CODE (var) == ADDR_EXPR)
3689 var = TREE_OPERAND (var, 0);
3690 if (is_variable_sized (var))
3692 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3693 var = DECL_VALUE_EXPR (var);
3694 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3695 var = TREE_OPERAND (var, 0);
3696 gcc_assert (DECL_P (var));
3698 new_var = var;
3700 if (c_kind != OMP_CLAUSE_COPYIN)
3701 new_var = lookup_decl (var, ctx);
3703 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3705 if (pass != 0)
3706 continue;
3708 /* C/C++ array section reductions. */
3709 else if (c_kind == OMP_CLAUSE_REDUCTION
3710 && var != OMP_CLAUSE_DECL (c))
3712 if (pass == 0)
3713 continue;
3715 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3716 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3717 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3719 tree b = TREE_OPERAND (orig_var, 1);
3720 b = maybe_lookup_decl (b, ctx);
3721 if (b == NULL)
3723 b = TREE_OPERAND (orig_var, 1);
3724 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3726 if (integer_zerop (bias))
3727 bias = b;
3728 else
3730 bias = fold_convert_loc (clause_loc,
3731 TREE_TYPE (b), bias);
3732 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3733 TREE_TYPE (b), b, bias);
3735 orig_var = TREE_OPERAND (orig_var, 0);
3737 if (TREE_CODE (orig_var) == INDIRECT_REF
3738 || TREE_CODE (orig_var) == ADDR_EXPR)
3739 orig_var = TREE_OPERAND (orig_var, 0);
3740 tree d = OMP_CLAUSE_DECL (c);
3741 tree type = TREE_TYPE (d);
3742 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3743 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3744 const char *name = get_name (orig_var);
3745 if (TREE_CONSTANT (v))
3747 x = create_tmp_var_raw (type, name);
3748 gimple_add_tmp_var (x);
3749 TREE_ADDRESSABLE (x) = 1;
3750 x = build_fold_addr_expr_loc (clause_loc, x);
3752 else
3754 tree atmp
3755 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3756 tree t = maybe_lookup_decl (v, ctx);
3757 if (t)
3758 v = t;
3759 else
3760 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3761 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3762 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3763 TREE_TYPE (v), v,
3764 build_int_cst (TREE_TYPE (v), 1));
3765 t = fold_build2_loc (clause_loc, MULT_EXPR,
3766 TREE_TYPE (v), t,
3767 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3768 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3769 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3772 tree ptype = build_pointer_type (TREE_TYPE (type));
3773 x = fold_convert_loc (clause_loc, ptype, x);
3774 tree y = create_tmp_var (ptype, name);
3775 gimplify_assign (y, x, ilist);
3776 x = y;
3777 tree yb = y;
3779 if (!integer_zerop (bias))
3781 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3782 bias);
3783 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3785 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3786 pointer_sized_int_node, yb, bias);
3787 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3788 yb = create_tmp_var (ptype, name);
3789 gimplify_assign (yb, x, ilist);
3790 x = yb;
3793 d = TREE_OPERAND (d, 0);
3794 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3795 d = TREE_OPERAND (d, 0);
3796 if (TREE_CODE (d) == ADDR_EXPR)
3798 if (orig_var != var)
3800 gcc_assert (is_variable_sized (orig_var));
3801 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3803 gimplify_assign (new_var, x, ilist);
3804 tree new_orig_var = lookup_decl (orig_var, ctx);
3805 tree t = build_fold_indirect_ref (new_var);
3806 DECL_IGNORED_P (new_var) = 0;
3807 TREE_THIS_NOTRAP (t);
3808 SET_DECL_VALUE_EXPR (new_orig_var, t);
3809 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3811 else
3813 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3814 build_int_cst (ptype, 0));
3815 SET_DECL_VALUE_EXPR (new_var, x);
3816 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3819 else
3821 gcc_assert (orig_var == var);
3822 if (TREE_CODE (d) == INDIRECT_REF)
3824 x = create_tmp_var (ptype, name);
3825 TREE_ADDRESSABLE (x) = 1;
3826 gimplify_assign (x, yb, ilist);
3827 x = build_fold_addr_expr_loc (clause_loc, x);
3829 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3830 gimplify_assign (new_var, x, ilist);
3832 tree y1 = create_tmp_var (ptype, NULL);
3833 gimplify_assign (y1, y, ilist);
3834 tree i2 = NULL_TREE, y2 = NULL_TREE;
3835 tree body2 = NULL_TREE, end2 = NULL_TREE;
3836 tree y3 = NULL_TREE, y4 = NULL_TREE;
3837 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3839 y2 = create_tmp_var (ptype, NULL);
3840 gimplify_assign (y2, y, ilist);
3841 tree ref = build_outer_var_ref (var, ctx);
3842 /* For a reference, build_outer_var_ref already performs this. */
3843 if (TREE_CODE (d) == INDIRECT_REF)
3844 gcc_assert (omp_is_reference (var));
3845 else if (TREE_CODE (d) == ADDR_EXPR)
3846 ref = build_fold_addr_expr (ref);
3847 else if (omp_is_reference (var))
3848 ref = build_fold_addr_expr (ref);
3849 ref = fold_convert_loc (clause_loc, ptype, ref);
3850 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3851 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3853 y3 = create_tmp_var (ptype, NULL);
3854 gimplify_assign (y3, unshare_expr (ref), ilist);
3856 if (is_simd)
3858 y4 = create_tmp_var (ptype, NULL);
3859 gimplify_assign (y4, ref, dlist);
3862 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3863 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3864 tree body = create_artificial_label (UNKNOWN_LOCATION);
3865 tree end = create_artificial_label (UNKNOWN_LOCATION);
3866 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3867 if (y2)
3869 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3870 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3871 body2 = create_artificial_label (UNKNOWN_LOCATION);
3872 end2 = create_artificial_label (UNKNOWN_LOCATION);
3873 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3875 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3877 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3878 tree decl_placeholder
3879 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3880 SET_DECL_VALUE_EXPR (decl_placeholder,
3881 build_simple_mem_ref (y1));
3882 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3883 SET_DECL_VALUE_EXPR (placeholder,
3884 y3 ? build_simple_mem_ref (y3)
3885 : error_mark_node);
3886 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3887 x = lang_hooks.decls.omp_clause_default_ctor
3888 (c, build_simple_mem_ref (y1),
3889 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3890 if (x)
3891 gimplify_and_add (x, ilist);
3892 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3894 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3895 lower_omp (&tseq, ctx);
3896 gimple_seq_add_seq (ilist, tseq);
3898 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3899 if (is_simd)
3901 SET_DECL_VALUE_EXPR (decl_placeholder,
3902 build_simple_mem_ref (y2));
3903 SET_DECL_VALUE_EXPR (placeholder,
3904 build_simple_mem_ref (y4));
3905 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3906 lower_omp (&tseq, ctx);
3907 gimple_seq_add_seq (dlist, tseq);
3908 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3910 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3911 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3912 x = lang_hooks.decls.omp_clause_dtor
3913 (c, build_simple_mem_ref (y2));
3914 if (x)
3916 gimple_seq tseq = NULL;
3917 dtor = x;
3918 gimplify_stmt (&dtor, &tseq);
3919 gimple_seq_add_seq (dlist, tseq);
3922 else
3924 x = omp_reduction_init (c, TREE_TYPE (type));
3925 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3927 /* reduction(-:var) sums up the partial results, so it
3928 acts identically to reduction(+:var). */
3929 if (code == MINUS_EXPR)
3930 code = PLUS_EXPR;
3932 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3933 if (is_simd)
3935 x = build2 (code, TREE_TYPE (type),
3936 build_simple_mem_ref (y4),
3937 build_simple_mem_ref (y2));
3938 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3941 gimple *g
3942 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3943 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3944 gimple_seq_add_stmt (ilist, g);
3945 if (y3)
3947 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3948 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3949 gimple_seq_add_stmt (ilist, g);
3951 g = gimple_build_assign (i, PLUS_EXPR, i,
3952 build_int_cst (TREE_TYPE (i), 1));
3953 gimple_seq_add_stmt (ilist, g);
3954 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3955 gimple_seq_add_stmt (ilist, g);
3956 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3957 if (y2)
3959 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3960 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3961 gimple_seq_add_stmt (dlist, g);
3962 if (y4)
3964 g = gimple_build_assign
3965 (y4, POINTER_PLUS_EXPR, y4,
3966 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3967 gimple_seq_add_stmt (dlist, g);
3969 g = gimple_build_assign (i2, PLUS_EXPR, i2,
3970 build_int_cst (TREE_TYPE (i2), 1));
3971 gimple_seq_add_stmt (dlist, g);
3972 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
3973 gimple_seq_add_stmt (dlist, g);
3974 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
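/* Schematically, the sequences just built are two loops, the first
   (initialization) in ILIST and the second (merge, when needed) in
   DLIST:
     for (i = 0; i <= v; i++)
       *y1 = <init>, y1++;              // and y3++ for UDR orig refs
     for (i2 = 0; i2 <= v; i2++)
       *y4 = *y4 OP *y2, y4++, y2++;    // or the UDR merge sequence  */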
3976 continue;
3978 else if (is_variable_sized (var))
3980 /* For variable-sized types, we need to allocate the
3981 actual storage here. Call alloca and store the
3982 result in the pointer decl that we created elsewhere. */
3983 if (pass == 0)
3984 continue;
3986 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3988 gcall *stmt;
3989 tree tmp, atmp;
3991 ptr = DECL_VALUE_EXPR (new_var);
3992 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3993 ptr = TREE_OPERAND (ptr, 0);
3994 gcc_assert (DECL_P (ptr));
3995 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
3997 /* void *tmp = __builtin_alloca_with_align (x, DECL_ALIGN (var)); */
3998 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3999 stmt = gimple_build_call (atmp, 2, x,
4000 size_int (DECL_ALIGN (var)));
4001 tmp = create_tmp_var_raw (ptr_type_node);
4002 gimple_add_tmp_var (tmp);
4003 gimple_call_set_lhs (stmt, tmp);
4005 gimple_seq_add_stmt (ilist, stmt);
4007 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4008 gimplify_assign (ptr, x, ilist);
4011 else if (omp_is_reference (var))
4013 /* For references that are being privatized for Fortran,
4014 allocate new backing storage for the new pointer
4015 variable. This allows us to avoid having to change all the
4016 code that expects a pointer into code that expects
4017 a direct variable. */
4018 if (pass == 0)
4019 continue;
4021 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4022 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4024 x = build_receiver_ref (var, false, ctx);
4025 x = build_fold_addr_expr_loc (clause_loc, x);
4027 else if (TREE_CONSTANT (x))
4029 /* For a reduction in a SIMD loop, defer adding the
4030 initialization of the reference, because if we decide
4031 to use a SIMD array for it, the initialization could cause
4032 an ICE during expansion. */
4033 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4034 x = NULL_TREE;
4035 else
4037 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4038 get_name (var));
4039 gimple_add_tmp_var (x);
4040 TREE_ADDRESSABLE (x) = 1;
4041 x = build_fold_addr_expr_loc (clause_loc, x);
4044 else
4046 tree atmp
4047 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4048 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4049 tree al = size_int (TYPE_ALIGN (rtype));
4050 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4053 if (x)
4055 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4056 gimplify_assign (new_var, x, ilist);
4059 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4061 else if (c_kind == OMP_CLAUSE_REDUCTION
4062 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4063 {
4064 if (pass == 0)
4065 continue;
4066 }
4067 else if (pass != 0)
4068 continue;
4070 switch (OMP_CLAUSE_CODE (c))
4072 case OMP_CLAUSE_SHARED:
4073 /* Ignore shared directives in a teams construct. */
4074 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4075 continue;
4076 /* Shared global vars are just accessed directly. */
4077 if (is_global_var (new_var))
4078 break;
4079 /* For taskloop firstprivate/lastprivate, which is represented
4080 as a firstprivate and a shared clause on the task, new_var
4081 is the firstprivate var. */
4082 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4083 break;
4084 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4085 needs to be delayed until after fixup_child_record_type so
4086 that we get the correct type during the dereference. */
4087 by_ref = use_pointer_for_field (var, ctx);
4088 x = build_receiver_ref (var, by_ref, ctx);
4089 SET_DECL_VALUE_EXPR (new_var, x);
4090 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4092 /* ??? If VAR is not passed by reference, and the variable
4093 hasn't been initialized yet, then we'll get a warning for
4094 the store into the omp_data_s structure. Ideally, we'd be
4095 able to notice this and not store anything at all, but
4096 we're generating code too early. Suppress the warning. */
4097 if (!by_ref)
4098 TREE_NO_WARNING (var) = 1;
4099 break;
4101 case OMP_CLAUSE_LASTPRIVATE:
4102 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4103 break;
4104 /* FALLTHRU */
4106 case OMP_CLAUSE_PRIVATE:
4107 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4108 x = build_outer_var_ref (var, ctx);
4109 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4111 if (is_task_ctx (ctx))
4112 x = build_receiver_ref (var, false, ctx);
4113 else
4114 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4116 else
4117 x = NULL;
4118 do_private:
4119 tree nx;
4120 nx = lang_hooks.decls.omp_clause_default_ctor
4121 (c, unshare_expr (new_var), x);
4122 if (is_simd)
4124 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4125 if ((TREE_ADDRESSABLE (new_var) || nx || y
4126 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4127 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4128 ivar, lvar))
4130 if (nx)
4131 x = lang_hooks.decls.omp_clause_default_ctor
4132 (c, unshare_expr (ivar), x);
4133 if (nx && x)
4134 gimplify_and_add (x, &llist[0]);
4135 if (y)
4137 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4138 if (y)
4140 gimple_seq tseq = NULL;
4142 dtor = y;
4143 gimplify_stmt (&dtor, &tseq);
4144 gimple_seq_add_seq (&llist[1], tseq);
4147 break;
4150 if (nx)
4151 gimplify_and_add (nx, ilist);
4152 /* FALLTHRU */
4154 do_dtor:
4155 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4156 if (x)
4158 gimple_seq tseq = NULL;
4160 dtor = x;
4161 gimplify_stmt (&dtor, &tseq);
4162 gimple_seq_add_seq (dlist, tseq);
4164 break;
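/* Roughly, a plain private (x) clause thus yields
     ILIST:  x' = <default ctor> ();   // if the type needs construction
     DLIST:  <dtor> (x');              // if the type needs destruction
   and for SIMD the same pair is emitted per-lane into LLIST[0] and
   LLIST[1] instead.  */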
4166 case OMP_CLAUSE_LINEAR:
4167 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4168 goto do_firstprivate;
4169 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4170 x = NULL;
4171 else
4172 x = build_outer_var_ref (var, ctx);
4173 goto do_private;
4175 case OMP_CLAUSE_FIRSTPRIVATE:
4176 if (is_task_ctx (ctx))
4178 if (omp_is_reference (var) || is_variable_sized (var))
4179 goto do_dtor;
4180 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4181 ctx))
4182 || use_pointer_for_field (var, NULL))
4184 x = build_receiver_ref (var, false, ctx);
4185 SET_DECL_VALUE_EXPR (new_var, x);
4186 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4187 goto do_dtor;
4190 do_firstprivate:
4191 x = build_outer_var_ref (var, ctx);
4192 if (is_simd)
4194 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4195 && gimple_omp_for_combined_into_p (ctx->stmt))
4197 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4198 tree stept = TREE_TYPE (t);
4199 tree ct = omp_find_clause (clauses,
4200 OMP_CLAUSE__LOOPTEMP_);
4201 gcc_assert (ct);
4202 tree l = OMP_CLAUSE_DECL (ct);
4203 tree n1 = fd->loop.n1;
4204 tree step = fd->loop.step;
4205 tree itype = TREE_TYPE (l);
4206 if (POINTER_TYPE_P (itype))
4207 itype = signed_type_for (itype);
4208 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4209 if (TYPE_UNSIGNED (itype)
4210 && fd->loop.cond_code == GT_EXPR)
4211 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4212 fold_build1 (NEGATE_EXPR, itype, l),
4213 fold_build1 (NEGATE_EXPR,
4214 itype, step));
4215 else
4216 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4217 t = fold_build2 (MULT_EXPR, stept,
4218 fold_convert (stept, l), t);
4220 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4222 x = lang_hooks.decls.omp_clause_linear_ctor
4223 (c, new_var, x, t);
4224 gimplify_and_add (x, ilist);
4225 goto do_dtor;
4228 if (POINTER_TYPE_P (TREE_TYPE (x)))
4229 x = fold_build2 (POINTER_PLUS_EXPR,
4230 TREE_TYPE (x), x, t);
4231 else
4232 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
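/* A rough sketch of the value computed above: the private copy of a
   linear variable on a combined construct starts at
     outer_var + ((l - n1) / step) * linear_step
   where l is the _looptemp_ holding this thread's first iteration and
   n1/step come from the enclosing loop.  */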
4235 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4236 || TREE_ADDRESSABLE (new_var))
4237 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4238 ivar, lvar))
4240 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4242 tree iv = create_tmp_var (TREE_TYPE (new_var));
4243 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4244 gimplify_and_add (x, ilist);
4245 gimple_stmt_iterator gsi
4246 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4247 gassign *g
4248 = gimple_build_assign (unshare_expr (lvar), iv);
4249 gsi_insert_before_without_update (&gsi, g,
4250 GSI_SAME_STMT);
4251 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4252 enum tree_code code = PLUS_EXPR;
4253 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4254 code = POINTER_PLUS_EXPR;
4255 g = gimple_build_assign (iv, code, iv, t);
4256 gsi_insert_before_without_update (&gsi, g,
4257 GSI_SAME_STMT);
4258 break;
4260 x = lang_hooks.decls.omp_clause_copy_ctor
4261 (c, unshare_expr (ivar), x);
4262 gimplify_and_add (x, &llist[0]);
4263 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4264 if (x)
4266 gimple_seq tseq = NULL;
4268 dtor = x;
4269 gimplify_stmt (&dtor, &tseq);
4270 gimple_seq_add_seq (&llist[1], tseq);
4272 break;
4275 x = lang_hooks.decls.omp_clause_copy_ctor
4276 (c, unshare_expr (new_var), x);
4277 gimplify_and_add (x, ilist);
4278 goto do_dtor;
4280 case OMP_CLAUSE__LOOPTEMP_:
4281 gcc_assert (is_taskreg_ctx (ctx));
4282 x = build_outer_var_ref (var, ctx);
4283 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4284 gimplify_and_add (x, ilist);
4285 break;
4287 case OMP_CLAUSE_COPYIN:
4288 by_ref = use_pointer_for_field (var, NULL);
4289 x = build_receiver_ref (var, by_ref, ctx);
4290 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4291 append_to_statement_list (x, &copyin_seq);
4292 copyin_by_ref |= by_ref;
4293 break;
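/* The copy is only queued in COPYIN_SEQ here; it is emitted further
   below guarded by a thread-number check, roughly:
     if (omp_get_thread_num () != 0)
       <threadprivate copy> = <master copy>;  */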
4295 case OMP_CLAUSE_REDUCTION:
4296 /* OpenACC reductions are initialized using the
4297 GOACC_REDUCTION internal function. */
4298 if (is_gimple_omp_oacc (ctx->stmt))
4299 break;
4300 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4302 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4303 gimple *tseq;
4304 x = build_outer_var_ref (var, ctx);
4306 if (omp_is_reference (var)
4307 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4308 TREE_TYPE (x)))
4309 x = build_fold_addr_expr_loc (clause_loc, x);
4310 SET_DECL_VALUE_EXPR (placeholder, x);
4311 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4312 tree new_vard = new_var;
4313 if (omp_is_reference (var))
4315 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4316 new_vard = TREE_OPERAND (new_var, 0);
4317 gcc_assert (DECL_P (new_vard));
4319 if (is_simd
4320 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4321 ivar, lvar))
4323 if (new_vard == new_var)
4325 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4326 SET_DECL_VALUE_EXPR (new_var, ivar);
4328 else
4330 SET_DECL_VALUE_EXPR (new_vard,
4331 build_fold_addr_expr (ivar));
4332 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4334 x = lang_hooks.decls.omp_clause_default_ctor
4335 (c, unshare_expr (ivar),
4336 build_outer_var_ref (var, ctx));
4337 if (x)
4338 gimplify_and_add (x, &llist[0]);
4339 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4341 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4342 lower_omp (&tseq, ctx);
4343 gimple_seq_add_seq (&llist[0], tseq);
4345 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4346 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4347 lower_omp (&tseq, ctx);
4348 gimple_seq_add_seq (&llist[1], tseq);
4349 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4350 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4351 if (new_vard == new_var)
4352 SET_DECL_VALUE_EXPR (new_var, lvar);
4353 else
4354 SET_DECL_VALUE_EXPR (new_vard,
4355 build_fold_addr_expr (lvar));
4356 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4357 if (x)
4359 tseq = NULL;
4360 dtor = x;
4361 gimplify_stmt (&dtor, &tseq);
4362 gimple_seq_add_seq (&llist[1], tseq);
4364 break;
4366 /* If this is a reference to a constant-size reduction var
4367 with a placeholder, we haven't emitted the initializer
4368 for it yet, because it is undesirable if SIMD arrays end up
4369 being used. But if they aren't used, we need to emit the
4370 deferred initialization now. */
4371 else if (omp_is_reference (var) && is_simd)
4372 handle_simd_reference (clause_loc, new_vard, ilist);
4373 x = lang_hooks.decls.omp_clause_default_ctor
4374 (c, unshare_expr (new_var),
4375 build_outer_var_ref (var, ctx));
4376 if (x)
4377 gimplify_and_add (x, ilist);
4378 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4380 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4381 lower_omp (&tseq, ctx);
4382 gimple_seq_add_seq (ilist, tseq);
4384 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4385 if (is_simd)
4387 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4388 lower_omp (&tseq, ctx);
4389 gimple_seq_add_seq (dlist, tseq);
4390 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4392 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4393 goto do_dtor;
4395 else
4397 x = omp_reduction_init (c, TREE_TYPE (new_var));
4398 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4399 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4401 /* reduction(-:var) sums up the partial results, so it
4402 acts identically to reduction(+:var). */
4403 if (code == MINUS_EXPR)
4404 code = PLUS_EXPR;
4406 tree new_vard = new_var;
4407 if (is_simd && omp_is_reference (var))
4409 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4410 new_vard = TREE_OPERAND (new_var, 0);
4411 gcc_assert (DECL_P (new_vard));
4413 if (is_simd
4414 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4415 ivar, lvar))
4417 tree ref = build_outer_var_ref (var, ctx);
4419 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4421 if (sctx.is_simt)
4423 if (!simt_lane)
4424 simt_lane = create_tmp_var (unsigned_type_node);
4425 x = build_call_expr_internal_loc
4426 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4427 TREE_TYPE (ivar), 2, ivar, simt_lane);
4428 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4429 gimplify_assign (ivar, x, &llist[2]);
4431 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4432 ref = build_outer_var_ref (var, ctx);
4433 gimplify_assign (ref, x, &llist[1]);
4435 if (new_vard != new_var)
4437 SET_DECL_VALUE_EXPR (new_vard,
4438 build_fold_addr_expr (lvar));
4439 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4442 else
4444 if (omp_is_reference (var) && is_simd)
4445 handle_simd_reference (clause_loc, new_vard, ilist);
4446 gimplify_assign (new_var, x, ilist);
4447 if (is_simd)
4449 tree ref = build_outer_var_ref (var, ctx);
4451 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4452 ref = build_outer_var_ref (var, ctx);
4453 gimplify_assign (ref, x, dlist);
4457 break;
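/* So for a simple scalar reduction (+:x) without a placeholder this
   amounts to, roughly:
     ILIST:  x' = 0;                    // omp_reduction_init value
     DLIST:  outer_x = outer_x + x';    // non-SIMD case  */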
4459 default:
4460 gcc_unreachable ();
4465 if (known_eq (sctx.max_vf, 1U))
4466 sctx.is_simt = false;
4468 if (sctx.lane || sctx.is_simt)
4470 uid = create_tmp_var (ptr_type_node, "simduid");
4471 /* We don't want uninitialized-use warnings on simduid: it is always
4472 uninitialized, as we use it only for its DECL_UID, never for its value. */
4473 TREE_NO_WARNING (uid) = 1;
4474 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4475 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4476 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4477 gimple_omp_for_set_clauses (ctx->stmt, c);
4479 /* Emit calls denoting privatized variables and initializing a pointer to
4480 the structure that holds private variables as fields after the ompdevlow pass. */
4481 if (sctx.is_simt)
4483 sctx.simt_eargs[0] = uid;
4484 gimple *g
4485 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4486 gimple_call_set_lhs (g, uid);
4487 gimple_seq_add_stmt (ilist, g);
4488 sctx.simt_eargs.release ();
4490 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4491 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4492 gimple_call_set_lhs (g, simtrec);
4493 gimple_seq_add_stmt (ilist, g);
4495 if (sctx.lane)
4497 gimple *g
4498 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4499 gimple_call_set_lhs (g, sctx.lane);
4500 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4501 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4502 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4503 build_int_cst (unsigned_type_node, 0));
4504 gimple_seq_add_stmt (ilist, g);
4505 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4506 if (llist[2])
4508 tree simt_vf = create_tmp_var (unsigned_type_node);
4509 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4510 gimple_call_set_lhs (g, simt_vf);
4511 gimple_seq_add_stmt (dlist, g);
4513 tree t = build_int_cst (unsigned_type_node, 1);
4514 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4515 gimple_seq_add_stmt (dlist, g);
4517 t = build_int_cst (unsigned_type_node, 0);
4518 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4519 gimple_seq_add_stmt (dlist, g);
4521 tree body = create_artificial_label (UNKNOWN_LOCATION);
4522 tree header = create_artificial_label (UNKNOWN_LOCATION);
4523 tree end = create_artificial_label (UNKNOWN_LOCATION);
4524 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4525 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4527 gimple_seq_add_seq (dlist, llist[2]);
4529 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4530 gimple_seq_add_stmt (dlist, g);
4532 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4533 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4534 gimple_seq_add_stmt (dlist, g);
4536 gimple_seq_add_stmt (dlist, gimple_build_label (end));
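/* The loop just emitted performs the butterfly reduction, roughly:
     for (simt_lane = 1; simt_lane < simt_vf; simt_lane <<= 1)
       ivar = ivar OP GOMP_SIMT_XCHG_BFLY (ivar, simt_lane);
   i.e. log2 (simt_vf) combining steps across the SIMT lanes.  */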
4538 for (int i = 0; i < 2; i++)
4539 if (llist[i])
4541 tree vf = create_tmp_var (unsigned_type_node);
4542 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4543 gimple_call_set_lhs (g, vf);
4544 gimple_seq *seq = i == 0 ? ilist : dlist;
4545 gimple_seq_add_stmt (seq, g);
4546 tree t = build_int_cst (unsigned_type_node, 0);
4547 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4548 gimple_seq_add_stmt (seq, g);
4549 tree body = create_artificial_label (UNKNOWN_LOCATION);
4550 tree header = create_artificial_label (UNKNOWN_LOCATION);
4551 tree end = create_artificial_label (UNKNOWN_LOCATION);
4552 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4553 gimple_seq_add_stmt (seq, gimple_build_label (body));
4554 gimple_seq_add_seq (seq, llist[i]);
4555 t = build_int_cst (unsigned_type_node, 1);
4556 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4557 gimple_seq_add_stmt (seq, g);
4558 gimple_seq_add_stmt (seq, gimple_build_label (header));
4559 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4560 gimple_seq_add_stmt (seq, g);
4561 gimple_seq_add_stmt (seq, gimple_build_label (end));
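/* Each non-empty LLIST[i] is thus wrapped in a loop over all lanes,
   roughly:
     for (sctx.idx = 0; sctx.idx < GOMP_SIMD_VF (simduid); sctx.idx++)
       <llist[i]>
   with per-lane constructors (i == 0) in ILIST and per-lane
   destructors/merges (i == 1) in DLIST.  */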
4564 if (sctx.is_simt)
4566 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4567 gimple *g
4568 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4569 gimple_seq_add_stmt (dlist, g);
4572 /* The copyin sequence is not to be executed by the main thread, since
4573 that would result in self-copies. That may be unobservable for
4574 scalars, but it certainly is observable for C++ operator=. */
4575 if (copyin_seq)
4577 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM), 0);
4579 x = build2 (NE_EXPR, boolean_type_node, x,
4580 build_int_cst (TREE_TYPE (x), 0));
4581 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4582 gimplify_and_add (x, ilist);
4585 /* If any copyin variable is passed by reference, we must ensure the
4586 master thread doesn't modify it before it is copied over in all
4587 threads. Similarly for variables in both firstprivate and
4588 lastprivate clauses we need to ensure the lastprivate copying
4589 happens after firstprivate copying in all threads. And similarly
4590 for UDRs if the initializer expression refers to omp_orig. */
4591 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4593 /* Don't add any barrier for #pragma omp simd or
4594 #pragma omp distribute. */
4595 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4596 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4597 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4600 /* If max_vf is non-zero, then we can use only a vectorization factor
4601 up to the max_vf we chose. So stick it into the safelen clause. */
4602 if (maybe_ne (sctx.max_vf, 0U))
4604 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4605 OMP_CLAUSE_SAFELEN);
4606 poly_uint64 safe_len;
4607 if (c == NULL_TREE
4608 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4609 && maybe_gt (safe_len, sctx.max_vf)))
4611 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4612 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4613 sctx.max_vf);
4614 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4615 gimple_omp_for_set_clauses (ctx->stmt, c);
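/* E.g. if we privatized with max_vf == 16 and the loop had no safelen
   clause (or a larger one), this prepends safelen (16) so the
   vectorizer never uses more lanes than were privatized.  */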
4621 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4622 both parallel and workshare constructs. PREDICATE may be NULL if it's
4623 always true. */
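/* Schematically, the emitted sequence has the shape
     if (PREDICATE)                 // e.g. "this was the last iteration"
       {
         orig_var1 = priv_var1;
         orig_var2 = priv_var2;
         ...
       }
   where for SIMD the private value is first recovered from the last
   active lane of its "omp simd array".  */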
4625 static void
4626 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4627 omp_context *ctx)
4629 tree x, c, label = NULL, orig_clauses = clauses;
4630 bool par_clauses = false;
4631 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4633 /* Early exit if there are no lastprivate or linear clauses. */
4634 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4635 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4636 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4637 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4638 break;
4639 if (clauses == NULL)
4641 /* If this was a workshare clause, see if it had been combined
4642 with its parallel. In that case, look for the clauses on the
4643 parallel statement itself. */
4644 if (is_parallel_ctx (ctx))
4645 return;
4647 ctx = ctx->outer;
4648 if (ctx == NULL || !is_parallel_ctx (ctx))
4649 return;
4651 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4652 OMP_CLAUSE_LASTPRIVATE);
4653 if (clauses == NULL)
4654 return;
4655 par_clauses = true;
4658 bool maybe_simt = false;
4659 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4660 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4662 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4663 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4664 if (simduid)
4665 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4668 if (predicate)
4670 gcond *stmt;
4671 tree label_true, arm1, arm2;
4672 enum tree_code pred_code = TREE_CODE (predicate);
4674 label = create_artificial_label (UNKNOWN_LOCATION);
4675 label_true = create_artificial_label (UNKNOWN_LOCATION);
4676 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4678 arm1 = TREE_OPERAND (predicate, 0);
4679 arm2 = TREE_OPERAND (predicate, 1);
4680 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4681 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4683 else
4685 arm1 = predicate;
4686 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4687 arm2 = boolean_false_node;
4688 pred_code = NE_EXPR;
4690 if (maybe_simt)
4692 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4693 c = fold_convert (integer_type_node, c);
4694 simtcond = create_tmp_var (integer_type_node);
4695 gimplify_assign (simtcond, c, stmt_list);
4696 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4697 1, simtcond);
4698 c = create_tmp_var (integer_type_node);
4699 gimple_call_set_lhs (g, c);
4700 gimple_seq_add_stmt (stmt_list, g);
4701 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4702 label_true, label);
4704 else
4705 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4706 gimple_seq_add_stmt (stmt_list, stmt);
4707 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4710 for (c = clauses; c ;)
4712 tree var, new_var;
4713 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4715 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4716 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4717 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4719 var = OMP_CLAUSE_DECL (c);
4720 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4721 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4722 && is_taskloop_ctx (ctx))
4724 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4725 new_var = lookup_decl (var, ctx->outer);
4727 else
4729 new_var = lookup_decl (var, ctx);
4730 /* Avoid uninitialized warnings for lastprivate and
4731 for linear iterators. */
4732 if (predicate
4733 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4734 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4735 TREE_NO_WARNING (new_var) = 1;
4738 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4740 tree val = DECL_VALUE_EXPR (new_var);
4741 if (TREE_CODE (val) == ARRAY_REF
4742 && VAR_P (TREE_OPERAND (val, 0))
4743 && lookup_attribute ("omp simd array",
4744 DECL_ATTRIBUTES (TREE_OPERAND (val,
4745 0))))
4747 if (lastlane == NULL)
4749 lastlane = create_tmp_var (unsigned_type_node);
4750 gcall *g
4751 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4752 2, simduid,
4753 TREE_OPERAND (val, 1));
4754 gimple_call_set_lhs (g, lastlane);
4755 gimple_seq_add_stmt (stmt_list, g);
4757 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4758 TREE_OPERAND (val, 0), lastlane,
4759 NULL_TREE, NULL_TREE);
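/* I.e., schematically:
     lastlane = GOMP_SIMD_LAST_LANE (simduid, <lane index>);
     <value> = <omp simd array>[lastlane];  */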
4762 else if (maybe_simt)
4764 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4765 ? DECL_VALUE_EXPR (new_var)
4766 : new_var);
4767 if (simtlast == NULL)
4769 simtlast = create_tmp_var (unsigned_type_node);
4770 gcall *g = gimple_build_call_internal
4771 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4772 gimple_call_set_lhs (g, simtlast);
4773 gimple_seq_add_stmt (stmt_list, g);
4775 x = build_call_expr_internal_loc
4776 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4777 TREE_TYPE (val), 2, val, simtlast);
4778 new_var = unshare_expr (new_var);
4779 gimplify_assign (new_var, x, stmt_list);
4780 new_var = unshare_expr (new_var);
4783 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4784 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4786 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4787 gimple_seq_add_seq (stmt_list,
4788 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4789 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4791 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4792 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4794 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4795 gimple_seq_add_seq (stmt_list,
4796 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4797 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4800 x = NULL_TREE;
4801 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4802 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4804 gcc_checking_assert (is_taskloop_ctx (ctx));
4805 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4806 ctx->outer->outer);
4807 if (is_global_var (ovar))
4808 x = ovar;
4810 if (!x)
4811 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4812 if (omp_is_reference (var))
4813 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4814 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4815 gimplify_and_add (x, stmt_list);
4817 c = OMP_CLAUSE_CHAIN (c);
4818 if (c == NULL && !par_clauses)
4820 /* If this was a workshare clause, see if it had been combined
4821 with its parallel. In that case, continue looking for the
4822 clauses also on the parallel statement itself. */
4823 if (is_parallel_ctx (ctx))
4824 break;
4826 ctx = ctx->outer;
4827 if (ctx == NULL || !is_parallel_ctx (ctx))
4828 break;
4830 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4831 OMP_CLAUSE_LASTPRIVATE);
4832 par_clauses = true;
4836 if (label)
4837 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4840 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4841 (which might be a placeholder). INNER is true if this is an inner
4842 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4843 join markers. Generate the before-loop forking sequence in
4844 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
4845 general form of these sequences is
4847 GOACC_REDUCTION_SETUP
4848 GOACC_FORK
4849 GOACC_REDUCTION_INIT
4851 GOACC_REDUCTION_FINI
4852 GOACC_JOIN
4853 GOACC_REDUCTION_TEARDOWN. */
4855 static void
4856 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4857 gcall *fork, gcall *join, gimple_seq *fork_seq,
4858 gimple_seq *join_seq, omp_context *ctx)
4860 gimple_seq before_fork = NULL;
4861 gimple_seq after_fork = NULL;
4862 gimple_seq before_join = NULL;
4863 gimple_seq after_join = NULL;
4864 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4865 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4866 unsigned offset = 0;
4868 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4869 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4871 tree orig = OMP_CLAUSE_DECL (c);
4872 tree var = maybe_lookup_decl (orig, ctx);
4873 tree ref_to_res = NULL_TREE;
4874 tree incoming, outgoing, v1, v2, v3;
4875 bool is_private = false;
4877 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4878 if (rcode == MINUS_EXPR)
4879 rcode = PLUS_EXPR;
4880 else if (rcode == TRUTH_ANDIF_EXPR)
4881 rcode = BIT_AND_EXPR;
4882 else if (rcode == TRUTH_ORIF_EXPR)
4883 rcode = BIT_IOR_EXPR;
4884 tree op = build_int_cst (unsigned_type_node, rcode);
4886 if (!var)
4887 var = orig;
4889 incoming = outgoing = var;
4891 if (!inner)
4893 /* See if an outer construct also reduces this variable. */
4894 omp_context *outer = ctx;
4896 while (omp_context *probe = outer->outer)
4898 enum gimple_code type = gimple_code (probe->stmt);
4899 tree cls;
4901 switch (type)
4903 case GIMPLE_OMP_FOR:
4904 cls = gimple_omp_for_clauses (probe->stmt);
4905 break;
4907 case GIMPLE_OMP_TARGET:
4908 if (gimple_omp_target_kind (probe->stmt)
4909 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4910 goto do_lookup;
4912 cls = gimple_omp_target_clauses (probe->stmt);
4913 break;
4915 default:
4916 goto do_lookup;
4919 outer = probe;
4920 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4921 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4922 && orig == OMP_CLAUSE_DECL (cls))
4924 incoming = outgoing = lookup_decl (orig, probe);
4925 goto has_outer_reduction;
4927 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4928 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4929 && orig == OMP_CLAUSE_DECL (cls))
4931 is_private = true;
4932 goto do_lookup;
4936 do_lookup:
4937 /* This is the outermost construct with this reduction;
4938 see if there's a mapping for it. */
4939 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4940 && maybe_lookup_field (orig, outer) && !is_private)
4942 ref_to_res = build_receiver_ref (orig, false, outer);
4943 if (omp_is_reference (orig))
4944 ref_to_res = build_simple_mem_ref (ref_to_res);
4946 tree type = TREE_TYPE (var);
4947 if (POINTER_TYPE_P (type))
4948 type = TREE_TYPE (type);
4950 outgoing = var;
4951 incoming = omp_reduction_init_op (loc, rcode, type);
4953 else
4955 /* Try to look up the reduction var in enclosing contexts;
4956 use the original if no mapping is found. */
4957 tree t = NULL_TREE;
4958 omp_context *c = ctx->outer;
4959 while (c && !t)
4961 t = maybe_lookup_decl (orig, c);
4962 c = c->outer;
4964 incoming = outgoing = (t ? t : orig);
4967 has_outer_reduction:;
4970 if (!ref_to_res)
4971 ref_to_res = integer_zero_node;
4973 if (omp_is_reference (orig))
4975 tree type = TREE_TYPE (var);
4976 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
4978 if (!inner)
4980 tree x = create_tmp_var (TREE_TYPE (type), id);
4981 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
4984 v1 = create_tmp_var (type, id);
4985 v2 = create_tmp_var (type, id);
4986 v3 = create_tmp_var (type, id);
4988 gimplify_assign (v1, var, fork_seq);
4989 gimplify_assign (v2, var, fork_seq);
4990 gimplify_assign (v3, var, fork_seq);
4992 var = build_simple_mem_ref (var);
4993 v1 = build_simple_mem_ref (v1);
4994 v2 = build_simple_mem_ref (v2);
4995 v3 = build_simple_mem_ref (v3);
4996 outgoing = build_simple_mem_ref (outgoing);
4998 if (!TREE_CONSTANT (incoming))
4999 incoming = build_simple_mem_ref (incoming);
5001 else
5002 v1 = v2 = v3 = var;
5004 /* Determine position in reduction buffer, which may be used
5005 by the target. The parser has ensured that this is not a
5006 variable-sized type. */
5007 fixed_size_mode mode
5008 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
5009 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5010 offset = (offset + align - 1) & ~(align - 1);
5011 tree off = build_int_cst (sizetype, offset);
5012 offset += GET_MODE_SIZE (mode);
5014 if (!init_code)
5016 init_code = build_int_cst (integer_type_node,
5017 IFN_GOACC_REDUCTION_INIT);
5018 fini_code = build_int_cst (integer_type_node,
5019 IFN_GOACC_REDUCTION_FINI);
5020 setup_code = build_int_cst (integer_type_node,
5021 IFN_GOACC_REDUCTION_SETUP);
5022 teardown_code = build_int_cst (integer_type_node,
5023 IFN_GOACC_REDUCTION_TEARDOWN);
5026 tree setup_call
5027 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5028 TREE_TYPE (var), 6, setup_code,
5029 unshare_expr (ref_to_res),
5030 incoming, level, op, off);
5031 tree init_call
5032 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5033 TREE_TYPE (var), 6, init_code,
5034 unshare_expr (ref_to_res),
5035 v1, level, op, off);
5036 tree fini_call
5037 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5038 TREE_TYPE (var), 6, fini_code,
5039 unshare_expr (ref_to_res),
5040 v2, level, op, off);
5041 tree teardown_call
5042 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5043 TREE_TYPE (var), 6, teardown_code,
5044 ref_to_res, v3, level, op, off);
5046 gimplify_assign (v1, setup_call, &before_fork);
5047 gimplify_assign (v2, init_call, &after_fork);
5048 gimplify_assign (v3, fini_call, &before_join);
5049 gimplify_assign (outgoing, teardown_call, &after_join);
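/* Schematically, each reduction candidate therefore expands to:
     v1 = GOACC_REDUCTION (SETUP, ref_to_res, incoming, level, op, off);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, op, off);
     ... loop body ...
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, op, off);
     GOACC_JOIN
     outgoing = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, op, off);  */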
5052 /* Now stitch things together. */
5053 gimple_seq_add_seq (fork_seq, before_fork);
5054 if (fork)
5055 gimple_seq_add_stmt (fork_seq, fork);
5056 gimple_seq_add_seq (fork_seq, after_fork);
5058 gimple_seq_add_seq (join_seq, before_join);
5059 if (join)
5060 gimple_seq_add_stmt (join_seq, join);
5061 gimple_seq_add_seq (join_seq, after_join);
5064 /* Generate code to implement the REDUCTION clauses. */
5066 static void
5067 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5069 gimple_seq sub_seq = NULL;
5070 gimple *stmt;
5071 tree x, c;
5072 int count = 0;
5074 /* OpenACC loop reductions are handled elsewhere. */
5075 if (is_gimple_omp_oacc (ctx->stmt))
5076 return;
5078 /* SIMD reductions are handled in lower_rec_input_clauses. */
5079 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5080 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5081 return;
5083 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5084 update in that case, otherwise use a lock. */
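/* Roughly: a single scalar reduction (+:x) is merged with one atomic
   update,
     #pragma omp atomic   *&outer_x += x';
   whereas multiple (or array/UDR) reductions are all merged inside one
     GOMP_atomic_start () ... GOMP_atomic_end ()
   critical region.  */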
5085 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5086 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5088 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5089 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5091 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5092 count = -1;
5093 break;
5095 count++;
5098 if (count == 0)
5099 return;
5101 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5103 tree var, ref, new_var, orig_var;
5104 enum tree_code code;
5105 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5107 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5108 continue;
5110 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5111 orig_var = var = OMP_CLAUSE_DECL (c);
5112 if (TREE_CODE (var) == MEM_REF)
5114 var = TREE_OPERAND (var, 0);
5115 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5116 var = TREE_OPERAND (var, 0);
5117 if (TREE_CODE (var) == ADDR_EXPR)
5118 var = TREE_OPERAND (var, 0);
5119 else
5121 /* If this is a pointer- or reference-based array
5122 section, the var could be private in the outer
5123 context, e.g. on an orphaned loop construct. Pretend this
5124 is the private variable's outer reference. */
5125 ccode = OMP_CLAUSE_PRIVATE;
5126 if (TREE_CODE (var) == INDIRECT_REF)
5127 var = TREE_OPERAND (var, 0);
5129 orig_var = var;
5130 if (is_variable_sized (var))
5132 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5133 var = DECL_VALUE_EXPR (var);
5134 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5135 var = TREE_OPERAND (var, 0);
5136 gcc_assert (DECL_P (var));
5139 new_var = lookup_decl (var, ctx);
5140 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5141 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5142 ref = build_outer_var_ref (var, ctx, ccode);
5143 code = OMP_CLAUSE_REDUCTION_CODE (c);
5145 /* reduction(-:var) sums up the partial results, so it acts
5146 identically to reduction(+:var). */
5147 if (code == MINUS_EXPR)
5148 code = PLUS_EXPR;
5150 if (count == 1)
5152 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5154 addr = save_expr (addr);
5155 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5156 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5157 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5158 gimplify_and_add (x, stmt_seqp);
5159 return;
5161 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5163 tree d = OMP_CLAUSE_DECL (c);
5164 tree type = TREE_TYPE (d);
5165 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5166 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5167 tree ptype = build_pointer_type (TREE_TYPE (type));
5168 tree bias = TREE_OPERAND (d, 1);
5169 d = TREE_OPERAND (d, 0);
5170 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5172 tree b = TREE_OPERAND (d, 1);
5173 b = maybe_lookup_decl (b, ctx);
5174 if (b == NULL)
5176 b = TREE_OPERAND (d, 1);
5177 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5179 if (integer_zerop (bias))
5180 bias = b;
5181 else
5183 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5184 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5185 TREE_TYPE (b), b, bias);
5187 d = TREE_OPERAND (d, 0);
5189 /* For a reference, build_outer_var_ref already performs this, so
5190 only new_var needs a dereference. */
5191 if (TREE_CODE (d) == INDIRECT_REF)
5193 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5194 gcc_assert (omp_is_reference (var) && var == orig_var);
5196 else if (TREE_CODE (d) == ADDR_EXPR)
5198 if (orig_var == var)
5200 new_var = build_fold_addr_expr (new_var);
5201 ref = build_fold_addr_expr (ref);
5204 else
5206 gcc_assert (orig_var == var);
5207 if (omp_is_reference (var))
5208 ref = build_fold_addr_expr (ref);
5210 if (DECL_P (v))
5212 tree t = maybe_lookup_decl (v, ctx);
5213 if (t)
5214 v = t;
5215 else
5216 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5217 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5219 if (!integer_zerop (bias))
5221 bias = fold_convert_loc (clause_loc, sizetype, bias);
5222 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5223 TREE_TYPE (new_var), new_var,
5224 unshare_expr (bias));
5225 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5226 TREE_TYPE (ref), ref, bias);
5228 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5229 ref = fold_convert_loc (clause_loc, ptype, ref);
5230 tree m = create_tmp_var (ptype, NULL);
5231 gimplify_assign (m, new_var, stmt_seqp);
5232 new_var = m;
5233 m = create_tmp_var (ptype, NULL);
5234 gimplify_assign (m, ref, stmt_seqp);
5235 ref = m;
5236 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5237 tree body = create_artificial_label (UNKNOWN_LOCATION);
5238 tree end = create_artificial_label (UNKNOWN_LOCATION);
5239 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5240 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5241 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5242 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5244 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5245 tree decl_placeholder
5246 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5247 SET_DECL_VALUE_EXPR (placeholder, out);
5248 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5249 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5250 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5251 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5252 gimple_seq_add_seq (&sub_seq,
5253 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5254 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5255 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5256 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5258 else
5260 x = build2 (code, TREE_TYPE (out), out, priv);
5261 out = unshare_expr (out);
5262 gimplify_assign (out, x, &sub_seq);
5264 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5265 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5266 gimple_seq_add_stmt (&sub_seq, g);
5267 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5268 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5269 gimple_seq_add_stmt (&sub_seq, g);
5270 g = gimple_build_assign (i, PLUS_EXPR, i,
5271 build_int_cst (TREE_TYPE (i), 1));
5272 gimple_seq_add_stmt (&sub_seq, g);
5273 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5274 gimple_seq_add_stmt (&sub_seq, g);
5275 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
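/* The merge loop just built is, roughly:
     for (i = 0; i <= v; i++)
       *ref = *ref OP *new_var,         // or the UDR merge sequence
       new_var += sizeof (ELT), ref += sizeof (ELT);  */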
5277 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5279 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5281 if (omp_is_reference (var)
5282 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5283 TREE_TYPE (ref)))
5284 ref = build_fold_addr_expr_loc (clause_loc, ref);
5285 SET_DECL_VALUE_EXPR (placeholder, ref);
5286 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5287 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5288 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5289 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5290 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5292 else
5294 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5295 ref = build_outer_var_ref (var, ctx);
5296 gimplify_assign (ref, x, &sub_seq);
5300 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START), 0);
5302 gimple_seq_add_stmt (stmt_seqp, stmt);
5304 gimple_seq_add_seq (stmt_seqp, sub_seq);
5306 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END), 0);
5308 gimple_seq_add_stmt (stmt_seqp, stmt);
5312 /* Generate code to implement the COPYPRIVATE clauses. */
5314 static void
5315 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5316 omp_context *ctx)
5318 tree c;
5320 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5322 tree var, new_var, ref, x;
5323 bool by_ref;
5324 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5326 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5327 continue;
5329 var = OMP_CLAUSE_DECL (c);
5330 by_ref = use_pointer_for_field (var, NULL);
5332 ref = build_sender_ref (var, ctx);
5333 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5334 if (by_ref)
5336 x = build_fold_addr_expr_loc (clause_loc, new_var);
5337 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5339 gimplify_assign (ref, x, slist);
5341 ref = build_receiver_ref (var, false, ctx);
5342 if (by_ref)
5344 ref = fold_convert_loc (clause_loc,
5345 build_pointer_type (TREE_TYPE (new_var)),
5346 ref);
5347 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5349 if (omp_is_reference (var))
5351 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5352 ref = build_simple_mem_ref_loc (clause_loc, ref);
5353 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5355 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5356 gimplify_and_add (x, rlist);
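/* Roughly: the thread that executed the single region stores VAR
   (or &VAR when passed by reference) into the sender record (SLIST),
   and after the broadcast every thread copies the value back out of
   its receiver record (RLIST).  */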
5361 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5362 and REDUCTION from the sender (aka parent) side. */
5364 static void
5365 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5366 omp_context *ctx)
5368 tree c, t;
5369 int ignored_looptemp = 0;
5370 bool is_taskloop = false;
5372 /* For taskloop, ignore the first two _looptemp_ clauses; those are
5373 initialized by GOMP_taskloop. */
5374 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5376 ignored_looptemp = 2;
5377 is_taskloop = true;
5380 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5382 tree val, ref, x, var;
5383 bool by_ref, do_in = false, do_out = false;
5384 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5386 switch (OMP_CLAUSE_CODE (c))
5388 case OMP_CLAUSE_PRIVATE:
5389 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5390 break;
5391 continue;
5392 case OMP_CLAUSE_FIRSTPRIVATE:
5393 case OMP_CLAUSE_COPYIN:
5394 case OMP_CLAUSE_LASTPRIVATE:
5395 case OMP_CLAUSE_REDUCTION:
5396 break;
5397 case OMP_CLAUSE_SHARED:
5398 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5399 break;
5400 continue;
5401 case OMP_CLAUSE__LOOPTEMP_:
5402 if (ignored_looptemp)
5404 ignored_looptemp--;
5405 continue;
5407 break;
5408 default:
5409 continue;
5412 val = OMP_CLAUSE_DECL (c);
5413 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5414 && TREE_CODE (val) == MEM_REF)
5416 val = TREE_OPERAND (val, 0);
5417 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5418 val = TREE_OPERAND (val, 0);
5419 if (TREE_CODE (val) == INDIRECT_REF
5420 || TREE_CODE (val) == ADDR_EXPR)
5421 val = TREE_OPERAND (val, 0);
5422 if (is_variable_sized (val))
5423 continue;
5426 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5427 outer taskloop region. */
5428 omp_context *ctx_for_o = ctx;
5429 if (is_taskloop
5430 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5431 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5432 ctx_for_o = ctx->outer;
5434 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5436 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5437 && is_global_var (var))
5438 continue;
5440 t = omp_member_access_dummy_var (var);
5441 if (t)
5443 var = DECL_VALUE_EXPR (var);
5444 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5445 if (o != t)
5446 var = unshare_and_remap (var, t, o);
5447 else
5448 var = unshare_expr (var);
5451 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5453 /* Handle taskloop firstprivate/lastprivate, where the
5454 lastprivate on GIMPLE_OMP_TASK is represented as
5455 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5456 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5457 x = omp_build_component_ref (ctx->sender_decl, f);
5458 if (use_pointer_for_field (val, ctx))
5459 var = build_fold_addr_expr (var);
5460 gimplify_assign (x, var, ilist);
5461 DECL_ABSTRACT_ORIGIN (f) = NULL;
5462 continue;
5465 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5466 || val == OMP_CLAUSE_DECL (c))
5467 && is_variable_sized (val))
5468 continue;
5469 by_ref = use_pointer_for_field (val, NULL);
5471 switch (OMP_CLAUSE_CODE (c))
5473 case OMP_CLAUSE_FIRSTPRIVATE:
5474 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5475 && !by_ref
5476 && is_task_ctx (ctx))
5477 TREE_NO_WARNING (var) = 1;
5478 do_in = true;
5479 break;
5481 case OMP_CLAUSE_PRIVATE:
5482 case OMP_CLAUSE_COPYIN:
5483 case OMP_CLAUSE__LOOPTEMP_:
5484 do_in = true;
5485 break;
5487 case OMP_CLAUSE_LASTPRIVATE:
5488 if (by_ref || omp_is_reference (val))
5490 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5491 continue;
5492 do_in = true;
5494 else
5496 do_out = true;
5497 if (lang_hooks.decls.omp_private_outer_ref (val))
5498 do_in = true;
5500 break;
5502 case OMP_CLAUSE_REDUCTION:
5503 do_in = true;
5504 if (val == OMP_CLAUSE_DECL (c))
5505 do_out = !(by_ref || omp_is_reference (val));
5506 else
5507 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5508 break;
5510 default:
5511 gcc_unreachable ();
5514 if (do_in)
5516 ref = build_sender_ref (val, ctx);
5517 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5518 gimplify_assign (ref, x, ilist);
5519 if (is_task_ctx (ctx))
5520 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5523 if (do_out)
5525 ref = build_sender_ref (val, ctx);
5526 gimplify_assign (var, ref, olist);
5531 /* Generate code to implement SHARED from the sender (aka parent)
5532 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5533 list things that got automatically shared. */
5535 static void
5536 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5538 tree var, ovar, nvar, t, f, x, record_type;
5540 if (ctx->record_type == NULL)
5541 return;
5543 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5544 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5546 ovar = DECL_ABSTRACT_ORIGIN (f);
5547 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5548 continue;
5550 nvar = maybe_lookup_decl (ovar, ctx);
5551 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5552 continue;
5554 /* If CTX is a nested parallel directive, find the immediately
5555 enclosing parallel or workshare construct that contains a
5556 mapping for OVAR. */
5557 var = lookup_decl_in_outer_ctx (ovar, ctx);
5559 t = omp_member_access_dummy_var (var);
5560 if (t)
5562 var = DECL_VALUE_EXPR (var);
5563 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5564 if (o != t)
5565 var = unshare_and_remap (var, t, o);
5566 else
5567 var = unshare_expr (var);
5570 if (use_pointer_for_field (ovar, ctx))
5572 x = build_sender_ref (ovar, ctx);
5573 var = build_fold_addr_expr (var);
5574 gimplify_assign (x, var, ilist);
5576 else
5578 x = build_sender_ref (ovar, ctx);
5579 gimplify_assign (x, var, ilist);
5581 if (!TREE_READONLY (var)
5582 /* We don't need to receive a new reference to a result
5583 or parm decl. In fact we may not store to it, as that would
5584 invalidate any pending return-slot optimization (RSO) and
5585 generate wrong gimple during inlining. */
5586 && !((TREE_CODE (var) == RESULT_DECL
5587 || TREE_CODE (var) == PARM_DECL)
5588 && DECL_BY_REFERENCE (var)))
5590 x = build_sender_ref (ovar, ctx);
5591 gimplify_assign (var, x, olist);
5597 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5598 other information that must be processed by the target compiler.
5599 Return the maximum number of dimensions the associated loop might
5600 be partitioned over. */
5602 static unsigned
5603 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5604 gimple_seq *seq, omp_context *ctx)
5606 unsigned levels = 0;
5607 unsigned tag = 0;
5608 tree gang_static = NULL_TREE;
5609 auto_vec<tree, 5> args;
5611 args.quick_push (build_int_cst
5612 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5613 args.quick_push (ddvar);
5614 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5616 switch (OMP_CLAUSE_CODE (c))
5618 case OMP_CLAUSE_GANG:
5619 tag |= OLF_DIM_GANG;
5620 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5621 /* static:* is represented by -1, and we can ignore it, as
5622 scheduling is always static. */
5623 if (gang_static && integer_minus_onep (gang_static))
5624 gang_static = NULL_TREE;
5625 levels++;
5626 break;
5628 case OMP_CLAUSE_WORKER:
5629 tag |= OLF_DIM_WORKER;
5630 levels++;
5631 break;
5633 case OMP_CLAUSE_VECTOR:
5634 tag |= OLF_DIM_VECTOR;
5635 levels++;
5636 break;
5638 case OMP_CLAUSE_SEQ:
5639 tag |= OLF_SEQ;
5640 break;
5642 case OMP_CLAUSE_AUTO:
5643 tag |= OLF_AUTO;
5644 break;
5646 case OMP_CLAUSE_INDEPENDENT:
5647 tag |= OLF_INDEPENDENT;
5648 break;
5650 case OMP_CLAUSE_TILE:
5651 tag |= OLF_TILE;
5652 break;
5654 default:
5655 continue;
5659 if (gang_static)
5661 if (DECL_P (gang_static))
5662 gang_static = build_outer_var_ref (gang_static, ctx);
5663 tag |= OLF_GANG_STATIC;
5666 /* In a parallel region, loops are implicitly INDEPENDENT. */
5667 omp_context *tgt = enclosing_target_ctx (ctx);
5668 if (!tgt || is_oacc_parallel (tgt))
5669 tag |= OLF_INDEPENDENT;
5671 if (tag & OLF_TILE)
5672 /* Tiling could use all 3 levels. */
5673 levels = 3;
5674 else
5676 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5677 Ensure at least one level, or 2 for possible auto
5678 partitioning. */
5679 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5680 << OLF_DIM_BASE) | OLF_SEQ));
5682 if (levels < 1u + maybe_auto)
5683 levels = 1u + maybe_auto;
5686 args.quick_push (build_int_cst (integer_type_node, levels));
5687 args.quick_push (build_int_cst (integer_type_node, tag));
5688 if (gang_static)
5689 args.quick_push (gang_static);
5691 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5692 gimple_set_location (call, loc);
5693 gimple_set_lhs (call, ddvar);
5694 gimple_seq_add_stmt (seq, call);
5696 return levels;
5699 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, if
5700 non-null, is the partitioning level of the enclosed region. */
5702 static void
5703 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5704 tree tofollow, gimple_seq *seq)
5706 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5707 : IFN_UNIQUE_OACC_TAIL_MARK);
5708 tree marker = build_int_cst (integer_type_node, marker_kind);
5709 int nargs = 2 + (tofollow != NULL_TREE);
5710 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5711 marker, ddvar, tofollow);
5712 gimple_set_location (call, loc);
5713 gimple_set_lhs (call, ddvar);
5714 gimple_seq_add_stmt (seq, call);
5717 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5718 the loop clauses, from which we extract reductions. Initialize
5719 HEAD and TAIL. */
5721 static void
5722 lower_oacc_head_tail (location_t loc, tree clauses,
5723 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5725 bool inner = false;
5726 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5727 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5729 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5730 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5731 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5733 gcc_assert (count);
5734 for (unsigned done = 1; count; count--, done++)
5736 gimple_seq fork_seq = NULL;
5737 gimple_seq join_seq = NULL;
5739 tree place = build_int_cst (integer_type_node, -1);
5740 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5741 fork_kind, ddvar, place);
5742 gimple_set_location (fork, loc);
5743 gimple_set_lhs (fork, ddvar);
5745 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5746 join_kind, ddvar, place);
5747 gimple_set_location (join, loc);
5748 gimple_set_lhs (join, ddvar);
5750 /* Mark the beginning of this level sequence. */
5751 if (inner)
5752 lower_oacc_loop_marker (loc, ddvar, true,
5753 build_int_cst (integer_type_node, count),
5754 &fork_seq);
5755 lower_oacc_loop_marker (loc, ddvar, false,
5756 build_int_cst (integer_type_node, done),
5757 &join_seq);
5759 lower_oacc_reductions (loc, clauses, place, inner,
5760 fork, join, &fork_seq, &join_seq, ctx);
5762 /* Append this level to head. */
5763 gimple_seq_add_seq (head, fork_seq);
5764 /* Prepend it to tail. */
5765 gimple_seq_add_seq (&join_seq, *tail);
5766 *tail = join_seq;
5768 inner = true;
5771 /* Mark the end of the sequence. */
5772 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5773 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5776 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5777 catch handler and return it. This prevents programs from violating the
5778 structured block semantics with throws. */
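/* Conceptually the result is (a sketch, not source-level code):

     try { BODY } MUST_NOT_THROW-handler ();

   where the handler is whatever the language's
   eh_protect_cleanup_actions hook returns (e.g. std::terminate for
   C++), or __builtin_trap as the fallback.  */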
5780 static gimple_seq
5781 maybe_catch_exception (gimple_seq body)
5783 gimple *g;
5784 tree decl;
5786 if (!flag_exceptions)
5787 return body;
5789 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5790 decl = lang_hooks.eh_protect_cleanup_actions ();
5791 else
5792 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5794 g = gimple_build_eh_must_not_throw (decl);
5795 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5796 GIMPLE_TRY_CATCH);
5798 return gimple_seq_alloc_with_stmt (g);
5802 /* Routines to lower OMP directives into OMP-GIMPLE. */
5804 /* If CTX is a worksharing context inside a cancellable parallel
5805 region and it isn't nowait, add a LHS to its GIMPLE_OMP_RETURN
5806 and a conditional branch to the parallel's cancel_label to handle
5807 cancellation in the implicit barrier. */
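/* A sketch of the epilogue appended below; when the implicit barrier
   is expanded, the GIMPLE_OMP_RETURN's LHS receives the result of
   GOMP_barrier_cancel ():

     lhs = <GIMPLE_OMP_RETURN>;
     if (lhs != false) goto <parallel's cancel_label>; else goto <fallthru>;
     <fallthru>:
*/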
5809 static void
5810 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5812 gimple *omp_return = gimple_seq_last_stmt (*body);
5813 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5814 if (gimple_omp_return_nowait_p (omp_return))
5815 return;
5816 if (ctx->outer
5817 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5818 && ctx->outer->cancellable)
5820 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5821 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5822 tree lhs = create_tmp_var (c_bool_type);
5823 gimple_omp_return_set_lhs (omp_return, lhs);
5824 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5825 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5826 fold_convert (c_bool_type,
5827 boolean_false_node),
5828 ctx->outer->cancel_label, fallthru_label);
5829 gimple_seq_add_stmt (body, g);
5830 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5834 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5835 CTX is the enclosing OMP context for the current statement. */
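/* The lowered sequence is assembled roughly as (a sketch):

     <input clause init, ilist>
     GIMPLE_OMP_SECTIONS <.section control variable>
     GIMPLE_OMP_SECTIONS_SWITCH
     <bind holding the lowered section bodies>
     GIMPLE_OMP_CONTINUE (.section, .section)
     <reduction epilogue, olist> <destructors, dlist>
     GIMPLE_OMP_RETURN [nowait]
*/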
5837 static void
5838 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5840 tree block, control;
5841 gimple_stmt_iterator tgsi;
5842 gomp_sections *stmt;
5843 gimple *t;
5844 gbind *new_stmt, *bind;
5845 gimple_seq ilist, dlist, olist, new_body;
5847 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5849 push_gimplify_context ();
5851 dlist = NULL;
5852 ilist = NULL;
5853 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5854 &ilist, &dlist, ctx, NULL);
5856 new_body = gimple_omp_body (stmt);
5857 gimple_omp_set_body (stmt, NULL);
5858 tgsi = gsi_start (new_body);
5859 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5861 omp_context *sctx;
5862 gimple *sec_start;
5864 sec_start = gsi_stmt (tgsi);
5865 sctx = maybe_lookup_ctx (sec_start);
5866 gcc_assert (sctx);
5868 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5869 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5870 GSI_CONTINUE_LINKING);
5871 gimple_omp_set_body (sec_start, NULL);
5873 if (gsi_one_before_end_p (tgsi))
5875 gimple_seq l = NULL;
5876 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5877 &l, ctx);
5878 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5879 gimple_omp_section_set_last (sec_start);
5882 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5883 GSI_CONTINUE_LINKING);
5886 block = make_node (BLOCK);
5887 bind = gimple_build_bind (NULL, new_body, block);
5889 olist = NULL;
5890 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5892 block = make_node (BLOCK);
5893 new_stmt = gimple_build_bind (NULL, NULL, block);
5894 gsi_replace (gsi_p, new_stmt, true);
5896 pop_gimplify_context (new_stmt);
5897 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5898 BLOCK_VARS (block) = gimple_bind_vars (bind);
5899 if (BLOCK_VARS (block))
5900 TREE_USED (block) = 1;
5902 new_body = NULL;
5903 gimple_seq_add_seq (&new_body, ilist);
5904 gimple_seq_add_stmt (&new_body, stmt);
5905 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5906 gimple_seq_add_stmt (&new_body, bind);
5908 control = create_tmp_var (unsigned_type_node, ".section");
5909 t = gimple_build_omp_continue (control, control);
5910 gimple_omp_sections_set_control (stmt, control);
5911 gimple_seq_add_stmt (&new_body, t);
5913 gimple_seq_add_seq (&new_body, olist);
5914 if (ctx->cancellable)
5915 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5916 gimple_seq_add_seq (&new_body, dlist);
5918 new_body = maybe_catch_exception (new_body);
5920 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5921 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5922 t = gimple_build_omp_return (nowait);
5923 gimple_seq_add_stmt (&new_body, t);
5924 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5926 gimple_bind_set_body (new_stmt, new_body);
5930 /* A subroutine of lower_omp_single. Expand the simple form of
5931 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5933 if (GOMP_single_start ())
5934 BODY;
5935 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5937 FIXME. It may be better to delay expanding the logic of this until
5938 pass_expand_omp. The expanded logic may make the job more difficult
5939 for a synchronization analysis pass. */
5941 static void
5942 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5944 location_t loc = gimple_location (single_stmt);
5945 tree tlabel = create_artificial_label (loc);
5946 tree flabel = create_artificial_label (loc);
5947 gimple *call, *cond;
5948 tree lhs, decl;
5950 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5951 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5952 call = gimple_build_call (decl, 0);
5953 gimple_call_set_lhs (call, lhs);
5954 gimple_seq_add_stmt (pre_p, call);
5956 cond = gimple_build_cond (EQ_EXPR, lhs,
5957 fold_convert_loc (loc, TREE_TYPE (lhs),
5958 boolean_true_node),
5959 tlabel, flabel);
5960 gimple_seq_add_stmt (pre_p, cond);
5961 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5962 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5963 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
5967 /* A subroutine of lower_omp_single. Expand the simple form of
5968 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
5970 #pragma omp single copyprivate (a, b, c)
5972 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5975 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5976 {
5977 BODY;
5978 copyout.a = a;
5979 copyout.b = b;
5980 copyout.c = c;
5981 GOMP_single_copy_end (&copyout);
5982 }
5983 else
5984 {
5985 a = copyout_p->a;
5986 b = copyout_p->b;
5987 c = copyout_p->c;
5988 }
5989 GOMP_barrier ();
5992 FIXME. It may be better to delay expanding the logic of this until
5993 pass_expand_omp. The expanded logic may make the job more difficult
5994 for a synchronization analysis pass. */
5996 static void
5997 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
5998 omp_context *ctx)
6000 tree ptr_type, t, l0, l1, l2, bfn_decl;
6001 gimple_seq copyin_seq;
6002 location_t loc = gimple_location (single_stmt);
6004 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6006 ptr_type = build_pointer_type (ctx->record_type);
6007 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6009 l0 = create_artificial_label (loc);
6010 l1 = create_artificial_label (loc);
6011 l2 = create_artificial_label (loc);
6013 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6014 t = build_call_expr_loc (loc, bfn_decl, 0);
6015 t = fold_convert_loc (loc, ptr_type, t);
6016 gimplify_assign (ctx->receiver_decl, t, pre_p);
6018 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6019 build_int_cst (ptr_type, 0));
6020 t = build3 (COND_EXPR, void_type_node, t,
6021 build_and_jump (&l0), build_and_jump (&l1));
6022 gimplify_and_add (t, pre_p);
6024 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6026 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6028 copyin_seq = NULL;
6029 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6030 &copyin_seq, ctx);
6032 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6033 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6034 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6035 gimplify_and_add (t, pre_p);
6037 t = build_and_jump (&l2);
6038 gimplify_and_add (t, pre_p);
6040 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6042 gimple_seq_add_seq (pre_p, copyin_seq);
6044 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6048 /* Expand code for an OpenMP single directive. */
6050 static void
6051 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6053 tree block;
6054 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6055 gbind *bind;
6056 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6058 push_gimplify_context ();
6060 block = make_node (BLOCK);
6061 bind = gimple_build_bind (NULL, NULL, block);
6062 gsi_replace (gsi_p, bind, true);
6063 bind_body = NULL;
6064 dlist = NULL;
6065 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6066 &bind_body, &dlist, ctx, NULL);
6067 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6069 gimple_seq_add_stmt (&bind_body, single_stmt);
6071 if (ctx->record_type)
6072 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6073 else
6074 lower_omp_single_simple (single_stmt, &bind_body);
6076 gimple_omp_set_body (single_stmt, NULL);
6078 gimple_seq_add_seq (&bind_body, dlist);
6080 bind_body = maybe_catch_exception (bind_body);
6082 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6083 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6084 gimple *g = gimple_build_omp_return (nowait);
6085 gimple_seq_add_stmt (&bind_body_tail, g);
6086 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6087 if (ctx->record_type)
6089 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6090 tree clobber = build_constructor (ctx->record_type, NULL);
6091 TREE_THIS_VOLATILE (clobber) = 1;
6092 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6093 clobber), GSI_SAME_STMT);
6095 gimple_seq_add_seq (&bind_body, bind_body_tail);
6096 gimple_bind_set_body (bind, bind_body);
6098 pop_gimplify_context (bind);
6100 gimple_bind_append_vars (bind, ctx->block_vars);
6101 BLOCK_VARS (block) = ctx->block_vars;
6102 if (BLOCK_VARS (block))
6103 TREE_USED (block) = 1;
6107 /* Expand code for an OpenMP master directive. */
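/* The master construct needs no runtime library support; its body is
   simply guarded by a thread-number check, roughly:

     if (omp_get_thread_num () == 0)
       BODY;

   (a sketch of the shape built below).  */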
6109 static void
6110 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6112 tree block, lab = NULL, x, bfn_decl;
6113 gimple *stmt = gsi_stmt (*gsi_p);
6114 gbind *bind;
6115 location_t loc = gimple_location (stmt);
6116 gimple_seq tseq;
6118 push_gimplify_context ();
6120 block = make_node (BLOCK);
6121 bind = gimple_build_bind (NULL, NULL, block);
6122 gsi_replace (gsi_p, bind, true);
6123 gimple_bind_add_stmt (bind, stmt);
6125 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6126 x = build_call_expr_loc (loc, bfn_decl, 0);
6127 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6128 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6129 tseq = NULL;
6130 gimplify_and_add (x, &tseq);
6131 gimple_bind_add_seq (bind, tseq);
6133 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6134 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6135 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6136 gimple_omp_set_body (stmt, NULL);
6138 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6140 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6142 pop_gimplify_context (bind);
6144 gimple_bind_append_vars (bind, ctx->block_vars);
6145 BLOCK_VARS (block) = ctx->block_vars;
6149 /* Expand code for an OpenMP taskgroup directive. */
6151 static void
6152 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6154 gimple *stmt = gsi_stmt (*gsi_p);
6155 gcall *x;
6156 gbind *bind;
6157 tree block = make_node (BLOCK);
6159 bind = gimple_build_bind (NULL, NULL, block);
6160 gsi_replace (gsi_p, bind, true);
6161 gimple_bind_add_stmt (bind, stmt);
6163 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6164 0);
6165 gimple_bind_add_stmt (bind, x);
6167 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6168 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6169 gimple_omp_set_body (stmt, NULL);
6171 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6173 gimple_bind_append_vars (bind, ctx->block_vars);
6174 BLOCK_VARS (block) = ctx->block_vars;
6178 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6180 static void
6181 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6182 omp_context *ctx)
6184 struct omp_for_data fd;
6185 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6186 return;
6188 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6189 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6190 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6191 if (!fd.ordered)
6192 return;
6194 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6195 tree c = gimple_omp_ordered_clauses (ord_stmt);
6196 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6197 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6199 /* Merge depend clauses from multiple adjacent
6200 #pragma omp ordered depend(sink:...) constructs
6201 into one #pragma omp ordered depend(sink:...), so that
6202 we can optimize them together. */
6203 gimple_stmt_iterator gsi = *gsi_p;
6204 gsi_next (&gsi);
6205 while (!gsi_end_p (gsi))
6207 gimple *stmt = gsi_stmt (gsi);
6208 if (is_gimple_debug (stmt)
6209 || gimple_code (stmt) == GIMPLE_NOP)
6211 gsi_next (&gsi);
6212 continue;
6214 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6215 break;
6216 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6217 c = gimple_omp_ordered_clauses (ord_stmt2);
6218 if (c == NULL_TREE
6219 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6220 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6221 break;
6222 while (*list_p)
6223 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6224 *list_p = c;
6225 gsi_remove (&gsi, true);
6229 /* Canonicalize sink dependence clauses into one folded clause if
6230 possible.
6232 The basic algorithm is to create a sink vector whose first
6233 element is the GCD of all the first elements, and whose remaining
6234 elements are the minimum of the subsequent columns.
6236 We ignore dependence vectors whose first element is zero because
6237 such dependencies are known to be executed by the same thread.
6239 We take into account the direction of the loop, so a minimum
6240 becomes a maximum if the loop is iterating forwards. We also
6241 ignore sink clauses where the loop direction is unknown, or where
6242 the offsets are clearly invalid because they are not a multiple
6243 of the loop increment.
6245 For example:
6247 #pragma omp for ordered(2)
6248 for (i=0; i < N; ++i)
6249 for (j=0; j < M; ++j)
6250 {
6251 #pragma omp ordered \
6252 depend(sink:i-8,j-2) \
6253 depend(sink:i,j-1) \ // Completely ignored because i+0.
6254 depend(sink:i-4,j-3) \
6255 depend(sink:i-6,j-4)
6256 #pragma omp ordered depend(source)
6257 }
6259 Folded clause is:
6261 depend(sink:-gcd(8,4,6),-min(2,3,4))
6262 -or-
6263 depend(sink:-2,-2)
6266 /* FIXME: Computing GCDs where the first element is zero is
6267 non-trivial in the presence of collapsed loops. Do this later. */
6268 if (fd.collapse > 1)
6269 return;
6271 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6273 /* wide_int is not a POD so it must be default-constructed. */
6274 for (unsigned i = 0; i != 2 * len - 1; ++i)
6275 new (static_cast<void*>(folded_deps + i)) wide_int ();
6277 tree folded_dep = NULL_TREE;
6278 /* TRUE if the first dimension's offset is negative. */
6279 bool neg_offset_p = false;
6281 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6282 unsigned int i;
6283 while ((c = *list_p) != NULL)
6285 bool remove = false;
6287 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6288 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6289 goto next_ordered_clause;
6291 tree vec;
6292 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6293 vec && TREE_CODE (vec) == TREE_LIST;
6294 vec = TREE_CHAIN (vec), ++i)
6296 gcc_assert (i < len);
6298 /* omp_extract_for_data has canonicalized the condition. */
6299 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6300 || fd.loops[i].cond_code == GT_EXPR);
6301 bool forward = fd.loops[i].cond_code == LT_EXPR;
6302 bool maybe_lexically_later = true;
6304 /* While the committee makes up its mind, bail if we have any
6305 non-constant steps. */
6306 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6307 goto lower_omp_ordered_ret;
6309 tree itype = TREE_TYPE (TREE_VALUE (vec));
6310 if (POINTER_TYPE_P (itype))
6311 itype = sizetype;
6312 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
6313 TYPE_PRECISION (itype),
6314 TYPE_SIGN (itype));
6316 /* Ignore invalid offsets that are not multiples of the step. */
6317 if (!wi::multiple_of_p (wi::abs (offset),
6318 wi::abs (wi::to_wide (fd.loops[i].step)),
6319 UNSIGNED))
6321 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6322 "ignoring sink clause with offset that is not "
6323 "a multiple of the loop step");
6324 remove = true;
6325 goto next_ordered_clause;
6328 /* Calculate the first dimension. The first dimension of
6329 the folded dependency vector is the GCD of the first
6330 elements, while ignoring any first elements whose offset
6331 is 0. */
6332 if (i == 0)
6334 /* Ignore dependence vectors whose first dimension is 0. */
6335 if (offset == 0)
6337 remove = true;
6338 goto next_ordered_clause;
6340 else
6342 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6344 error_at (OMP_CLAUSE_LOCATION (c),
6345 "first offset must be in opposite direction "
6346 "of loop iterations");
6347 goto lower_omp_ordered_ret;
6349 if (forward)
6350 offset = -offset;
6351 neg_offset_p = forward;
6352 /* Initialize the first time around. */
6353 if (folded_dep == NULL_TREE)
6355 folded_dep = c;
6356 folded_deps[0] = offset;
6358 else
6359 folded_deps[0] = wi::gcd (folded_deps[0],
6360 offset, UNSIGNED);
6363 /* Calculate minimum for the remaining dimensions. */
6364 else
6366 folded_deps[len + i - 1] = offset;
6367 if (folded_dep == c)
6368 folded_deps[i] = offset;
6369 else if (maybe_lexically_later
6370 && !wi::eq_p (folded_deps[i], offset))
6372 if (forward ^ wi::gts_p (folded_deps[i], offset))
6374 unsigned int j;
6375 folded_dep = c;
6376 for (j = 1; j <= i; j++)
6377 folded_deps[j] = folded_deps[len + j - 1];
6379 else
6380 maybe_lexically_later = false;
6384 gcc_assert (i == len);
6386 remove = true;
6388 next_ordered_clause:
6389 if (remove)
6390 *list_p = OMP_CLAUSE_CHAIN (c);
6391 else
6392 list_p = &OMP_CLAUSE_CHAIN (c);
6395 if (folded_dep)
6397 if (neg_offset_p)
6398 folded_deps[0] = -folded_deps[0];
6400 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6401 if (POINTER_TYPE_P (itype))
6402 itype = sizetype;
6404 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6405 = wide_int_to_tree (itype, folded_deps[0]);
6406 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6407 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6410 lower_omp_ordered_ret:
6412 /* Ordered without clauses is #pragma omp ordered threads, while we want
6413 a nop instead if we remove all clauses. */
6414 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6415 gsi_replace (gsi_p, gimple_build_nop (), true);
6419 /* Expand code for an OpenMP ordered directive. */
6421 static void
6422 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6424 tree block;
6425 gimple *stmt = gsi_stmt (*gsi_p), *g;
6426 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6427 gcall *x;
6428 gbind *bind;
6429 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6430 OMP_CLAUSE_SIMD);
6431 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6432 loop. */
6433 bool maybe_simt
6434 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6435 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6436 OMP_CLAUSE_THREADS);
6438 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6439 OMP_CLAUSE_DEPEND))
6441 /* FIXME: This needs to be moved to the expansion to verify various
6442 conditions only testable on cfg with dominators computed, and also
6443 all the depend clauses to be merged still might need to be available
6444 for the runtime checks. */
6445 if (0)
6446 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6447 return;
6450 push_gimplify_context ();
6452 block = make_node (BLOCK);
6453 bind = gimple_build_bind (NULL, NULL, block);
6454 gsi_replace (gsi_p, bind, true);
6455 gimple_bind_add_stmt (bind, stmt);
6457 if (simd)
6459 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6460 build_int_cst (NULL_TREE, threads));
6461 cfun->has_simduid_loops = true;
6463 else
6464 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6465 0);
6466 gimple_bind_add_stmt (bind, x);
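/* Under SIMT, the lanes must run the ordered body one at a time, in
   lane order.  A sketch of the loop constructed below:

     counter = GOMP_SIMT_LANE ();
     <body>:
     if (GOMP_SIMT_ORDERED_PRED (counter) == 0) goto <exec>; else goto <test>;
     <exec>: BODY;
     <test>: counter--;
     if (GOMP_SIMT_VOTE_ANY (counter >= 0)) goto <body>; else goto <end>;
     <end>:
*/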
6468 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6469 if (maybe_simt)
6471 counter = create_tmp_var (integer_type_node);
6472 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6473 gimple_call_set_lhs (g, counter);
6474 gimple_bind_add_stmt (bind, g);
6476 body = create_artificial_label (UNKNOWN_LOCATION);
6477 test = create_artificial_label (UNKNOWN_LOCATION);
6478 gimple_bind_add_stmt (bind, gimple_build_label (body));
6480 tree simt_pred = create_tmp_var (integer_type_node);
6481 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6482 gimple_call_set_lhs (g, simt_pred);
6483 gimple_bind_add_stmt (bind, g);
6485 tree t = create_artificial_label (UNKNOWN_LOCATION);
6486 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6487 gimple_bind_add_stmt (bind, g);
6489 gimple_bind_add_stmt (bind, gimple_build_label (t));
6491 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6492 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6493 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6494 gimple_omp_set_body (stmt, NULL);
6496 if (maybe_simt)
6498 gimple_bind_add_stmt (bind, gimple_build_label (test));
6499 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6500 gimple_bind_add_stmt (bind, g);
6502 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6503 tree nonneg = create_tmp_var (integer_type_node);
6504 gimple_seq tseq = NULL;
6505 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6506 gimple_bind_add_seq (bind, tseq);
6508 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6509 gimple_call_set_lhs (g, nonneg);
6510 gimple_bind_add_stmt (bind, g);
6512 tree end = create_artificial_label (UNKNOWN_LOCATION);
6513 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6514 gimple_bind_add_stmt (bind, g);
6516 gimple_bind_add_stmt (bind, gimple_build_label (end));
6518 if (simd)
6519 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6520 build_int_cst (NULL_TREE, threads));
6521 else
6522 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6523 0);
6524 gimple_bind_add_stmt (bind, x);
6526 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6528 pop_gimplify_context (bind);
6530 gimple_bind_append_vars (bind, ctx->block_vars);
6531 BLOCK_VARS (block) = gimple_bind_vars (bind);
6535 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6536 substitution of a couple of function calls. But in the NAMED case, it
6537 requires that the languages coordinate a symbol name. It is therefore
6538 best put here in common code. */
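/* For example (a sketch): "#pragma omp critical (foo)" becomes

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     BODY;
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   while the unnamed form calls GOMP_critical_start/GOMP_critical_end
   with no arguments.  */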
6540 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6542 static void
6543 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6545 tree block;
6546 tree name, lock, unlock;
6547 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6548 gbind *bind;
6549 location_t loc = gimple_location (stmt);
6550 gimple_seq tbody;
6552 name = gimple_omp_critical_name (stmt);
6553 if (name)
6555 tree decl;
6557 if (!critical_name_mutexes)
6558 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6560 tree *n = critical_name_mutexes->get (name);
6561 if (n == NULL)
6563 char *new_str;
6565 decl = create_tmp_var_raw (ptr_type_node);
6567 new_str = ACONCAT ((".gomp_critical_user_",
6568 IDENTIFIER_POINTER (name), NULL));
6569 DECL_NAME (decl) = get_identifier (new_str);
6570 TREE_PUBLIC (decl) = 1;
6571 TREE_STATIC (decl) = 1;
6572 DECL_COMMON (decl) = 1;
6573 DECL_ARTIFICIAL (decl) = 1;
6574 DECL_IGNORED_P (decl) = 1;
6576 varpool_node::finalize_decl (decl);
6578 critical_name_mutexes->put (name, decl);
6580 else
6581 decl = *n;
6583 /* If '#pragma omp critical' is inside an offloaded region or
6584 inside a function marked as offloadable, the symbol must be
6585 marked as offloadable too. */
6586 omp_context *octx;
6587 if (cgraph_node::get (current_function_decl)->offloadable)
6588 varpool_node::get_create (decl)->offloadable = 1;
6589 else
6590 for (octx = ctx->outer; octx; octx = octx->outer)
6591 if (is_gimple_omp_offloaded (octx->stmt))
6593 varpool_node::get_create (decl)->offloadable = 1;
6594 break;
6597 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6598 lock = build_call_expr_loc (loc, lock, 1,
6599 build_fold_addr_expr_loc (loc, decl));
6601 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6602 unlock = build_call_expr_loc (loc, unlock, 1,
6603 build_fold_addr_expr_loc (loc, decl));
6605 else
6607 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6608 lock = build_call_expr_loc (loc, lock, 0);
6610 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6611 unlock = build_call_expr_loc (loc, unlock, 0);
6614 push_gimplify_context ();
6616 block = make_node (BLOCK);
6617 bind = gimple_build_bind (NULL, NULL, block);
6618 gsi_replace (gsi_p, bind, true);
6619 gimple_bind_add_stmt (bind, stmt);
6621 tbody = gimple_bind_body (bind);
6622 gimplify_and_add (lock, &tbody);
6623 gimple_bind_set_body (bind, tbody);
6625 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6626 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6627 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6628 gimple_omp_set_body (stmt, NULL);
6630 tbody = gimple_bind_body (bind);
6631 gimplify_and_add (unlock, &tbody);
6632 gimple_bind_set_body (bind, tbody);
6634 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6636 pop_gimplify_context (bind);
6637 gimple_bind_append_vars (bind, ctx->block_vars);
6638 BLOCK_VARS (block) = gimple_bind_vars (bind);
6641 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6642 for a lastprivate clause. Given a loop control predicate of (V
6643 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6644 is appended to *DLIST, iterator initialization is appended to
6645 *BODY_P. */
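/* For example (a sketch): given "for (V = N1; V < N2; V += STEP)", the
   lastprivate assignments in *DLIST are guarded by "if (V >= N2)";
   when STEP is known to be 1 or -1, the cheaper "if (V == N2)" form
   is chosen below.  */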
6647 static void
6648 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6649 gimple_seq *dlist, struct omp_context *ctx)
6651 tree clauses, cond, vinit;
6652 enum tree_code cond_code;
6653 gimple_seq stmts;
6655 cond_code = fd->loop.cond_code;
6656 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6658 /* When possible, use a strict equality expression, as this can let
6659 VRP-type optimizations deduce the value and remove a copy. */
6660 if (tree_fits_shwi_p (fd->loop.step))
6662 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6663 if (step == 1 || step == -1)
6664 cond_code = EQ_EXPR;
6667 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6668 || gimple_omp_for_grid_phony (fd->for_stmt))
6669 cond = omp_grid_lastprivate_predicate (fd);
6670 else
6672 tree n2 = fd->loop.n2;
6673 if (fd->collapse > 1
6674 && TREE_CODE (n2) != INTEGER_CST
6675 && gimple_omp_for_combined_into_p (fd->for_stmt))
6677 struct omp_context *taskreg_ctx = NULL;
6678 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6680 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6681 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6682 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6684 if (gimple_omp_for_combined_into_p (gfor))
6686 gcc_assert (ctx->outer->outer
6687 && is_parallel_ctx (ctx->outer->outer));
6688 taskreg_ctx = ctx->outer->outer;
6690 else
6692 struct omp_for_data outer_fd;
6693 omp_extract_for_data (gfor, &outer_fd, NULL);
6694 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6697 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6698 taskreg_ctx = ctx->outer->outer;
6700 else if (is_taskreg_ctx (ctx->outer))
6701 taskreg_ctx = ctx->outer;
6702 if (taskreg_ctx)
6704 int i;
6705 tree taskreg_clauses
6706 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6707 tree innerc = omp_find_clause (taskreg_clauses,
6708 OMP_CLAUSE__LOOPTEMP_);
6709 gcc_assert (innerc);
6710 for (i = 0; i < fd->collapse; i++)
6712 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6713 OMP_CLAUSE__LOOPTEMP_);
6714 gcc_assert (innerc);
6716 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6717 OMP_CLAUSE__LOOPTEMP_);
6718 if (innerc)
6719 n2 = fold_convert (TREE_TYPE (n2),
6720 lookup_decl (OMP_CLAUSE_DECL (innerc),
6721 taskreg_ctx));
6724 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6727 clauses = gimple_omp_for_clauses (fd->for_stmt);
6728 stmts = NULL;
6729 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6730 if (!gimple_seq_empty_p (stmts))
6732 gimple_seq_add_seq (&stmts, *dlist);
6733 *dlist = stmts;
6735 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6736 vinit = fd->loop.n1;
6737 if (cond_code == EQ_EXPR
6738 && tree_fits_shwi_p (fd->loop.n2)
6739 && ! integer_zerop (fd->loop.n2))
6740 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6741 else
6742 vinit = unshare_expr (vinit);
6744 /* Initialize the iterator variable, so that threads that don't execute
6745 any iterations don't execute the lastprivate clauses by accident. */
6746 gimplify_assign (fd->loop.v, vinit, body_p);
6751 /* Lower code for an OMP loop directive. */
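/* The lowered loop is assembled roughly as (a sketch):

     <input clauses, pre-body>
     [OpenACC head markers]
     V = VINIT;                     <- lastprivate safety initialization
     GIMPLE_OMP_FOR
     <loop body>
     GIMPLE_OMP_CONTINUE (V, V)
     <reduction epilogue> <destructors, dlist>
     GIMPLE_OMP_RETURN [nowait]
     [OpenACC tail markers]
*/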
6753 static void
6754 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6756 tree *rhs_p, block;
6757 struct omp_for_data fd, *fdp = NULL;
6758 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6759 gbind *new_stmt;
6760 gimple_seq omp_for_body, body, dlist;
6761 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6762 size_t i;
6764 push_gimplify_context ();
6766 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6768 block = make_node (BLOCK);
6769 new_stmt = gimple_build_bind (NULL, NULL, block);
6770 /* Replace at gsi right away, so that 'stmt' is no longer a member
6771 of a sequence, as we're going to add to a different
6772 one below. */
6773 gsi_replace (gsi_p, new_stmt, true);
6775 /* Move declarations of temporaries in the loop body before we make
6776 it go away. */
6777 omp_for_body = gimple_omp_body (stmt);
6778 if (!gimple_seq_empty_p (omp_for_body)
6779 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6781 gbind *inner_bind
6782 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6783 tree vars = gimple_bind_vars (inner_bind);
6784 gimple_bind_append_vars (new_stmt, vars);
6785 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6786 keep them on the inner_bind and its block. */
6787 gimple_bind_set_vars (inner_bind, NULL_TREE);
6788 if (gimple_bind_block (inner_bind))
6789 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6792 if (gimple_omp_for_combined_into_p (stmt))
6794 omp_extract_for_data (stmt, &fd, NULL);
6795 fdp = &fd;
6797 /* We need two temporaries with fd.loop.v type (istart/iend)
6798 and then (fd.collapse - 1) temporaries with the same
6799 type for count2 ... countN-1 vars if not constant. */
6800 size_t count = 2;
6801 tree type = fd.iter_type;
6802 if (fd.collapse > 1
6803 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6804 count += fd.collapse - 1;
6805 bool taskreg_for
6806 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6807 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6808 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6809 tree simtc = NULL;
6810 tree clauses = *pc;
6811 if (taskreg_for)
6812 outerc
6813 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6814 OMP_CLAUSE__LOOPTEMP_);
6815 if (ctx->simt_stmt)
6816 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6817 OMP_CLAUSE__LOOPTEMP_);
6818 for (i = 0; i < count; i++)
6820 tree temp;
6821 if (taskreg_for)
6823 gcc_assert (outerc);
6824 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6825 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6826 OMP_CLAUSE__LOOPTEMP_);
6828 else
6830 /* If there are 2 adjacent SIMD stmts, one with a _simt_
6831 clause, another without, make sure they have the same
6832 decls in _looptemp_ clauses, because the outer stmt
6833 they are combined into will look up just one inner_stmt. */
6834 if (ctx->simt_stmt)
6835 temp = OMP_CLAUSE_DECL (simtc);
6836 else
6837 temp = create_tmp_var (type);
6838 insert_decl_map (&ctx->outer->cb, temp, temp);
6840 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6841 OMP_CLAUSE_DECL (*pc) = temp;
6842 pc = &OMP_CLAUSE_CHAIN (*pc);
6843 if (ctx->simt_stmt)
6844 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6845 OMP_CLAUSE__LOOPTEMP_);
6847 *pc = clauses;
6850 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6851 dlist = NULL;
6852 body = NULL;
6853 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6854 fdp);
6855 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6857 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6859 /* Lower the header expressions. At this point, we can assume that
6860 the header is of the form:
6862 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6864 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6865 using the .omp_data_s mapping, if needed. */
6866 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6868 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6869 if (!is_gimple_min_invariant (*rhs_p))
6870 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6871 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6872 recompute_tree_invariant_for_addr_expr (*rhs_p);
6874 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6875 if (!is_gimple_min_invariant (*rhs_p))
6876 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6877 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6878 recompute_tree_invariant_for_addr_expr (*rhs_p);
6880 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6881 if (!is_gimple_min_invariant (*rhs_p))
6882 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6885 /* Once lowered, extract the bounds and clauses. */
6886 omp_extract_for_data (stmt, &fd, NULL);
6888 if (is_gimple_omp_oacc (ctx->stmt)
6889 && !ctx_in_oacc_kernels_region (ctx))
6890 lower_oacc_head_tail (gimple_location (stmt),
6891 gimple_omp_for_clauses (stmt),
6892 &oacc_head, &oacc_tail, ctx);
6894 /* Add OpenACC partitioning and reduction markers just before the loop. */
6895 if (oacc_head)
6896 gimple_seq_add_seq (&body, oacc_head);
6898 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6900 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6901 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6902 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6903 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6905 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6906 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6907 OMP_CLAUSE_LINEAR_STEP (c)
6908 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6909 ctx);
6912 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6913 && gimple_omp_for_grid_phony (stmt));
6914 if (!phony_loop)
6915 gimple_seq_add_stmt (&body, stmt);
6916 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6918 if (!phony_loop)
6919 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6920 fd.loop.v));
6922 /* After the loop, add exit clauses. */
6923 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6925 if (ctx->cancellable)
6926 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6928 gimple_seq_add_seq (&body, dlist);
6930 body = maybe_catch_exception (body);
6932 if (!phony_loop)
6934 /* Region exit marker goes at the end of the loop body. */
6935 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6936 maybe_add_implicit_barrier_cancel (ctx, &body);
6939 /* Add OpenACC joining and reduction markers just after the loop. */
6940 if (oacc_tail)
6941 gimple_seq_add_seq (&body, oacc_tail);
6943 pop_gimplify_context (new_stmt);
6945 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6946 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6947 if (BLOCK_VARS (block))
6948 TREE_USED (block) = 1;
6950 gimple_bind_set_body (new_stmt, body);
6951 gimple_omp_set_body (stmt, NULL);
6952 gimple_omp_for_set_pre_body (stmt, NULL);
6955 /* Callback for walk_stmts. Check if the current statement only contains
6956 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
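/* E.g. a parallel body consisting of nothing but a single
   GIMPLE_OMP_FOR leaves *info at 1, and the caller then marks the
   parallel as combined; a second workshare, or any other statement,
   forces *info to -1.  */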
6958 static tree
6959 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6960 bool *handled_ops_p,
6961 struct walk_stmt_info *wi)
6963 int *info = (int *) wi->info;
6964 gimple *stmt = gsi_stmt (*gsi_p);
6966 *handled_ops_p = true;
6967 switch (gimple_code (stmt))
6969 WALK_SUBSTMTS;
6971 case GIMPLE_DEBUG:
6972 break;
6973 case GIMPLE_OMP_FOR:
6974 case GIMPLE_OMP_SECTIONS:
6975 *info = *info == 0 ? 1 : -1;
6976 break;
6977 default:
6978 *info = -1;
6979 break;
6981 return NULL;
6984 struct omp_taskcopy_context
6986 /* This field must be at the beginning, as we do "inheritance": Some
6987 callback functions for tree-inline.c (e.g., omp_copy_decl)
6988 receive a copy_body_data pointer that is up-casted to an
6989 omp_context pointer. */
6990 copy_body_data cb;
6991 omp_context *ctx;
6994 static tree
6995 task_copyfn_copy_decl (tree var, copy_body_data *cb)
6997 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
6999 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7000 return create_tmp_var (TREE_TYPE (var));
7002 return var;
7005 static tree
7006 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7008 tree name, new_fields = NULL, type, f;
7010 type = lang_hooks.types.make_type (RECORD_TYPE);
7011 name = DECL_NAME (TYPE_NAME (orig_type));
7012 name = build_decl (gimple_location (tcctx->ctx->stmt),
7013 TYPE_DECL, name, type);
7014 TYPE_NAME (type) = name;
7016 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7018 tree new_f = copy_node (f);
7019 DECL_CONTEXT (new_f) = type;
7020 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7021 TREE_CHAIN (new_f) = new_fields;
7022 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7023 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7024 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7025 &tcctx->cb, NULL);
7026 new_fields = new_f;
7027 tcctx->cb.decl_map->put (f, new_f);
7029 TYPE_FIELDS (type) = nreverse (new_fields);
7030 layout_type (type);
7031 return type;
7034 /* Create task copyfn. */
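/* The generated function has, roughly, the signature (a sketch; the
   struct tags are illustrative)

     void __task_copyfn (struct .omp_data_t *dst, struct .omp_data_s *src);

   and copy-constructs the firstprivate fields of *dst from *src,
   including the VLA firstprivates handled in the last pass below.  */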
7036 static void
7037 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7039 struct function *child_cfun;
7040 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7041 tree record_type, srecord_type, bind, list;
7042 bool record_needs_remap = false, srecord_needs_remap = false;
7043 splay_tree_node n;
7044 struct omp_taskcopy_context tcctx;
7045 location_t loc = gimple_location (task_stmt);
7047 child_fn = gimple_omp_task_copy_fn (task_stmt);
7048 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7049 gcc_assert (child_cfun->cfg == NULL);
7050 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7052 /* Reset DECL_CONTEXT on function arguments. */
7053 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7054 DECL_CONTEXT (t) = child_fn;
7056 /* Populate the function. */
7057 push_gimplify_context ();
7058 push_cfun (child_cfun);
7060 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7061 TREE_SIDE_EFFECTS (bind) = 1;
7062 list = NULL;
7063 DECL_SAVED_TREE (child_fn) = bind;
7064 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7066 /* Remap src and dst argument types if needed. */
7067 record_type = ctx->record_type;
7068 srecord_type = ctx->srecord_type;
7069 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7070 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7072 record_needs_remap = true;
7073 break;
7075 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7076 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7078 srecord_needs_remap = true;
7079 break;
7082 if (record_needs_remap || srecord_needs_remap)
7084 memset (&tcctx, '\0', sizeof (tcctx));
7085 tcctx.cb.src_fn = ctx->cb.src_fn;
7086 tcctx.cb.dst_fn = child_fn;
7087 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7088 gcc_checking_assert (tcctx.cb.src_node);
7089 tcctx.cb.dst_node = tcctx.cb.src_node;
7090 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7091 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7092 tcctx.cb.eh_lp_nr = 0;
7093 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7094 tcctx.cb.decl_map = new hash_map<tree, tree>;
7095 tcctx.ctx = ctx;
7097 if (record_needs_remap)
7098 record_type = task_copyfn_remap_type (&tcctx, record_type);
7099 if (srecord_needs_remap)
7100 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7102 else
7103 tcctx.cb.decl_map = NULL;
7105 arg = DECL_ARGUMENTS (child_fn);
7106 TREE_TYPE (arg) = build_pointer_type (record_type);
7107 sarg = DECL_CHAIN (arg);
7108 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7110 /* First pass: initialize temporaries used in record_type and srecord_type
7111 sizes and field offsets. */
7112 if (tcctx.cb.decl_map)
7113 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7114 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7116 tree *p;
7118 decl = OMP_CLAUSE_DECL (c);
7119 p = tcctx.cb.decl_map->get (decl);
7120 if (p == NULL)
7121 continue;
7122 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7123 sf = (tree) n->value;
7124 sf = *tcctx.cb.decl_map->get (sf);
7125 src = build_simple_mem_ref_loc (loc, sarg);
7126 src = omp_build_component_ref (src, sf);
7127 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7128 append_to_statement_list (t, &list);
7131 /* Second pass: copy shared var pointers and copy construct non-VLA
7132 firstprivate vars. */
7133 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7134 switch (OMP_CLAUSE_CODE (c))
7136 splay_tree_key key;
7137 case OMP_CLAUSE_SHARED:
7138 decl = OMP_CLAUSE_DECL (c);
7139 key = (splay_tree_key) decl;
7140 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7141 key = (splay_tree_key) &DECL_UID (decl);
7142 n = splay_tree_lookup (ctx->field_map, key);
7143 if (n == NULL)
7144 break;
7145 f = (tree) n->value;
7146 if (tcctx.cb.decl_map)
7147 f = *tcctx.cb.decl_map->get (f);
7148 n = splay_tree_lookup (ctx->sfield_map, key);
7149 sf = (tree) n->value;
7150 if (tcctx.cb.decl_map)
7151 sf = *tcctx.cb.decl_map->get (sf);
7152 src = build_simple_mem_ref_loc (loc, sarg);
7153 src = omp_build_component_ref (src, sf);
7154 dst = build_simple_mem_ref_loc (loc, arg);
7155 dst = omp_build_component_ref (dst, f);
7156 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7157 append_to_statement_list (t, &list);
7158 break;
7159 case OMP_CLAUSE_FIRSTPRIVATE:
7160 decl = OMP_CLAUSE_DECL (c);
7161 if (is_variable_sized (decl))
7162 break;
7163 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7164 if (n == NULL)
7165 break;
7166 f = (tree) n->value;
7167 if (tcctx.cb.decl_map)
7168 f = *tcctx.cb.decl_map->get (f);
7169 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7170 if (n != NULL)
7172 sf = (tree) n->value;
7173 if (tcctx.cb.decl_map)
7174 sf = *tcctx.cb.decl_map->get (sf);
7175 src = build_simple_mem_ref_loc (loc, sarg);
7176 src = omp_build_component_ref (src, sf);
7177 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7178 src = build_simple_mem_ref_loc (loc, src);
7180 else
7181 src = decl;
7182 dst = build_simple_mem_ref_loc (loc, arg);
7183 dst = omp_build_component_ref (dst, f);
7184 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7185 append_to_statement_list (t, &list);
7186 break;
7187 case OMP_CLAUSE_PRIVATE:
7188 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7189 break;
7190 decl = OMP_CLAUSE_DECL (c);
7191 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7192 f = (tree) n->value;
7193 if (tcctx.cb.decl_map)
7194 f = *tcctx.cb.decl_map->get (f);
7195 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7196 if (n != NULL)
7198 sf = (tree) n->value;
7199 if (tcctx.cb.decl_map)
7200 sf = *tcctx.cb.decl_map->get (sf);
7201 src = build_simple_mem_ref_loc (loc, sarg);
7202 src = omp_build_component_ref (src, sf);
7203 if (use_pointer_for_field (decl, NULL))
7204 src = build_simple_mem_ref_loc (loc, src);
7206 else
7207 src = decl;
7208 dst = build_simple_mem_ref_loc (loc, arg);
7209 dst = omp_build_component_ref (dst, f);
7210 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7211 append_to_statement_list (t, &list);
7212 break;
7213 default:
7214 break;
7217 /* Last pass: handle VLA firstprivates. */
7218 if (tcctx.cb.decl_map)
7219 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7220 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7222 tree ind, ptr, df;
7224 decl = OMP_CLAUSE_DECL (c);
7225 if (!is_variable_sized (decl))
7226 continue;
7227 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7228 if (n == NULL)
7229 continue;
7230 f = (tree) n->value;
7231 f = *tcctx.cb.decl_map->get (f);
7232 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7233 ind = DECL_VALUE_EXPR (decl);
7234 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7235 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7236 n = splay_tree_lookup (ctx->sfield_map,
7237 (splay_tree_key) TREE_OPERAND (ind, 0));
7238 sf = (tree) n->value;
7239 sf = *tcctx.cb.decl_map->get (sf);
7240 src = build_simple_mem_ref_loc (loc, sarg);
7241 src = omp_build_component_ref (src, sf);
7242 src = build_simple_mem_ref_loc (loc, src);
7243 dst = build_simple_mem_ref_loc (loc, arg);
7244 dst = omp_build_component_ref (dst, f);
7245 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7246 append_to_statement_list (t, &list);
7247 n = splay_tree_lookup (ctx->field_map,
7248 (splay_tree_key) TREE_OPERAND (ind, 0));
7249 df = (tree) n->value;
7250 df = *tcctx.cb.decl_map->get (df);
7251 ptr = build_simple_mem_ref_loc (loc, arg);
7252 ptr = omp_build_component_ref (ptr, df);
7253 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7254 build_fold_addr_expr_loc (loc, dst));
7255 append_to_statement_list (t, &list);
7258 t = build1 (RETURN_EXPR, void_type_node, NULL);
7259 append_to_statement_list (t, &list);
7261 if (tcctx.cb.decl_map)
7262 delete tcctx.cb.decl_map;
7263 pop_gimplify_context (NULL);
7264 BIND_EXPR_BODY (bind) = list;
7265 pop_cfun ();
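/* Lower the DEPEND clauses into the array of addresses that the
   runtime expects.  For example (a sketch), "depend(out: y)
   depend(in: x)" produces

     array[0] = (void *) 2;   <- total number of depend operands
     array[1] = (void *) 1;   <- number of out/inout operands
     array[2] = &y;           <- out/inout addresses come first
     array[3] = &x;           <- in addresses follow

   and a new DEPEND clause pointing at the array is prepended to
   *PCLAUSES; *OSEQ gets a clobber of the array once the construct
   is done.  */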
7268 static void
7269 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7271 tree c, clauses;
7272 gimple *g;
7273 size_t n_in = 0, n_out = 0, idx = 2, i;
7275 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7276 gcc_assert (clauses);
7277 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7278 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7279 switch (OMP_CLAUSE_DEPEND_KIND (c))
7281 case OMP_CLAUSE_DEPEND_IN:
7282 n_in++;
7283 break;
7284 case OMP_CLAUSE_DEPEND_OUT:
7285 case OMP_CLAUSE_DEPEND_INOUT:
7286 n_out++;
7287 break;
7288 case OMP_CLAUSE_DEPEND_SOURCE:
7289 case OMP_CLAUSE_DEPEND_SINK:
7290 /* FALLTHRU */
7291 default:
7292 gcc_unreachable ();
7294 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7295 tree array = create_tmp_var (type);
7296 TREE_ADDRESSABLE (array) = 1;
7297 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7298 NULL_TREE);
7299 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7300 gimple_seq_add_stmt (iseq, g);
7301 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7302 NULL_TREE);
7303 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7304 gimple_seq_add_stmt (iseq, g);
7305 for (i = 0; i < 2; i++)
7307 if ((i ? n_in : n_out) == 0)
7308 continue;
7309 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7310 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7311 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7313 tree t = OMP_CLAUSE_DECL (c);
7314 t = fold_convert (ptr_type_node, t);
7315 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7316 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7317 NULL_TREE, NULL_TREE);
7318 g = gimple_build_assign (r, t);
7319 gimple_seq_add_stmt (iseq, g);
7322 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7323 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7324 OMP_CLAUSE_CHAIN (c) = *pclauses;
7325 *pclauses = c;
7326 tree clobber = build_constructor (type, NULL);
7327 TREE_THIS_VOLATILE (clobber) = 1;
7328 g = gimple_build_assign (array, clobber);
7329 gimple_seq_add_stmt (oseq, g);
7332 /* Lower the OpenMP parallel or task directive in the current statement
7333 in GSI_P. CTX holds context information for the directive. */
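/* After lowering, the pieces are sequenced roughly as (a sketch):

     <send clause setup, ilist>
     GIMPLE_OMP_PARALLEL / GIMPLE_OMP_TASK
         .omp_data_i = &.omp_data_o;   <- if there is a data record
         <input clauses, par_ilist>
         BODY
         <reductions, par_rlist> <output clauses, par_olist>
         GIMPLE_OMP_RETURN
     <copy-back and clobbers, olist>
*/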
7335 static void
7336 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7338 tree clauses;
7339 tree child_fn, t;
7340 gimple *stmt = gsi_stmt (*gsi_p);
7341 gbind *par_bind, *bind, *dep_bind = NULL;
7342 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7343 location_t loc = gimple_location (stmt);
7345 clauses = gimple_omp_taskreg_clauses (stmt);
7346 par_bind
7347 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7348 par_body = gimple_bind_body (par_bind);
7349 child_fn = ctx->cb.dst_fn;
7350 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7351 && !gimple_omp_parallel_combined_p (stmt))
7353 struct walk_stmt_info wi;
7354 int ws_num = 0;
7356 memset (&wi, 0, sizeof (wi));
7357 wi.info = &ws_num;
7358 wi.val_only = true;
7359 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7360 if (ws_num == 1)
7361 gimple_omp_parallel_set_combined_p (stmt, true);
7363 gimple_seq dep_ilist = NULL;
7364 gimple_seq dep_olist = NULL;
7365 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7366 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7368 push_gimplify_context ();
7369 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7370 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7371 &dep_ilist, &dep_olist);
7374 if (ctx->srecord_type)
7375 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7377 push_gimplify_context ();
7379 par_olist = NULL;
7380 par_ilist = NULL;
7381 par_rlist = NULL;
7382 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7383 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7384 if (phony_construct && ctx->record_type)
7386 gcc_checking_assert (!ctx->receiver_decl);
7387 ctx->receiver_decl = create_tmp_var
7388 (build_reference_type (ctx->record_type), ".omp_rec");
7390 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7391 lower_omp (&par_body, ctx);
7392 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7393 lower_reduction_clauses (clauses, &par_rlist, ctx);
7395 /* Declare all the variables created by mapping and the variables
7396 declared in the scope of the parallel body. */
7397 record_vars_into (ctx->block_vars, child_fn);
7398 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7400 if (ctx->record_type)
7402 ctx->sender_decl
7403 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7404 : ctx->record_type, ".omp_data_o");
7405 DECL_NAMELESS (ctx->sender_decl) = 1;
7406 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7407 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7410 olist = NULL;
7411 ilist = NULL;
7412 lower_send_clauses (clauses, &ilist, &olist, ctx);
7413 lower_send_shared_vars (&ilist, &olist, ctx);
7415 if (ctx->record_type)
7417 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7418 TREE_THIS_VOLATILE (clobber) = 1;
7419 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7420 clobber));
7423 /* Once all the expansions are done, sequence all the different
7424 fragments inside gimple_omp_body. */
7426 new_body = NULL;
7428 if (ctx->record_type)
7430 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7431 /* fixup_child_record_type might have changed receiver_decl's type. */
7432 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7433 gimple_seq_add_stmt (&new_body,
7434 gimple_build_assign (ctx->receiver_decl, t));
7437 gimple_seq_add_seq (&new_body, par_ilist);
7438 gimple_seq_add_seq (&new_body, par_body);
7439 gimple_seq_add_seq (&new_body, par_rlist);
7440 if (ctx->cancellable)
7441 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7442 gimple_seq_add_seq (&new_body, par_olist);
7443 new_body = maybe_catch_exception (new_body);
7444 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7445 gimple_seq_add_stmt (&new_body,
7446 gimple_build_omp_continue (integer_zero_node,
7447 integer_zero_node));
7448 if (!phony_construct)
7450 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7451 gimple_omp_set_body (stmt, new_body);
7454 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7455 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7456 gimple_bind_add_seq (bind, ilist);
7457 if (!phony_construct)
7458 gimple_bind_add_stmt (bind, stmt);
7459 else
7460 gimple_bind_add_seq (bind, new_body);
7461 gimple_bind_add_seq (bind, olist);
7463 pop_gimplify_context (NULL);
7465 if (dep_bind)
7467 gimple_bind_add_seq (dep_bind, dep_ilist);
7468 gimple_bind_add_stmt (dep_bind, bind);
7469 gimple_bind_add_seq (dep_bind, dep_olist);
7470 pop_gimplify_context (dep_bind);
7474 /* Lower the GIMPLE_OMP_TARGET in the current statement
7475 in GSI_P. CTX holds context information for the directive. */
7477 static void
7478 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7480 tree clauses;
7481 tree child_fn, t, c;
7482 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7483 gbind *tgt_bind, *bind, *dep_bind = NULL;
7484 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7485 location_t loc = gimple_location (stmt);
7486 bool offloaded, data_region;
7487 unsigned int map_cnt = 0;
7489 offloaded = is_gimple_omp_offloaded (stmt);
7490 switch (gimple_omp_target_kind (stmt))
7492 case GF_OMP_TARGET_KIND_REGION:
7493 case GF_OMP_TARGET_KIND_UPDATE:
7494 case GF_OMP_TARGET_KIND_ENTER_DATA:
7495 case GF_OMP_TARGET_KIND_EXIT_DATA:
7496 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7497 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7498 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7499 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7500 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7501 data_region = false;
7502 break;
7503 case GF_OMP_TARGET_KIND_DATA:
7504 case GF_OMP_TARGET_KIND_OACC_DATA:
7505 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7506 data_region = true;
7507 break;
7508 default:
7509 gcc_unreachable ();
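   /* Of these, only 'omp target data', 'acc data' and 'acc host_data'
      are data regions: they keep their body in place and merely manage
      mappings around it.  The others are either offloaded (they get a
      child function, per is_gimple_omp_offloaded) or standalone
      directives such as 'target update' with no body at all.  */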
7512 clauses = gimple_omp_target_clauses (stmt);
7514 gimple_seq dep_ilist = NULL;
7515 gimple_seq dep_olist = NULL;
7516 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7518 push_gimplify_context ();
7519 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7520 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7521 &dep_ilist, &dep_olist);
7524 tgt_bind = NULL;
7525 tgt_body = NULL;
7526 if (offloaded)
7528 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7529 tgt_body = gimple_bind_body (tgt_bind);
7531 else if (data_region)
7532 tgt_body = gimple_omp_body (stmt);
7533 child_fn = ctx->cb.dst_fn;
7535 push_gimplify_context ();
7536 fplist = NULL;
7538 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7539 switch (OMP_CLAUSE_CODE (c))
7541 tree var, x;
7543 default:
7544 break;
7545 case OMP_CLAUSE_MAP:
7546 #if CHECKING_P
7547 /* First check what we're prepared to handle in the following. */
7548 switch (OMP_CLAUSE_MAP_KIND (c))
7550 case GOMP_MAP_ALLOC:
7551 case GOMP_MAP_TO:
7552 case GOMP_MAP_FROM:
7553 case GOMP_MAP_TOFROM:
7554 case GOMP_MAP_POINTER:
7555 case GOMP_MAP_TO_PSET:
7556 case GOMP_MAP_DELETE:
7557 case GOMP_MAP_RELEASE:
7558 case GOMP_MAP_ALWAYS_TO:
7559 case GOMP_MAP_ALWAYS_FROM:
7560 case GOMP_MAP_ALWAYS_TOFROM:
7561 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7562 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7563 case GOMP_MAP_STRUCT:
7564 case GOMP_MAP_ALWAYS_POINTER:
7565 break;
7566 case GOMP_MAP_FORCE_ALLOC:
7567 case GOMP_MAP_FORCE_TO:
7568 case GOMP_MAP_FORCE_FROM:
7569 case GOMP_MAP_FORCE_TOFROM:
7570 case GOMP_MAP_FORCE_PRESENT:
7571 case GOMP_MAP_FORCE_DEVICEPTR:
7572 case GOMP_MAP_DEVICE_RESIDENT:
7573 case GOMP_MAP_LINK:
7574 gcc_assert (is_gimple_omp_oacc (stmt));
7575 break;
7576 default:
7577 gcc_unreachable ();
7579 #endif
7580 /* FALLTHRU */
7581 case OMP_CLAUSE_TO:
7582 case OMP_CLAUSE_FROM:
7583 oacc_firstprivate:
7584 var = OMP_CLAUSE_DECL (c);
7585 if (!DECL_P (var))
7587 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7588 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7589 && (OMP_CLAUSE_MAP_KIND (c)
7590 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7591 map_cnt++;
7592 continue;
7595 if (DECL_SIZE (var)
7596 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7598 tree var2 = DECL_VALUE_EXPR (var);
7599 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7600 var2 = TREE_OPERAND (var2, 0);
7601 gcc_assert (DECL_P (var2));
7602 var = var2;
7605 if (offloaded
7606 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7607 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7608 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7610 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7612 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7613 && varpool_node::get_create (var)->offloadable)
7614 continue;
7616 tree type = build_pointer_type (TREE_TYPE (var));
7617 tree new_var = lookup_decl (var, ctx);
7618 x = create_tmp_var_raw (type, get_name (new_var));
7619 gimple_add_tmp_var (x);
7620 x = build_simple_mem_ref (x);
7621 SET_DECL_VALUE_EXPR (new_var, x);
7622 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7624 continue;
7627 if (!maybe_lookup_field (var, ctx))
7628 continue;
7630 /* Don't remap oacc parallel reduction variables, because the
7631 intermediate result must be local to each gang. */
7632 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7633 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7635 x = build_receiver_ref (var, true, ctx);
7636 tree new_var = lookup_decl (var, ctx);
7638 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7639 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7640 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7641 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7642 x = build_simple_mem_ref (x);
7643 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7645 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7646 if (omp_is_reference (new_var))
7648 /* Create a local object to hold the instance
7649 value. */
7650 tree type = TREE_TYPE (TREE_TYPE (new_var));
7651 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7652 tree inst = create_tmp_var (type, id);
7653 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7654 x = build_fold_addr_expr (inst);
7656 gimplify_assign (new_var, x, &fplist);
7658 else if (DECL_P (new_var))
7660 SET_DECL_VALUE_EXPR (new_var, x);
7661 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7663 else
7664 gcc_unreachable ();
7666 map_cnt++;
7667 break;
7669 case OMP_CLAUSE_FIRSTPRIVATE:
7670 if (is_oacc_parallel (ctx))
7671 goto oacc_firstprivate;
7672 map_cnt++;
7673 var = OMP_CLAUSE_DECL (c);
7674 if (!omp_is_reference (var)
7675 && !is_gimple_reg_type (TREE_TYPE (var)))
7677 tree new_var = lookup_decl (var, ctx);
7678 if (is_variable_sized (var))
7680 tree pvar = DECL_VALUE_EXPR (var);
7681 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7682 pvar = TREE_OPERAND (pvar, 0);
7683 gcc_assert (DECL_P (pvar));
7684 tree new_pvar = lookup_decl (pvar, ctx);
7685 x = build_fold_indirect_ref (new_pvar);
7686 TREE_THIS_NOTRAP (x) = 1;
7688 else
7689 x = build_receiver_ref (var, true, ctx);
7690 SET_DECL_VALUE_EXPR (new_var, x);
7691 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7693 break;
7695 case OMP_CLAUSE_PRIVATE:
7696 if (is_gimple_omp_oacc (ctx->stmt))
7697 break;
7698 var = OMP_CLAUSE_DECL (c);
7699 if (is_variable_sized (var))
7701 tree new_var = lookup_decl (var, ctx);
7702 tree pvar = DECL_VALUE_EXPR (var);
7703 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7704 pvar = TREE_OPERAND (pvar, 0);
7705 gcc_assert (DECL_P (pvar));
7706 tree new_pvar = lookup_decl (pvar, ctx);
7707 x = build_fold_indirect_ref (new_pvar);
7708 TREE_THIS_NOTRAP (x) = 1;
7709 SET_DECL_VALUE_EXPR (new_var, x);
7710 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7712 break;
7714 case OMP_CLAUSE_USE_DEVICE_PTR:
7715 case OMP_CLAUSE_IS_DEVICE_PTR:
7716 var = OMP_CLAUSE_DECL (c);
7717 map_cnt++;
7718 if (is_variable_sized (var))
7720 tree new_var = lookup_decl (var, ctx);
7721 tree pvar = DECL_VALUE_EXPR (var);
7722 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7723 pvar = TREE_OPERAND (pvar, 0);
7724 gcc_assert (DECL_P (pvar));
7725 tree new_pvar = lookup_decl (pvar, ctx);
7726 x = build_fold_indirect_ref (new_pvar);
7727 TREE_THIS_NOTRAP (x) = 1;
7728 SET_DECL_VALUE_EXPR (new_var, x);
7729 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7731 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7733 tree new_var = lookup_decl (var, ctx);
7734 tree type = build_pointer_type (TREE_TYPE (var));
7735 x = create_tmp_var_raw (type, get_name (new_var));
7736 gimple_add_tmp_var (x);
7737 x = build_simple_mem_ref (x);
7738 SET_DECL_VALUE_EXPR (new_var, x);
7739 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7741 else
7743 tree new_var = lookup_decl (var, ctx);
7744 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7745 gimple_add_tmp_var (x);
7746 SET_DECL_VALUE_EXPR (new_var, x);
7747 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7749 break;
7752 if (offloaded)
7754 target_nesting_level++;
7755 lower_omp (&tgt_body, ctx);
7756 target_nesting_level--;
7758 else if (data_region)
7759 lower_omp (&tgt_body, ctx);
7761 if (offloaded)
7763 /* Declare all the variables created by mapping and the variables
7764 declared in the scope of the target body. */
7765 record_vars_into (ctx->block_vars, child_fn);
7766 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7769 olist = NULL;
7770 ilist = NULL;
7771 if (ctx->record_type)
7773 ctx->sender_decl
7774 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7775 DECL_NAMELESS (ctx->sender_decl) = 1;
7776 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7777 t = make_tree_vec (3);
7778 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7779 TREE_VEC_ELT (t, 1)
7780 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7781 ".omp_data_sizes");
7782 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7783 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7784 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7785 tree tkind_type = short_unsigned_type_node;
7786 int talign_shift = 8;
7787 TREE_VEC_ELT (t, 2)
7788 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7789 ".omp_data_kinds");
7790 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7791 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7792 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7793 gimple_omp_target_set_data_arg (stmt, t);
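      /* The data argument of an offloaded region is a 3-element TREE_VEC:
         [0] .omp_data_arr   - address (or value) of each mapped object,
         [1] .omp_data_sizes - size in bytes of each mapping,
         [2] .omp_data_kinds - map kind plus encoded alignment (see below).
         The sizes and kinds arrays may stay TREE_STATIC only while every
         element is a compile-time constant; the loops below clear the
         flag otherwise.  */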
7795 vec<constructor_elt, va_gc> *vsize;
7796 vec<constructor_elt, va_gc> *vkind;
7797 vec_alloc (vsize, map_cnt);
7798 vec_alloc (vkind, map_cnt);
7799 unsigned int map_idx = 0;
7801 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7802 switch (OMP_CLAUSE_CODE (c))
7804 tree ovar, nc, s, purpose, var, x, type;
7805 unsigned int talign;
7807 default:
7808 break;
7810 case OMP_CLAUSE_MAP:
7811 case OMP_CLAUSE_TO:
7812 case OMP_CLAUSE_FROM:
7813 oacc_firstprivate_map:
7814 nc = c;
7815 ovar = OMP_CLAUSE_DECL (c);
7816 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7817 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7818 || (OMP_CLAUSE_MAP_KIND (c)
7819 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7820 break;
7821 if (!DECL_P (ovar))
7823 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7824 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7826 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7827 == get_base_address (ovar));
7828 nc = OMP_CLAUSE_CHAIN (c);
7829 ovar = OMP_CLAUSE_DECL (nc);
7831 else
7833 tree x = build_sender_ref (ovar, ctx);
7834 tree v
7835 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7836 gimplify_assign (x, v, &ilist);
7837 nc = NULL_TREE;
7840 else
7842 if (DECL_SIZE (ovar)
7843 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7845 tree ovar2 = DECL_VALUE_EXPR (ovar);
7846 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7847 ovar2 = TREE_OPERAND (ovar2, 0);
7848 gcc_assert (DECL_P (ovar2));
7849 ovar = ovar2;
7851 if (!maybe_lookup_field (ovar, ctx))
7852 continue;
7855 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7856 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7857 talign = DECL_ALIGN_UNIT (ovar);
7858 if (nc)
7860 var = lookup_decl_in_outer_ctx (ovar, ctx);
7861 x = build_sender_ref (ovar, ctx);
7863 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7864 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7865 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7866 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7868 gcc_assert (offloaded);
7869 tree avar
7870 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7871 mark_addressable (avar);
7872 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7873 talign = DECL_ALIGN_UNIT (avar);
7874 avar = build_fold_addr_expr (avar);
7875 gimplify_assign (x, avar, &ilist);
7877 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7879 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7880 if (!omp_is_reference (var))
7882 if (is_gimple_reg (var)
7883 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7884 TREE_NO_WARNING (var) = 1;
7885 var = build_fold_addr_expr (var);
7887 else
7888 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7889 gimplify_assign (x, var, &ilist);
7891 else if (is_gimple_reg (var))
7893 gcc_assert (offloaded);
7894 tree avar = create_tmp_var (TREE_TYPE (var));
7895 mark_addressable (avar);
7896 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7897 if (GOMP_MAP_COPY_TO_P (map_kind)
7898 || map_kind == GOMP_MAP_POINTER
7899 || map_kind == GOMP_MAP_TO_PSET
7900 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7902 /* If we need to initialize a temporary
7903 with VAR because it is not addressable, and
7904 the variable hasn't been initialized yet, then
7905 we'll get a warning for the store to avar.
7906 Don't warn in that case; the mapping might
7907 be implicit. */
7908 TREE_NO_WARNING (var) = 1;
7909 gimplify_assign (avar, var, &ilist);
7911 avar = build_fold_addr_expr (avar);
7912 gimplify_assign (x, avar, &ilist);
7913 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7914 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7915 && !TYPE_READONLY (TREE_TYPE (var)))
7917 x = unshare_expr (x);
7918 x = build_simple_mem_ref (x);
7919 gimplify_assign (var, x, &olist);
7922 else
7924 var = build_fold_addr_expr (var);
7925 gimplify_assign (x, var, &ilist);
7928 s = NULL_TREE;
7929 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7931 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7932 s = TREE_TYPE (ovar);
7933 if (TREE_CODE (s) == REFERENCE_TYPE)
7934 s = TREE_TYPE (s);
7935 s = TYPE_SIZE_UNIT (s);
7937 else
7938 s = OMP_CLAUSE_SIZE (c);
7939 if (s == NULL_TREE)
7940 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7941 s = fold_convert (size_type_node, s);
7942 purpose = size_int (map_idx++);
7943 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7944 if (TREE_CODE (s) != INTEGER_CST)
7945 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7947 unsigned HOST_WIDE_INT tkind, tkind_zero;
7948 switch (OMP_CLAUSE_CODE (c))
7950 case OMP_CLAUSE_MAP:
7951 tkind = OMP_CLAUSE_MAP_KIND (c);
7952 tkind_zero = tkind;
7953 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7954 switch (tkind)
7956 case GOMP_MAP_ALLOC:
7957 case GOMP_MAP_TO:
7958 case GOMP_MAP_FROM:
7959 case GOMP_MAP_TOFROM:
7960 case GOMP_MAP_ALWAYS_TO:
7961 case GOMP_MAP_ALWAYS_FROM:
7962 case GOMP_MAP_ALWAYS_TOFROM:
7963 case GOMP_MAP_RELEASE:
7964 case GOMP_MAP_FORCE_TO:
7965 case GOMP_MAP_FORCE_FROM:
7966 case GOMP_MAP_FORCE_TOFROM:
7967 case GOMP_MAP_FORCE_PRESENT:
7968 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7969 break;
7970 case GOMP_MAP_DELETE:
7971 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7972 default:
7973 break;
7975 if (tkind_zero != tkind)
7977 if (integer_zerop (s))
7978 tkind = tkind_zero;
7979 else if (integer_nonzerop (s))
7980 tkind_zero = tkind;
7982 break;
7983 case OMP_CLAUSE_FIRSTPRIVATE:
7984 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7985 tkind = GOMP_MAP_TO;
7986 tkind_zero = tkind;
7987 break;
7988 case OMP_CLAUSE_TO:
7989 tkind = GOMP_MAP_TO;
7990 tkind_zero = tkind;
7991 break;
7992 case OMP_CLAUSE_FROM:
7993 tkind = GOMP_MAP_FROM;
7994 tkind_zero = tkind;
7995 break;
7996 default:
7997 gcc_unreachable ();
7999 gcc_checking_assert (tkind
8000 < (HOST_WIDE_INT_C (1U) << talign_shift));
8001 gcc_checking_assert (tkind_zero
8002 < (HOST_WIDE_INT_C (1U) << talign_shift));
8003 talign = ceil_log2 (talign);
8004 tkind |= talign << talign_shift;
8005 tkind_zero |= talign << talign_shift;
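	    /* Each kinds[] element packs the map kind into the low
	       TALIGN_SHIFT (8) bits and ceil_log2 of the alignment
	       above them; e.g. a tofrom mapping (GOMP_MAP_TOFROM == 3)
	       of an 8-byte-aligned object encodes as 3 | (3 << 8)
	       == 0x303.  */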
8006 gcc_checking_assert (tkind
8007 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8008 gcc_checking_assert (tkind_zero
8009 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8010 if (tkind == tkind_zero)
8011 x = build_int_cstu (tkind_type, tkind);
8012 else
8014 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8015 x = build3 (COND_EXPR, tkind_type,
8016 fold_build2 (EQ_EXPR, boolean_type_node,
8017 unshare_expr (s), size_zero_node),
8018 build_int_cstu (tkind_type, tkind_zero),
8019 build_int_cstu (tkind_type, tkind));
8021 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8022 if (nc && nc != c)
8023 c = nc;
8024 break;
8026 case OMP_CLAUSE_FIRSTPRIVATE:
8027 if (is_oacc_parallel (ctx))
8028 goto oacc_firstprivate_map;
8029 ovar = OMP_CLAUSE_DECL (c);
8030 if (omp_is_reference (ovar))
8031 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8032 else
8033 talign = DECL_ALIGN_UNIT (ovar);
8034 var = lookup_decl_in_outer_ctx (ovar, ctx);
8035 x = build_sender_ref (ovar, ctx);
8036 tkind = GOMP_MAP_FIRSTPRIVATE;
8037 type = TREE_TYPE (ovar);
8038 if (omp_is_reference (ovar))
8039 type = TREE_TYPE (type);
8040 if ((INTEGRAL_TYPE_P (type)
8041 && TYPE_PRECISION (type) <= POINTER_SIZE)
8042 || TREE_CODE (type) == POINTER_TYPE)
8044 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8045 tree t = var;
8046 if (omp_is_reference (var))
8047 t = build_simple_mem_ref (var);
8048 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8049 TREE_NO_WARNING (var) = 1;
8050 if (TREE_CODE (type) != POINTER_TYPE)
8051 t = fold_convert (pointer_sized_int_node, t);
8052 t = fold_convert (TREE_TYPE (x), t);
8053 gimplify_assign (x, t, &ilist);
8055 else if (omp_is_reference (var))
8056 gimplify_assign (x, var, &ilist);
8057 else if (is_gimple_reg (var))
8059 tree avar = create_tmp_var (TREE_TYPE (var));
8060 mark_addressable (avar);
8061 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8062 TREE_NO_WARNING (var) = 1;
8063 gimplify_assign (avar, var, &ilist);
8064 avar = build_fold_addr_expr (avar);
8065 gimplify_assign (x, avar, &ilist);
8067 else
8069 var = build_fold_addr_expr (var);
8070 gimplify_assign (x, var, &ilist);
8072 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8073 s = size_int (0);
8074 else if (omp_is_reference (ovar))
8075 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8076 else
8077 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8078 s = fold_convert (size_type_node, s);
8079 purpose = size_int (map_idx++);
8080 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8081 if (TREE_CODE (s) != INTEGER_CST)
8082 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8084 gcc_checking_assert (tkind
8085 < (HOST_WIDE_INT_C (1U) << talign_shift));
8086 talign = ceil_log2 (talign);
8087 tkind |= talign << talign_shift;
8088 gcc_checking_assert (tkind
8089 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8090 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8091 build_int_cstu (tkind_type, tkind));
8092 break;
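	  /* Note the GOMP_MAP_FIRSTPRIVATE_INT trick above: scalars no
	     wider than a pointer are passed by value, smuggled through
	     the .omp_data_arr slot itself with size 0, so e.g. a
	     firstprivate 'int' needs no device-side mapping at all.  */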
8094 case OMP_CLAUSE_USE_DEVICE_PTR:
8095 case OMP_CLAUSE_IS_DEVICE_PTR:
8096 ovar = OMP_CLAUSE_DECL (c);
8097 var = lookup_decl_in_outer_ctx (ovar, ctx);
8098 x = build_sender_ref (ovar, ctx);
8099 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8100 tkind = GOMP_MAP_USE_DEVICE_PTR;
8101 else
8102 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8103 type = TREE_TYPE (ovar);
8104 if (TREE_CODE (type) == ARRAY_TYPE)
8105 var = build_fold_addr_expr (var);
8106 else
8108 if (omp_is_reference (ovar))
8110 type = TREE_TYPE (type);
8111 if (TREE_CODE (type) != ARRAY_TYPE)
8112 var = build_simple_mem_ref (var);
8113 var = fold_convert (TREE_TYPE (x), var);
8116 gimplify_assign (x, var, &ilist);
8117 s = size_int (0);
8118 purpose = size_int (map_idx++);
8119 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8120 gcc_checking_assert (tkind
8121 < (HOST_WIDE_INT_C (1U) << talign_shift));
8122 gcc_checking_assert (tkind
8123 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8124 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8125 build_int_cstu (tkind_type, tkind));
8126 break;
8129 gcc_assert (map_idx == map_cnt);
8131 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8132 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8133 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8134 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8135 for (int i = 1; i <= 2; i++)
8136 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8138 gimple_seq initlist = NULL;
8139 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8140 TREE_VEC_ELT (t, i)),
8141 &initlist, true, NULL_TREE);
8142 gimple_seq_add_seq (&ilist, initlist);
8144 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8145 NULL);
8146 TREE_THIS_VOLATILE (clobber) = 1;
8147 gimple_seq_add_stmt (&olist,
8148 gimple_build_assign (TREE_VEC_ELT (t, i),
8149 clobber));
8152 tree clobber = build_constructor (ctx->record_type, NULL);
8153 TREE_THIS_VOLATILE (clobber) = 1;
8154 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8155 clobber));
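      /* The expansion pass will hand these three arrays to the libgomp
         entry point chosen for the construct (e.g. GOMP_target_ext for
         an OpenMP target region), which performs the actual mapping.  */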
8158 /* Once all the expansions are done, sequence all the different
8159 fragments inside gimple_omp_body. */
8161 new_body = NULL;
8163 if (offloaded
8164 && ctx->record_type)
8166 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8167 /* fixup_child_record_type might have changed receiver_decl's type. */
8168 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8169 gimple_seq_add_stmt (&new_body,
8170 gimple_build_assign (ctx->receiver_decl, t));
8172 gimple_seq_add_seq (&new_body, fplist);
8174 if (offloaded || data_region)
8176 tree prev = NULL_TREE;
8177 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8178 switch (OMP_CLAUSE_CODE (c))
8180 tree var, x;
8181 default:
8182 break;
8183 case OMP_CLAUSE_FIRSTPRIVATE:
8184 if (is_gimple_omp_oacc (ctx->stmt))
8185 break;
8186 var = OMP_CLAUSE_DECL (c);
8187 if (omp_is_reference (var)
8188 || is_gimple_reg_type (TREE_TYPE (var)))
8190 tree new_var = lookup_decl (var, ctx);
8191 tree type;
8192 type = TREE_TYPE (var);
8193 if (omp_is_reference (var))
8194 type = TREE_TYPE (type);
8195 if ((INTEGRAL_TYPE_P (type)
8196 && TYPE_PRECISION (type) <= POINTER_SIZE)
8197 || TREE_CODE (type) == POINTER_TYPE)
8199 x = build_receiver_ref (var, false, ctx);
8200 if (TREE_CODE (type) != POINTER_TYPE)
8201 x = fold_convert (pointer_sized_int_node, x);
8202 x = fold_convert (type, x);
8203 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8204 fb_rvalue);
8205 if (omp_is_reference (var))
8207 tree v = create_tmp_var_raw (type, get_name (var));
8208 gimple_add_tmp_var (v);
8209 TREE_ADDRESSABLE (v) = 1;
8210 gimple_seq_add_stmt (&new_body,
8211 gimple_build_assign (v, x));
8212 x = build_fold_addr_expr (v);
8214 gimple_seq_add_stmt (&new_body,
8215 gimple_build_assign (new_var, x));
8217 else
8219 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8220 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8221 fb_rvalue);
8222 gimple_seq_add_stmt (&new_body,
8223 gimple_build_assign (new_var, x));
8226 else if (is_variable_sized (var))
8228 tree pvar = DECL_VALUE_EXPR (var);
8229 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8230 pvar = TREE_OPERAND (pvar, 0);
8231 gcc_assert (DECL_P (pvar));
8232 tree new_var = lookup_decl (pvar, ctx);
8233 x = build_receiver_ref (var, false, ctx);
8234 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8235 gimple_seq_add_stmt (&new_body,
8236 gimple_build_assign (new_var, x));
8238 break;
8239 case OMP_CLAUSE_PRIVATE:
8240 if (is_gimple_omp_oacc (ctx->stmt))
8241 break;
8242 var = OMP_CLAUSE_DECL (c);
8243 if (omp_is_reference (var))
8245 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8246 tree new_var = lookup_decl (var, ctx);
8247 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8248 if (TREE_CONSTANT (x))
8250 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8251 get_name (var));
8252 gimple_add_tmp_var (x);
8253 TREE_ADDRESSABLE (x) = 1;
8254 x = build_fold_addr_expr_loc (clause_loc, x);
8256 else
8257 break;
8259 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8260 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8261 gimple_seq_add_stmt (&new_body,
8262 gimple_build_assign (new_var, x));
8264 break;
8265 case OMP_CLAUSE_USE_DEVICE_PTR:
8266 case OMP_CLAUSE_IS_DEVICE_PTR:
8267 var = OMP_CLAUSE_DECL (c);
8268 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8269 x = build_sender_ref (var, ctx);
8270 else
8271 x = build_receiver_ref (var, false, ctx);
8272 if (is_variable_sized (var))
8274 tree pvar = DECL_VALUE_EXPR (var);
8275 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8276 pvar = TREE_OPERAND (pvar, 0);
8277 gcc_assert (DECL_P (pvar));
8278 tree new_var = lookup_decl (pvar, ctx);
8279 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8280 gimple_seq_add_stmt (&new_body,
8281 gimple_build_assign (new_var, x));
8283 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8285 tree new_var = lookup_decl (var, ctx);
8286 new_var = DECL_VALUE_EXPR (new_var);
8287 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8288 new_var = TREE_OPERAND (new_var, 0);
8289 gcc_assert (DECL_P (new_var));
8290 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8291 gimple_seq_add_stmt (&new_body,
8292 gimple_build_assign (new_var, x));
8294 else
8296 tree type = TREE_TYPE (var);
8297 tree new_var = lookup_decl (var, ctx);
8298 if (omp_is_reference (var))
8300 type = TREE_TYPE (type);
8301 if (TREE_CODE (type) != ARRAY_TYPE)
8303 tree v = create_tmp_var_raw (type, get_name (var));
8304 gimple_add_tmp_var (v);
8305 TREE_ADDRESSABLE (v) = 1;
8306 x = fold_convert (type, x);
8307 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8308 fb_rvalue);
8309 gimple_seq_add_stmt (&new_body,
8310 gimple_build_assign (v, x));
8311 x = build_fold_addr_expr (v);
8314 new_var = DECL_VALUE_EXPR (new_var);
8315 x = fold_convert (TREE_TYPE (new_var), x);
8316 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8317 gimple_seq_add_stmt (&new_body,
8318 gimple_build_assign (new_var, x));
8320 break;
8322 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8323 so that any firstprivate vars holding their OMP_CLAUSE_SIZE have
8324 already been handled, and likewise OMP_CLAUSE_PRIVATE for VLAs
8325 or references to VLAs. */
8326 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8327 switch (OMP_CLAUSE_CODE (c))
8329 tree var;
8330 default:
8331 break;
8332 case OMP_CLAUSE_MAP:
8333 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8334 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8336 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8337 poly_int64 offset = 0;
8338 gcc_assert (prev);
8339 var = OMP_CLAUSE_DECL (c);
8340 if (DECL_P (var)
8341 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8342 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8343 ctx))
8344 && varpool_node::get_create (var)->offloadable)
8345 break;
8346 if (TREE_CODE (var) == INDIRECT_REF
8347 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8348 var = TREE_OPERAND (var, 0);
8349 if (TREE_CODE (var) == COMPONENT_REF)
8351 var = get_addr_base_and_unit_offset (var, &offset);
8352 gcc_assert (var != NULL_TREE && DECL_P (var));
8354 else if (DECL_SIZE (var)
8355 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8357 tree var2 = DECL_VALUE_EXPR (var);
8358 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8359 var2 = TREE_OPERAND (var2, 0);
8360 gcc_assert (DECL_P (var2));
8361 var = var2;
8363 tree new_var = lookup_decl (var, ctx), x;
8364 tree type = TREE_TYPE (new_var);
8365 bool is_ref;
8366 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8367 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8368 == COMPONENT_REF))
8370 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8371 is_ref = true;
8372 new_var = build2 (MEM_REF, type,
8373 build_fold_addr_expr (new_var),
8374 build_int_cst (build_pointer_type (type),
8375 offset));
8377 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8379 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8380 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8381 new_var = build2 (MEM_REF, type,
8382 build_fold_addr_expr (new_var),
8383 build_int_cst (build_pointer_type (type),
8384 offset));
8386 else
8387 is_ref = omp_is_reference (var);
8388 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8389 is_ref = false;
8390 bool ref_to_array = false;
8391 if (is_ref)
8393 type = TREE_TYPE (type);
8394 if (TREE_CODE (type) == ARRAY_TYPE)
8396 type = build_pointer_type (type);
8397 ref_to_array = true;
8400 else if (TREE_CODE (type) == ARRAY_TYPE)
8402 tree decl2 = DECL_VALUE_EXPR (new_var);
8403 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8404 decl2 = TREE_OPERAND (decl2, 0);
8405 gcc_assert (DECL_P (decl2));
8406 new_var = decl2;
8407 type = TREE_TYPE (new_var);
8409 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8410 x = fold_convert_loc (clause_loc, type, x);
8411 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8413 tree bias = OMP_CLAUSE_SIZE (c);
8414 if (DECL_P (bias))
8415 bias = lookup_decl (bias, ctx);
8416 bias = fold_convert_loc (clause_loc, sizetype, bias);
8417 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8418 bias);
8419 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8420 TREE_TYPE (x), x, bias);
8422 if (ref_to_array)
8423 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8424 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8425 if (is_ref && !ref_to_array)
8427 tree t = create_tmp_var_raw (type, get_name (var));
8428 gimple_add_tmp_var (t);
8429 TREE_ADDRESSABLE (t) = 1;
8430 gimple_seq_add_stmt (&new_body,
8431 gimple_build_assign (t, x));
8432 x = build_fold_addr_expr_loc (clause_loc, t);
8434 gimple_seq_add_stmt (&new_body,
8435 gimple_build_assign (new_var, x));
8436 prev = NULL_TREE;
8438 else if (OMP_CLAUSE_CHAIN (c)
8439 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8440 == OMP_CLAUSE_MAP
8441 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8442 == GOMP_MAP_FIRSTPRIVATE_POINTER
8443 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8444 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8445 prev = c;
8446 break;
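	  /* For GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} the pointer
	     received through PREV's mapping may be biased: for an array
	     section not starting at element 0, OMP_CLAUSE_SIZE holds
	     the bias, and the NEGATE_EXPR/POINTER_PLUS_EXPR above
	     subtracts it again so the private pointer ends up at the
	     expected base.  */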
8447 case OMP_CLAUSE_PRIVATE:
8448 var = OMP_CLAUSE_DECL (c);
8449 if (is_variable_sized (var))
8451 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8452 tree new_var = lookup_decl (var, ctx);
8453 tree pvar = DECL_VALUE_EXPR (var);
8454 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8455 pvar = TREE_OPERAND (pvar, 0);
8456 gcc_assert (DECL_P (pvar));
8457 tree new_pvar = lookup_decl (pvar, ctx);
8458 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8459 tree al = size_int (DECL_ALIGN (var));
8460 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8461 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8462 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8463 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8464 gimple_seq_add_stmt (&new_body,
8465 gimple_build_assign (new_pvar, x));
8467 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8469 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8470 tree new_var = lookup_decl (var, ctx);
8471 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8472 if (TREE_CONSTANT (x))
8473 break;
8474 else
8476 tree atmp
8477 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8478 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8479 tree al = size_int (TYPE_ALIGN (rtype));
8480 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8483 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8484 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8485 gimple_seq_add_stmt (&new_body,
8486 gimple_build_assign (new_var, x));
8488 break;
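	  /* Private VLAs, and references whose pointee size is not
	     constant, cannot be given static storage in the offloaded
	     function; the code above materializes them on region entry
	     with __builtin_alloca_with_align and redirects the
	     DECL_VALUE_EXPR base at that storage.  */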
8491 gimple_seq fork_seq = NULL;
8492 gimple_seq join_seq = NULL;
8494 if (is_oacc_parallel (ctx))
8496 /* If there are reductions on the offloaded region itself, treat
8497 them as a dummy GANG loop. */
8498 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8500 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8501 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8504 gimple_seq_add_seq (&new_body, fork_seq);
8505 gimple_seq_add_seq (&new_body, tgt_body);
8506 gimple_seq_add_seq (&new_body, join_seq);
8508 if (offloaded)
8509 new_body = maybe_catch_exception (new_body);
8511 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8512 gimple_omp_set_body (stmt, new_body);
8515 bind = gimple_build_bind (NULL, NULL,
8516 tgt_bind ? gimple_bind_block (tgt_bind)
8517 : NULL_TREE);
8518 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8519 gimple_bind_add_seq (bind, ilist);
8520 gimple_bind_add_stmt (bind, stmt);
8521 gimple_bind_add_seq (bind, olist);
8523 pop_gimplify_context (NULL);
8525 if (dep_bind)
8527 gimple_bind_add_seq (dep_bind, dep_ilist);
8528 gimple_bind_add_stmt (dep_bind, bind);
8529 gimple_bind_add_seq (dep_bind, dep_olist);
8530 pop_gimplify_context (dep_bind);
8534 /* Lower code for an OpenMP teams directive. */
8536 static void
8537 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8539 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8540 push_gimplify_context ();
8542 tree block = make_node (BLOCK);
8543 gbind *bind = gimple_build_bind (NULL, NULL, block);
8544 gsi_replace (gsi_p, bind, true);
8545 gimple_seq bind_body = NULL;
8546 gimple_seq dlist = NULL;
8547 gimple_seq olist = NULL;
8549 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8550 OMP_CLAUSE_NUM_TEAMS);
8551 if (num_teams == NULL_TREE)
8552 num_teams = build_int_cst (unsigned_type_node, 0);
8553 else
8555 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8556 num_teams = fold_convert (unsigned_type_node, num_teams);
8557 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8559 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8560 OMP_CLAUSE_THREAD_LIMIT);
8561 if (thread_limit == NULL_TREE)
8562 thread_limit = build_int_cst (unsigned_type_node, 0);
8563 else
8565 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8566 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8567 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8568 fb_rvalue);
8571 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8572 &bind_body, &dlist, ctx, NULL);
8573 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8574 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8575 if (!gimple_omp_teams_grid_phony (teams_stmt))
8577 gimple_seq_add_stmt (&bind_body, teams_stmt);
8578 location_t loc = gimple_location (teams_stmt);
8579 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8580 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8581 gimple_set_location (call, loc);
8582 gimple_seq_add_stmt (&bind_body, call);
8585 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8586 gimple_omp_set_body (teams_stmt, NULL);
8587 gimple_seq_add_seq (&bind_body, olist);
8588 gimple_seq_add_seq (&bind_body, dlist);
8589 if (!gimple_omp_teams_grid_phony (teams_stmt))
8590 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8591 gimple_bind_set_body (bind, bind_body);
8593 pop_gimplify_context (bind);
8595 gimple_bind_append_vars (bind, ctx->block_vars);
8596 BLOCK_VARS (block) = ctx->block_vars;
8597 if (BLOCK_VARS (block))
8598 TREE_USED (block) = 1;
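   /* Sketch of the result: '#pragma omp teams num_teams(4)' becomes,
      roughly,

        #pragma omp teams
        __builtin_GOMP_teams (4, 0);
        body;
        OMP_RETURN;

      where the second GOMP_teams argument is the thread_limit and 0
      means "implementation defined".  */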
8601 /* Lower code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8603 static void
8604 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8606 gimple *stmt = gsi_stmt (*gsi_p);
8607 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8608 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8609 gimple_build_omp_return (false));
8613 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8614 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8615 of OMP context, but with task_shared_vars set. */
8617 static tree
8618 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8619 void *data)
8621 tree t = *tp;
8623 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8624 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8625 return t;
8627 if (task_shared_vars
8628 && DECL_P (t)
8629 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8630 return t;
8632 /* If a global variable has been privatized, TREE_CONSTANT on
8633 ADDR_EXPR might be wrong. */
8634 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8635 recompute_tree_invariant_for_addr_expr (t);
8637 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8638 return NULL_TREE;
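   /* Example: a variable X privatized while scanning a construct may
      have been given DECL_VALUE_EXPR .omp_data_i->x; a GIMPLE_COND
      still referring to X directly is then no longer valid gimple and
      must be regimplified so that the load through .omp_data_i becomes
      an explicit statement.  */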
8641 /* Data to be communicated between lower_omp_regimplify_operands and
8642 lower_omp_regimplify_operands_p. */
8644 struct lower_omp_regimplify_operands_data
8646 omp_context *ctx;
8647 vec<tree> *decls;
8650 /* Helper function for lower_omp_regimplify_operands. Find
8651 omp_member_access_dummy_var vars and adjust temporarily their
8652 DECL_VALUE_EXPRs if needed. */
8654 static tree
8655 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8656 void *data)
8658 tree t = omp_member_access_dummy_var (*tp);
8659 if (t)
8661 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8662 lower_omp_regimplify_operands_data *ldata
8663 = (lower_omp_regimplify_operands_data *) wi->info;
8664 tree o = maybe_lookup_decl (t, ldata->ctx);
8665 if (o != t)
8667 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8668 ldata->decls->safe_push (*tp);
8669 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8670 SET_DECL_VALUE_EXPR (*tp, v);
8673 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8674 return NULL_TREE;
8677 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8678 of omp_member_access_dummy_var vars during regimplification. */
8680 static void
8681 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8682 gimple_stmt_iterator *gsi_p)
8684 auto_vec<tree, 10> decls;
8685 if (ctx)
8687 struct walk_stmt_info wi;
8688 memset (&wi, '\0', sizeof (wi));
8689 struct lower_omp_regimplify_operands_data data;
8690 data.ctx = ctx;
8691 data.decls = &decls;
8692 wi.info = &data;
8693 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8695 gimple_regimplify_operands (stmt, gsi_p);
8696 while (!decls.is_empty ())
8698 tree t = decls.pop ();
8699 tree v = decls.pop ();
8700 SET_DECL_VALUE_EXPR (t, v);
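   /* DECLS holds (saved DECL_VALUE_EXPR, decl) pairs pushed by the
      walk above; popping them two at a time restores the original
      value expressions once regimplification is done.  */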
8704 static void
8705 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8707 gimple *stmt = gsi_stmt (*gsi_p);
8708 struct walk_stmt_info wi;
8709 gcall *call_stmt;
8711 if (gimple_has_location (stmt))
8712 input_location = gimple_location (stmt);
8714 if (task_shared_vars)
8715 memset (&wi, '\0', sizeof (wi));
8717 /* If we have issued syntax errors, avoid doing any heavy lifting.
8718 Just replace the OMP directives with a NOP to avoid
8719 confusing RTL expansion. */
8720 if (seen_error () && is_gimple_omp (stmt))
8722 gsi_replace (gsi_p, gimple_build_nop (), true);
8723 return;
8726 switch (gimple_code (stmt))
8728 case GIMPLE_COND:
8730 gcond *cond_stmt = as_a <gcond *> (stmt);
8731 if ((ctx || task_shared_vars)
8732 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8733 lower_omp_regimplify_p,
8734 ctx ? NULL : &wi, NULL)
8735 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8736 lower_omp_regimplify_p,
8737 ctx ? NULL : &wi, NULL)))
8738 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8740 break;
8741 case GIMPLE_CATCH:
8742 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8743 break;
8744 case GIMPLE_EH_FILTER:
8745 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8746 break;
8747 case GIMPLE_TRY:
8748 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8749 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8750 break;
8751 case GIMPLE_TRANSACTION:
8752 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8753 ctx);
8754 break;
8755 case GIMPLE_BIND:
8756 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8757 break;
8758 case GIMPLE_OMP_PARALLEL:
8759 case GIMPLE_OMP_TASK:
8760 ctx = maybe_lookup_ctx (stmt);
8761 gcc_assert (ctx);
8762 if (ctx->cancellable)
8763 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8764 lower_omp_taskreg (gsi_p, ctx);
8765 break;
8766 case GIMPLE_OMP_FOR:
8767 ctx = maybe_lookup_ctx (stmt);
8768 gcc_assert (ctx);
8769 if (ctx->cancellable)
8770 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8771 lower_omp_for (gsi_p, ctx);
8772 break;
8773 case GIMPLE_OMP_SECTIONS:
8774 ctx = maybe_lookup_ctx (stmt);
8775 gcc_assert (ctx);
8776 if (ctx->cancellable)
8777 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8778 lower_omp_sections (gsi_p, ctx);
8779 break;
8780 case GIMPLE_OMP_SINGLE:
8781 ctx = maybe_lookup_ctx (stmt);
8782 gcc_assert (ctx);
8783 lower_omp_single (gsi_p, ctx);
8784 break;
8785 case GIMPLE_OMP_MASTER:
8786 ctx = maybe_lookup_ctx (stmt);
8787 gcc_assert (ctx);
8788 lower_omp_master (gsi_p, ctx);
8789 break;
8790 case GIMPLE_OMP_TASKGROUP:
8791 ctx = maybe_lookup_ctx (stmt);
8792 gcc_assert (ctx);
8793 lower_omp_taskgroup (gsi_p, ctx);
8794 break;
8795 case GIMPLE_OMP_ORDERED:
8796 ctx = maybe_lookup_ctx (stmt);
8797 gcc_assert (ctx);
8798 lower_omp_ordered (gsi_p, ctx);
8799 break;
8800 case GIMPLE_OMP_CRITICAL:
8801 ctx = maybe_lookup_ctx (stmt);
8802 gcc_assert (ctx);
8803 lower_omp_critical (gsi_p, ctx);
8804 break;
8805 case GIMPLE_OMP_ATOMIC_LOAD:
8806 if ((ctx || task_shared_vars)
8807 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8808 as_a <gomp_atomic_load *> (stmt)),
8809 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8810 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8811 break;
8812 case GIMPLE_OMP_TARGET:
8813 ctx = maybe_lookup_ctx (stmt);
8814 gcc_assert (ctx);
8815 lower_omp_target (gsi_p, ctx);
8816 break;
8817 case GIMPLE_OMP_TEAMS:
8818 ctx = maybe_lookup_ctx (stmt);
8819 gcc_assert (ctx);
8820 lower_omp_teams (gsi_p, ctx);
8821 break;
8822 case GIMPLE_OMP_GRID_BODY:
8823 ctx = maybe_lookup_ctx (stmt);
8824 gcc_assert (ctx);
8825 lower_omp_grid_body (gsi_p, ctx);
8826 break;
8827 case GIMPLE_CALL:
8828 tree fndecl;
8829 call_stmt = as_a <gcall *> (stmt);
8830 fndecl = gimple_call_fndecl (call_stmt);
8831 if (fndecl
8832 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8833 switch (DECL_FUNCTION_CODE (fndecl))
8835 case BUILT_IN_GOMP_BARRIER:
8836 if (ctx == NULL)
8837 break;
8838 /* FALLTHRU */
8839 case BUILT_IN_GOMP_CANCEL:
8840 case BUILT_IN_GOMP_CANCELLATION_POINT:
8841 omp_context *cctx;
8842 cctx = ctx;
8843 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8844 cctx = cctx->outer;
8845 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8846 if (!cctx->cancellable)
8848 if (DECL_FUNCTION_CODE (fndecl)
8849 == BUILT_IN_GOMP_CANCELLATION_POINT)
8851 stmt = gimple_build_nop ();
8852 gsi_replace (gsi_p, stmt, false);
8854 break;
8856 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8858 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8859 gimple_call_set_fndecl (call_stmt, fndecl);
8860 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8862 tree lhs;
8863 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8864 gimple_call_set_lhs (call_stmt, lhs);
8865 tree fallthru_label;
8866 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8867 gimple *g;
8868 g = gimple_build_label (fallthru_label);
8869 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8870 g = gimple_build_cond (NE_EXPR, lhs,
8871 fold_convert (TREE_TYPE (lhs),
8872 boolean_false_node),
8873 cctx->cancel_label, fallthru_label);
8874 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8875 break;
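	    /* In a cancellable context the call has been rewritten
	       into, roughly,

	         lhs = __builtin_GOMP_barrier_cancel ();
	         if (lhs != 0) goto cancel_label; else goto fallthru;

	       (GOMP_cancel and GOMP_cancellation_point keep their
	       fndecl but get the same conditional branch on their
	       boolean result), so a pending cancellation request makes
	       the thread jump to the cancellation cleanup code.  */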
8876 default:
8877 break;
8879 /* FALLTHRU */
8880 default:
8881 if ((ctx || task_shared_vars)
8882 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8883 ctx ? NULL : &wi))
8885 /* Just remove clobbers; this should happen only if we have
8886 "privatized" local addressable variables in SIMD regions.
8887 The clobber isn't needed in that case, and gimplifying the address
8888 of the ARRAY_REF into a pointer and creating a MEM_REF based
8889 clobber would create worse code than we get with the clobber
8890 dropped. */
8891 if (gimple_clobber_p (stmt))
8893 gsi_replace (gsi_p, gimple_build_nop (), true);
8894 break;
8896 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8898 break;
8902 static void
8903 lower_omp (gimple_seq *body, omp_context *ctx)
8905 location_t saved_location = input_location;
8906 gimple_stmt_iterator gsi;
8907 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8908 lower_omp_1 (&gsi, ctx);
8909 /* During gimplification, we haven't folded statements inside offloading
8910 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8911 if (target_nesting_level || taskreg_nesting_level)
8912 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8913 fold_stmt (&gsi);
8914 input_location = saved_location;
8917 /* Main entry point. */
8919 static unsigned int
8920 execute_lower_omp (void)
8922 gimple_seq body;
8923 int i;
8924 omp_context *ctx;
8926 /* This pass always runs, to provide PROP_gimple_lomp.
8927 But often, there is nothing to do. */
8928 if (flag_openacc == 0 && flag_openmp == 0
8929 && flag_openmp_simd == 0)
8930 return 0;
8932 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8933 delete_omp_context);
8935 body = gimple_body (current_function_decl);
8937 if (hsa_gen_requested_p ())
8938 omp_grid_gridify_all_targets (&body);
8940 scan_omp (&body, NULL);
8941 gcc_assert (taskreg_nesting_level == 0);
8942 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8943 finish_taskreg_scan (ctx);
8944 taskreg_contexts.release ();
8946 if (all_contexts->root)
8948 if (task_shared_vars)
8949 push_gimplify_context ();
8950 lower_omp (&body, NULL);
8951 if (task_shared_vars)
8952 pop_gimplify_context (NULL);
8955 if (all_contexts)
8957 splay_tree_delete (all_contexts);
8958 all_contexts = NULL;
8960 BITMAP_FREE (task_shared_vars);
8961 return 0;
8964 namespace {
8966 const pass_data pass_data_lower_omp =
8968 GIMPLE_PASS, /* type */
8969 "omplower", /* name */
8970 OPTGROUP_OMP, /* optinfo_flags */
8971 TV_NONE, /* tv_id */
8972 PROP_gimple_any, /* properties_required */
8973 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8974 0, /* properties_destroyed */
8975 0, /* todo_flags_start */
8976 0, /* todo_flags_finish */
8979 class pass_lower_omp : public gimple_opt_pass
8981 public:
8982 pass_lower_omp (gcc::context *ctxt)
8983 : gimple_opt_pass (pass_data_lower_omp, ctxt)
8986 /* opt_pass methods: */
8987 virtual unsigned int execute (function *) { return execute_lower_omp (); }
8989 }; // class pass_lower_omp
8991 } // anon namespace
8993 gimple_opt_pass *
8994 make_pass_lower_omp (gcc::context *ctxt)
8996 return new pass_lower_omp (ctxt);
8999 /* The following is a utility to diagnose structured block violations.
9000 It is not part of the "omplower" pass, as that's invoked too late. It
9001 should be invoked by the respective front ends after gimplification. */
9003 static splay_tree all_labels;
9005 /* Check for mismatched contexts and generate an error if needed. Return
9006 true if an error is detected. */
9008 static bool
9009 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9010 gimple *branch_ctx, gimple *label_ctx)
9012 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9013 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9015 if (label_ctx == branch_ctx)
9016 return false;
9018 const char* kind = NULL;
9020 if (flag_openacc)
9022 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9023 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9025 gcc_checking_assert (kind == NULL);
9026 kind = "OpenACC";
9029 if (kind == NULL)
9031 gcc_checking_assert (flag_openmp || flag_openmp_simd);
9032 kind = "OpenMP";
9035 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9036 so we could traverse it and issue a correct "exit" or "enter" error
9037 message upon a structured block violation.
9039 We built the context by building a list with tree_cons'ing, but there is
9040 no easy counterpart in gimple tuples. It seems like far too much work
9041 for issuing exit/enter error messages. If someone really misses the
9042 distinct error message... patches welcome. */
9044 #if 0
9045 /* Try to avoid confusing the user by producing an error message
9046 with correct "exit" or "enter" verbiage. We prefer "exit"
9047 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9048 if (branch_ctx == NULL)
9049 exit_p = false;
9050 else
9052 while (label_ctx)
9054 if (TREE_VALUE (label_ctx) == branch_ctx)
9056 exit_p = false;
9057 break;
9059 label_ctx = TREE_CHAIN (label_ctx);
9063 if (exit_p)
9064 error ("invalid exit from %s structured block", kind);
9065 else
9066 error ("invalid entry to %s structured block", kind);
9067 #endif
9069 /* If it's obvious we have an invalid entry, be specific about the error. */
9070 if (branch_ctx == NULL)
9071 error ("invalid entry to %s structured block", kind);
9072 else
9074 /* Otherwise, be vague and lazy, but efficient. */
9075 error ("invalid branch to/from %s structured block", kind);
9078 gsi_replace (gsi_p, gimple_build_nop (), false);
9079 return true;
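/* For example,

     goto l;
     #pragma omp parallel
       { l:; }

   reaches this function with BRANCH_CTX == NULL and LABEL_CTX == the
   parallel statement, yielding "invalid entry to OpenMP structured
   block".  */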
9082 /* Pass 1: Create a minimal tree of structured blocks, and record
9083 where each label is found. */
9085 static tree
9086 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9087 struct walk_stmt_info *wi)
9089 gimple *context = (gimple *) wi->info;
9090 gimple *inner_context;
9091 gimple *stmt = gsi_stmt (*gsi_p);
9093 *handled_ops_p = true;
9095 switch (gimple_code (stmt))
9097 WALK_SUBSTMTS;
9099 case GIMPLE_OMP_PARALLEL:
9100 case GIMPLE_OMP_TASK:
9101 case GIMPLE_OMP_SECTIONS:
9102 case GIMPLE_OMP_SINGLE:
9103 case GIMPLE_OMP_SECTION:
9104 case GIMPLE_OMP_MASTER:
9105 case GIMPLE_OMP_ORDERED:
9106 case GIMPLE_OMP_CRITICAL:
9107 case GIMPLE_OMP_TARGET:
9108 case GIMPLE_OMP_TEAMS:
9109 case GIMPLE_OMP_TASKGROUP:
9110 /* The minimal context here is just the current OMP construct. */
9111 inner_context = stmt;
9112 wi->info = inner_context;
9113 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9114 wi->info = context;
9115 break;
9117 case GIMPLE_OMP_FOR:
9118 inner_context = stmt;
9119 wi->info = inner_context;
9120 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9121 walk them. */
9122 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9123 diagnose_sb_1, NULL, wi);
9124 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9125 wi->info = context;
9126 break;
9128 case GIMPLE_LABEL:
9129 splay_tree_insert (all_labels,
9130 (splay_tree_key) gimple_label_label (
9131 as_a <glabel *> (stmt)),
9132 (splay_tree_value) context);
9133 break;
9135 default:
9136 break;
9139 return NULL_TREE;
9142 /* Pass 2: Check each branch and see if its context differs from
9143 the destination label's context. */
9145 static tree
9146 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9147 struct walk_stmt_info *wi)
9149 gimple *context = (gimple *) wi->info;
9150 splay_tree_node n;
9151 gimple *stmt = gsi_stmt (*gsi_p);
9153 *handled_ops_p = true;
9155 switch (gimple_code (stmt))
9157 WALK_SUBSTMTS;
9159 case GIMPLE_OMP_PARALLEL:
9160 case GIMPLE_OMP_TASK:
9161 case GIMPLE_OMP_SECTIONS:
9162 case GIMPLE_OMP_SINGLE:
9163 case GIMPLE_OMP_SECTION:
9164 case GIMPLE_OMP_MASTER:
9165 case GIMPLE_OMP_ORDERED:
9166 case GIMPLE_OMP_CRITICAL:
9167 case GIMPLE_OMP_TARGET:
9168 case GIMPLE_OMP_TEAMS:
9169 case GIMPLE_OMP_TASKGROUP:
9170 wi->info = stmt;
9171 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9172 wi->info = context;
9173 break;
9175 case GIMPLE_OMP_FOR:
9176 wi->info = stmt;
9177 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9178 walk them. */
9179 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9180 diagnose_sb_2, NULL, wi);
9181 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9182 wi->info = context;
9183 break;
9185 case GIMPLE_COND:
9187 gcond *cond_stmt = as_a <gcond *> (stmt);
9188 tree lab = gimple_cond_true_label (cond_stmt);
9189 if (lab)
9191 n = splay_tree_lookup (all_labels,
9192 (splay_tree_key) lab);
9193 diagnose_sb_0 (gsi_p, context,
9194 n ? (gimple *) n->value : NULL);
9196 lab = gimple_cond_false_label (cond_stmt);
9197 if (lab)
9199 n = splay_tree_lookup (all_labels,
9200 (splay_tree_key) lab);
9201 diagnose_sb_0 (gsi_p, context,
9202 n ? (gimple *) n->value : NULL);
9205 break;
9207 case GIMPLE_GOTO:
9209 tree lab = gimple_goto_dest (stmt);
9210 if (TREE_CODE (lab) != LABEL_DECL)
9211 break;
9213 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9214 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9216 break;
9218 case GIMPLE_SWITCH:
9220 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9221 unsigned int i;
9222 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9224 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9225 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9226 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9227 break;
9230 break;
9232 case GIMPLE_RETURN:
9233 diagnose_sb_0 (gsi_p, context, NULL);
9234 break;
9236 default:
9237 break;
9240 return NULL_TREE;
9243 static unsigned int
9244 diagnose_omp_structured_block_errors (void)
9246 struct walk_stmt_info wi;
9247 gimple_seq body = gimple_body (current_function_decl);
9249 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9251 memset (&wi, 0, sizeof (wi));
9252 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9254 memset (&wi, 0, sizeof (wi));
9255 wi.want_locations = true;
9256 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9258 gimple_set_body (current_function_decl, body);
9260 splay_tree_delete (all_labels);
9261 all_labels = NULL;
9263 return 0;
9266 namespace {
9268 const pass_data pass_data_diagnose_omp_blocks =
9270 GIMPLE_PASS, /* type */
9271 "*diagnose_omp_blocks", /* name */
9272 OPTGROUP_OMP, /* optinfo_flags */
9273 TV_NONE, /* tv_id */
9274 PROP_gimple_any, /* properties_required */
9275 0, /* properties_provided */
9276 0, /* properties_destroyed */
9277 0, /* todo_flags_start */
9278 0, /* todo_flags_finish */
9281 class pass_diagnose_omp_blocks : public gimple_opt_pass
9283 public:
9284 pass_diagnose_omp_blocks (gcc::context *ctxt)
9285 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9288 /* opt_pass methods: */
9289 virtual bool gate (function *)
9291 return flag_openacc || flag_openmp || flag_openmp_simd;
9293 virtual unsigned int execute (function *)
9295 return diagnose_omp_structured_block_errors ();
9298 }; // class pass_diagnose_omp_blocks
9300 } // anon namespace
9302 gimple_opt_pass *
9303 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9305 return new pass_diagnose_omp_blocks (ctxt);
9309 #include "gt-omp-low.h"