/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2017 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
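
/* As a rough sketch (the exact GIMPLE varies with the clauses and the
   target), a construct such as

     #pragma omp parallel shared (x)
       x++;

   is outlined into a child function named like foo._omp_fn.0 that takes
   a pointer to a .omp_data_s record carrying X (or its address), while
   the original statement becomes marshalling code plus a call into
   libgomp, conceptually GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0).  */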
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
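
/* For example, build_outer_var_ref below uses
   unshare_and_remap (DECL_VALUE_EXPR (var), t, o) to rewrite a member
   access "this->field" in terms of the remapped "this" parameter O.  */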
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}
/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
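
/* (For instance, a C99 VLA declared as "int a[n]" is variable sized in
   the above sense: its TYPE_SIZE_UNIT is not a constant.)  */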
/* Lookup variables.  The "maybe" forms allow for the variable not to
   have been entered; otherwise we assert that the variable has been
   entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
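
/* To summarize the predicate above: aggregates, atomics, statics,
   decls with value exprs and addressable decls go by pointer;
   read-only scalars and by-reference PARM_DECLs/RESULT_DECLs use
   copy-in/copy-out; shared vars in tasks (and vars shared again in an
   enclosing parallel) are forced by pointer and marked addressable.  */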
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just because task needs to take
     its address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */
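
/* As can be read off the checks below, MASK is a bitmask: bit 0 installs
   the field into CTX->FIELD_MAP/RECORD_TYPE, bit 1 into
   CTX->SFIELD_MAP/SRECORD_TYPE, bit 2 adds an extra level of pointer
   indirection for array types, and bit 3 keys the splay trees by
   &DECL_UID (VAR) rather than by VAR itself.  */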
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}
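
/* For instance, within a function foo the names generated above look
   like foo._omp_fn.0 or foo._omp_cpyfn.1, the numeric suffix coming
   from clone_function_name's counter.  */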
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
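
/* A worked example of the count above: for a combined construct with
   collapse(2), a non-constant inner bound and a lastprivate clause,
   COUNT is 2 (istart/iend) + 1 (count2) + 1 (total iterations) = 4
   _LOOPTEMP_ clauses.  */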
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_ctx returns non-NULL for some
   tree, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
	{
	  if (DECL_HAS_VALUE_EXPR_P (t))
	    t = unshare_expr (DECL_VALUE_EXPR (t));
	  *tp = t;
	}
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
1873 /* If any decls have been made addressable during scan_omp,
1874 adjust their fields if needed, and layout record types
1875 of parallel/task constructs. */
1877 static void
1878 finish_taskreg_scan (omp_context *ctx)
1880 if (ctx->record_type == NULL_TREE)
1881 return;
1883 /* If any task_shared_vars were needed, verify all
1884 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1885 statements if use_pointer_for_field hasn't changed
1886 because of that. If it did, update field types now. */
1887 if (task_shared_vars)
1889 tree c;
1891 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1892 c; c = OMP_CLAUSE_CHAIN (c))
1893 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1894 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1896 tree decl = OMP_CLAUSE_DECL (c);
1898 /* Global variables don't need to be copied,
1899 the receiver side will use them directly. */
1900 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1901 continue;
1902 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1903 || !use_pointer_for_field (decl, ctx))
1904 continue;
1905 tree field = lookup_field (decl, ctx);
1906 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1907 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1908 continue;
1909 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1910 TREE_THIS_VOLATILE (field) = 0;
1911 DECL_USER_ALIGN (field) = 0;
1912 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1913 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1914 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1915 if (ctx->srecord_type)
1917 tree sfield = lookup_sfield (decl, ctx);
1918 TREE_TYPE (sfield) = TREE_TYPE (field);
1919 TREE_THIS_VOLATILE (sfield) = 0;
1920 DECL_USER_ALIGN (sfield) = 0;
1921 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1922 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1923 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1928 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1930 layout_type (ctx->record_type);
1931 fixup_child_record_type (ctx);
1933 else
1935 location_t loc = gimple_location (ctx->stmt);
1936 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1937 /* Move VLA fields to the end. */
1938 p = &TYPE_FIELDS (ctx->record_type);
1939 while (*p)
1940 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1941 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1943 *q = *p;
1944 *p = TREE_CHAIN (*p);
1945 TREE_CHAIN (*q) = NULL_TREE;
1946 q = &TREE_CHAIN (*q);
1948 else
1949 p = &DECL_CHAIN (*p);
1950 *p = vla_fields;
1951 if (gimple_omp_task_taskloop_p (ctx->stmt))
1953  /* Move the fields corresponding to the first and second _looptemp_
1954     clauses to the front.  These are filled by GOMP_taskloop
1955 and thus need to be in specific positions. */
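  /* Illustrative layout after the reordering below: the two _looptemp_
     fields (holding the start and end of the iteration range that
     GOMP_taskloop writes) come first, followed by the remaining fields
     in their previous order.  */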
1956 tree c1 = gimple_omp_task_clauses (ctx->stmt);
1957 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1958 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
1959 OMP_CLAUSE__LOOPTEMP_);
1960 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1961 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
1962 p = &TYPE_FIELDS (ctx->record_type);
1963 while (*p)
1964 if (*p == f1 || *p == f2)
1965 *p = DECL_CHAIN (*p);
1966 else
1967 p = &DECL_CHAIN (*p);
1968 DECL_CHAIN (f1) = f2;
1969 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
1970 TYPE_FIELDS (ctx->record_type) = f1;
1971 if (ctx->srecord_type)
1973 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
1974 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
1975 p = &TYPE_FIELDS (ctx->srecord_type);
1976 while (*p)
1977 if (*p == f1 || *p == f2)
1978 *p = DECL_CHAIN (*p);
1979 else
1980 p = &DECL_CHAIN (*p);
1981 DECL_CHAIN (f1) = f2;
1982 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
1983 TYPE_FIELDS (ctx->srecord_type) = f1;
1986 layout_type (ctx->record_type);
1987 fixup_child_record_type (ctx);
1988 if (ctx->srecord_type)
1989 layout_type (ctx->srecord_type);
1990 tree t = fold_convert_loc (loc, long_integer_type_node,
1991 TYPE_SIZE_UNIT (ctx->record_type));
1992 if (TREE_CODE (t) != INTEGER_CST)
1994 t = unshare_expr (t);
1995 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
1997 gimple_omp_task_set_arg_size (ctx->stmt, t);
1998 t = build_int_cst (long_integer_type_node,
1999 TYPE_ALIGN_UNIT (ctx->record_type));
2000 gimple_omp_task_set_arg_align (ctx->stmt, t);
2004 /* Find the enclosing offload context. */
2006 static omp_context *
2007 enclosing_target_ctx (omp_context *ctx)
2009 for (; ctx; ctx = ctx->outer)
2010 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2011 break;
2013 return ctx;
2016 /* Return true if ctx is part of an oacc kernels region. */
2018 static bool
2019 ctx_in_oacc_kernels_region (omp_context *ctx)
2021 for (;ctx != NULL; ctx = ctx->outer)
2023 gimple *stmt = ctx->stmt;
2024 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2025 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2026 return true;
2029 return false;
2032 /* Check the parallelism clauses inside a kernels region.
2033 Until kernels handling moves to use the same loop indirection
2034 scheme as parallel, we need to do this checking early. */
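/* For example (illustrative only), the check below rejects

     #pragma acc kernels
     #pragma acc loop gang
     for (...)
       #pragma acc loop gang   // same parallelism as containing loop
       for (...)

   because the inner loop reuses the gang dimension of its parent.  */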
2036 static unsigned
2037 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2039 bool checking = true;
2040 unsigned outer_mask = 0;
2041 unsigned this_mask = 0;
2042 bool has_seq = false, has_auto = false;
2044 if (ctx->outer)
2045 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2046 if (!stmt)
2048 checking = false;
2049 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2050 return outer_mask;
2051 stmt = as_a <gomp_for *> (ctx->stmt);
2054 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2056 switch (OMP_CLAUSE_CODE (c))
2058 case OMP_CLAUSE_GANG:
2059 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2060 break;
2061 case OMP_CLAUSE_WORKER:
2062 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2063 break;
2064 case OMP_CLAUSE_VECTOR:
2065 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2066 break;
2067 case OMP_CLAUSE_SEQ:
2068 has_seq = true;
2069 break;
2070 case OMP_CLAUSE_AUTO:
2071 has_auto = true;
2072 break;
2073 default:
2074 break;
2078 if (checking)
2080 if (has_seq && (this_mask || has_auto))
2081 error_at (gimple_location (stmt), "%<seq%> overrides other"
2082 " OpenACC loop specifiers");
2083 else if (has_auto && this_mask)
2084 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2085 " OpenACC loop specifiers");
2087 if (this_mask & outer_mask)
2088 error_at (gimple_location (stmt), "inner loop uses same"
2089 " OpenACC parallelism as containing loop");
2092 return outer_mask | this_mask;
2095 /* Scan a GIMPLE_OMP_FOR. */
2097 static omp_context *
2098 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2100 omp_context *ctx;
2101 size_t i;
2102 tree clauses = gimple_omp_for_clauses (stmt);
2104 ctx = new_omp_context (stmt, outer_ctx);
2106 if (is_gimple_omp_oacc (stmt))
2108 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2110 if (!tgt || is_oacc_parallel (tgt))
2111 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2113 char const *check = NULL;
2115 switch (OMP_CLAUSE_CODE (c))
2117 case OMP_CLAUSE_GANG:
2118 check = "gang";
2119 break;
2121 case OMP_CLAUSE_WORKER:
2122 check = "worker";
2123 break;
2125 case OMP_CLAUSE_VECTOR:
2126 check = "vector";
2127 break;
2129 default:
2130 break;
2133 if (check && OMP_CLAUSE_OPERAND (c, 0))
2134 error_at (gimple_location (stmt),
2135 "argument not permitted on %qs clause in"
2136 " OpenACC %<parallel%>", check);
2139 if (tgt && is_oacc_kernels (tgt))
2141 /* Strip out reductions, as they are not handled yet. */
2142 tree *prev_ptr = &clauses;
2144 while (tree probe = *prev_ptr)
2146 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2148 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2149 *prev_ptr = *next_ptr;
2150 else
2151 prev_ptr = next_ptr;
2154 gimple_omp_for_set_clauses (stmt, clauses);
2155 check_oacc_kernel_gwv (stmt, ctx);
2159 scan_sharing_clauses (clauses, ctx);
2161 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2162 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2164 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2165 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2166 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2167 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2169 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2170 return ctx;
2173 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
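/* A sketch of the structure built below (pseudo-GIMPLE, for
   illustration only):

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop, with an added _simt_ clause>; goto lab3;
     lab2: <original loop>;
     lab3: ;

   so the SIMT variant is selected at run time on SIMT targets.  */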
2175 static void
2176 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2177 omp_context *outer_ctx)
2179 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2180 gsi_replace (gsi, bind, false);
2181 gimple_seq seq = NULL;
2182 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2183 tree cond = create_tmp_var_raw (integer_type_node);
2184 DECL_CONTEXT (cond) = current_function_decl;
2185 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2186 gimple_bind_set_vars (bind, cond);
2187 gimple_call_set_lhs (g, cond);
2188 gimple_seq_add_stmt (&seq, g);
2189 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2190 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2191 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2192 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2193 gimple_seq_add_stmt (&seq, g);
2194 g = gimple_build_label (lab1);
2195 gimple_seq_add_stmt (&seq, g);
2196 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2197 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2198 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2199 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2200 gimple_omp_for_set_clauses (new_stmt, clause);
2201 gimple_seq_add_stmt (&seq, new_stmt);
2202 g = gimple_build_goto (lab3);
2203 gimple_seq_add_stmt (&seq, g);
2204 g = gimple_build_label (lab2);
2205 gimple_seq_add_stmt (&seq, g);
2206 gimple_seq_add_stmt (&seq, stmt);
2207 g = gimple_build_label (lab3);
2208 gimple_seq_add_stmt (&seq, g);
2209 gimple_bind_set_body (bind, seq);
2210 update_stmt (bind);
2211 scan_omp_for (new_stmt, outer_ctx);
2212 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2215 /* Scan an OpenMP sections directive. */
2217 static void
2218 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2220 omp_context *ctx;
2222 ctx = new_omp_context (stmt, outer_ctx);
2223 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2224 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2227 /* Scan an OpenMP single directive. */
2229 static void
2230 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2232 omp_context *ctx;
2233 tree name;
2235 ctx = new_omp_context (stmt, outer_ctx);
2236 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2237 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2238 name = create_tmp_var_name (".omp_copy_s");
2239 name = build_decl (gimple_location (stmt),
2240 TYPE_DECL, name, ctx->record_type);
2241 TYPE_NAME (ctx->record_type) = name;
2243 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2244 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2246 if (TYPE_FIELDS (ctx->record_type) == NULL)
2247 ctx->record_type = NULL;
2248 else
2249 layout_type (ctx->record_type);
2252 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2253 used in the corresponding offloaded function are restrict. */
2255 static bool
2256 omp_target_base_pointers_restrict_p (tree clauses)
2258 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2259 used by OpenACC. */
2260 if (flag_openacc == 0)
2261 return false;
2263 /* I. Basic example:
2265 void foo (void)
2267 unsigned int a[2], b[2];
2269 #pragma acc kernels \
2270 copyout (a) \
2271 copyout (b)
2273 a[0] = 0;
2274 b[0] = 1;
2278 After gimplification, we have:
2280 #pragma omp target oacc_kernels \
2281 map(force_from:a [len: 8]) \
2282 map(force_from:b [len: 8])
2284 a[0] = 0;
2285 b[0] = 1;
2288 Because both mappings have the force prefix, we know that they will be
2289 allocated when calling the corresponding offloaded function, which means we
2290 can mark the base pointers for a and b in the offloaded function as
2291 restrict. */
2293 tree c;
2294 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2296 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2297 return false;
2299 switch (OMP_CLAUSE_MAP_KIND (c))
2301 case GOMP_MAP_FORCE_ALLOC:
2302 case GOMP_MAP_FORCE_TO:
2303 case GOMP_MAP_FORCE_FROM:
2304 case GOMP_MAP_FORCE_TOFROM:
2305 break;
2306 default:
2307 return false;
2311 return true;
2314 /* Scan a GIMPLE_OMP_TARGET. */
2316 static void
2317 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2319 omp_context *ctx;
2320 tree name;
2321 bool offloaded = is_gimple_omp_offloaded (stmt);
2322 tree clauses = gimple_omp_target_clauses (stmt);
2324 ctx = new_omp_context (stmt, outer_ctx);
2325 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2326 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2327 name = create_tmp_var_name (".omp_data_t");
2328 name = build_decl (gimple_location (stmt),
2329 TYPE_DECL, name, ctx->record_type);
2330 DECL_ARTIFICIAL (name) = 1;
2331 DECL_NAMELESS (name) = 1;
2332 TYPE_NAME (ctx->record_type) = name;
2333 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2335 bool base_pointers_restrict = false;
2336 if (offloaded)
2338 create_omp_child_function (ctx, false);
2339 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2341 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2342 if (base_pointers_restrict
2343 && dump_file && (dump_flags & TDF_DETAILS))
2344 fprintf (dump_file,
2345 "Base pointers in offloaded function are restrict\n");
2348 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2349 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2351 if (TYPE_FIELDS (ctx->record_type) == NULL)
2352 ctx->record_type = ctx->receiver_decl = NULL;
2353 else
2355 TYPE_FIELDS (ctx->record_type)
2356 = nreverse (TYPE_FIELDS (ctx->record_type));
2357 if (flag_checking)
2359 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2360 for (tree field = TYPE_FIELDS (ctx->record_type);
2361 field;
2362 field = DECL_CHAIN (field))
2363 gcc_assert (DECL_ALIGN (field) == align);
2365 layout_type (ctx->record_type);
2366 if (offloaded)
2367 fixup_child_record_type (ctx);
2371 /* Scan an OpenMP teams directive. */
2373 static void
2374 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2376 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2377 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2378 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2381 /* Check nesting restrictions. */
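/* For instance (illustrative), the checks below reject code such as

     #pragma omp for
     for (...)
       {
         #pragma omp for   // work-sharing nested in work-sharing
         for (...) ...
       }

   with "work-sharing region may not be closely nested inside of
   work-sharing, ... region".  */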
2382 static bool
2383 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2385 tree c;
2387 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2388 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2389 the original copy of its contents. */
2390 return true;
2392 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2393 inside an OpenACC CTX. */
2394 if (!(is_gimple_omp (stmt)
2395 && is_gimple_omp_oacc (stmt))
2396 /* Except for atomic codes that we share with OpenMP. */
2397 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2398 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2400 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2402 error_at (gimple_location (stmt),
2403 "non-OpenACC construct inside of OpenACC routine");
2404 return false;
2406 else
2407 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2408 if (is_gimple_omp (octx->stmt)
2409 && is_gimple_omp_oacc (octx->stmt))
2411 error_at (gimple_location (stmt),
2412 "non-OpenACC construct inside of OpenACC region");
2413 return false;
2417 if (ctx != NULL)
2419 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2420 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2422 c = NULL_TREE;
2423 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2425 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2426 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2428 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2429 && (ctx->outer == NULL
2430 || !gimple_omp_for_combined_into_p (ctx->stmt)
2431 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2432 || (gimple_omp_for_kind (ctx->outer->stmt)
2433 != GF_OMP_FOR_KIND_FOR)
2434 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2436 error_at (gimple_location (stmt),
2437 "%<ordered simd threads%> must be closely "
2438 "nested inside of %<for simd%> region");
2439 return false;
2441 return true;
2444 error_at (gimple_location (stmt),
2445 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2446 " may not be nested inside %<simd%> region");
2447 return false;
2449 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2451 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2452 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2453 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2454 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2456 error_at (gimple_location (stmt),
2457 "only %<distribute%> or %<parallel%> regions are "
2458 "allowed to be strictly nested inside %<teams%> "
2459 "region");
2460 return false;
2464 switch (gimple_code (stmt))
2466 case GIMPLE_OMP_FOR:
2467 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2468 return true;
2469 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2471 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2473 error_at (gimple_location (stmt),
2474 "%<distribute%> region must be strictly nested "
2475 "inside %<teams%> construct");
2476 return false;
2478 return true;
2480	/* We split a taskloop into a task with a nested taskloop in it.  */
2481 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2482 return true;
2483 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2485 bool ok = false;
2487 if (ctx)
2488 switch (gimple_code (ctx->stmt))
2490 case GIMPLE_OMP_FOR:
2491 ok = (gimple_omp_for_kind (ctx->stmt)
2492 == GF_OMP_FOR_KIND_OACC_LOOP);
2493 break;
2495 case GIMPLE_OMP_TARGET:
2496 switch (gimple_omp_target_kind (ctx->stmt))
2498 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2499 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2500 ok = true;
2501 break;
2503 default:
2504 break;
2507 default:
2508 break;
2510 else if (oacc_get_fn_attrib (current_function_decl))
2511 ok = true;
2512 if (!ok)
2514 error_at (gimple_location (stmt),
2515 "OpenACC loop directive must be associated with"
2516 " an OpenACC compute region");
2517 return false;
2520 /* FALLTHRU */
2521 case GIMPLE_CALL:
2522 if (is_gimple_call (stmt)
2523 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2524 == BUILT_IN_GOMP_CANCEL
2525 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2526 == BUILT_IN_GOMP_CANCELLATION_POINT))
2528 const char *bad = NULL;
2529 const char *kind = NULL;
2530 const char *construct
2531 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2532 == BUILT_IN_GOMP_CANCEL)
2533 ? "#pragma omp cancel"
2534 : "#pragma omp cancellation point";
2535 if (ctx == NULL)
2537 error_at (gimple_location (stmt), "orphaned %qs construct",
2538 construct);
2539 return false;
2541 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2542 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2543 : 0)
2545 case 1:
2546 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2547 bad = "#pragma omp parallel";
2548 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2549 == BUILT_IN_GOMP_CANCEL
2550 && !integer_zerop (gimple_call_arg (stmt, 1)))
2551 ctx->cancellable = true;
2552 kind = "parallel";
2553 break;
2554 case 2:
2555 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2556 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2557 bad = "#pragma omp for";
2558 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2559 == BUILT_IN_GOMP_CANCEL
2560 && !integer_zerop (gimple_call_arg (stmt, 1)))
2562 ctx->cancellable = true;
2563 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2564 OMP_CLAUSE_NOWAIT))
2565 warning_at (gimple_location (stmt), 0,
2566 "%<#pragma omp cancel for%> inside "
2567 "%<nowait%> for construct");
2568 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2569 OMP_CLAUSE_ORDERED))
2570 warning_at (gimple_location (stmt), 0,
2571 "%<#pragma omp cancel for%> inside "
2572 "%<ordered%> for construct");
2574 kind = "for";
2575 break;
2576 case 4:
2577 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2578 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2579 bad = "#pragma omp sections";
2580 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2581 == BUILT_IN_GOMP_CANCEL
2582 && !integer_zerop (gimple_call_arg (stmt, 1)))
2584 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2586 ctx->cancellable = true;
2587 if (omp_find_clause (gimple_omp_sections_clauses
2588 (ctx->stmt),
2589 OMP_CLAUSE_NOWAIT))
2590 warning_at (gimple_location (stmt), 0,
2591 "%<#pragma omp cancel sections%> inside "
2592 "%<nowait%> sections construct");
2594 else
2596 gcc_assert (ctx->outer
2597 && gimple_code (ctx->outer->stmt)
2598 == GIMPLE_OMP_SECTIONS);
2599 ctx->outer->cancellable = true;
2600 if (omp_find_clause (gimple_omp_sections_clauses
2601 (ctx->outer->stmt),
2602 OMP_CLAUSE_NOWAIT))
2603 warning_at (gimple_location (stmt), 0,
2604 "%<#pragma omp cancel sections%> inside "
2605 "%<nowait%> sections construct");
2608 kind = "sections";
2609 break;
2610 case 8:
2611 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2612 bad = "#pragma omp task";
2613 else
2615 for (omp_context *octx = ctx->outer;
2616 octx; octx = octx->outer)
2618 switch (gimple_code (octx->stmt))
2620 case GIMPLE_OMP_TASKGROUP:
2621 break;
2622 case GIMPLE_OMP_TARGET:
2623 if (gimple_omp_target_kind (octx->stmt)
2624 != GF_OMP_TARGET_KIND_REGION)
2625 continue;
2626 /* FALLTHRU */
2627 case GIMPLE_OMP_PARALLEL:
2628 case GIMPLE_OMP_TEAMS:
2629 error_at (gimple_location (stmt),
2630 "%<%s taskgroup%> construct not closely "
2631 "nested inside of %<taskgroup%> region",
2632 construct);
2633 return false;
2634 default:
2635 continue;
2637 break;
2639 ctx->cancellable = true;
2641 kind = "taskgroup";
2642 break;
2643 default:
2644 error_at (gimple_location (stmt), "invalid arguments");
2645 return false;
2647 if (bad)
2649 error_at (gimple_location (stmt),
2650 "%<%s %s%> construct not closely nested inside of %qs",
2651 construct, kind, bad);
2652 return false;
2655 /* FALLTHRU */
2656 case GIMPLE_OMP_SECTIONS:
2657 case GIMPLE_OMP_SINGLE:
2658 for (; ctx != NULL; ctx = ctx->outer)
2659 switch (gimple_code (ctx->stmt))
2661 case GIMPLE_OMP_FOR:
2662 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2663 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2664 break;
2665 /* FALLTHRU */
2666 case GIMPLE_OMP_SECTIONS:
2667 case GIMPLE_OMP_SINGLE:
2668 case GIMPLE_OMP_ORDERED:
2669 case GIMPLE_OMP_MASTER:
2670 case GIMPLE_OMP_TASK:
2671 case GIMPLE_OMP_CRITICAL:
2672 if (is_gimple_call (stmt))
2674 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2675 != BUILT_IN_GOMP_BARRIER)
2676 return true;
2677 error_at (gimple_location (stmt),
2678 "barrier region may not be closely nested inside "
2679 "of work-sharing, %<critical%>, %<ordered%>, "
2680 "%<master%>, explicit %<task%> or %<taskloop%> "
2681 "region");
2682 return false;
2684 error_at (gimple_location (stmt),
2685 "work-sharing region may not be closely nested inside "
2686 "of work-sharing, %<critical%>, %<ordered%>, "
2687 "%<master%>, explicit %<task%> or %<taskloop%> region");
2688 return false;
2689 case GIMPLE_OMP_PARALLEL:
2690 case GIMPLE_OMP_TEAMS:
2691 return true;
2692 case GIMPLE_OMP_TARGET:
2693 if (gimple_omp_target_kind (ctx->stmt)
2694 == GF_OMP_TARGET_KIND_REGION)
2695 return true;
2696 break;
2697 default:
2698 break;
2700 break;
2701 case GIMPLE_OMP_MASTER:
2702 for (; ctx != NULL; ctx = ctx->outer)
2703 switch (gimple_code (ctx->stmt))
2705 case GIMPLE_OMP_FOR:
2706 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2707 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2708 break;
2709 /* FALLTHRU */
2710 case GIMPLE_OMP_SECTIONS:
2711 case GIMPLE_OMP_SINGLE:
2712 case GIMPLE_OMP_TASK:
2713 error_at (gimple_location (stmt),
2714 "%<master%> region may not be closely nested inside "
2715 "of work-sharing, explicit %<task%> or %<taskloop%> "
2716 "region");
2717 return false;
2718 case GIMPLE_OMP_PARALLEL:
2719 case GIMPLE_OMP_TEAMS:
2720 return true;
2721 case GIMPLE_OMP_TARGET:
2722 if (gimple_omp_target_kind (ctx->stmt)
2723 == GF_OMP_TARGET_KIND_REGION)
2724 return true;
2725 break;
2726 default:
2727 break;
2729 break;
2730 case GIMPLE_OMP_TASK:
2731 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2732 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2733 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2734 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2736 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2737 error_at (OMP_CLAUSE_LOCATION (c),
2738 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2739 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2740 return false;
2742 break;
2743 case GIMPLE_OMP_ORDERED:
2744 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2745 c; c = OMP_CLAUSE_CHAIN (c))
2747 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2749 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2750 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2751 continue;
2753 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2754 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2755 || kind == OMP_CLAUSE_DEPEND_SINK)
2757 tree oclause;
2758 /* Look for containing ordered(N) loop. */
2759 if (ctx == NULL
2760 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2761 || (oclause
2762 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2763 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2765 error_at (OMP_CLAUSE_LOCATION (c),
2766 "%<ordered%> construct with %<depend%> clause "
2767 "must be closely nested inside an %<ordered%> "
2768 "loop");
2769 return false;
2771 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2773 error_at (OMP_CLAUSE_LOCATION (c),
2774 "%<ordered%> construct with %<depend%> clause "
2775 "must be closely nested inside a loop with "
2776 "%<ordered%> clause with a parameter");
2777 return false;
2780 else
2782 error_at (OMP_CLAUSE_LOCATION (c),
2783 "invalid depend kind in omp %<ordered%> %<depend%>");
2784 return false;
2787 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2788 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2790 /* ordered simd must be closely nested inside of simd region,
2791 and simd region must not encounter constructs other than
2792 ordered simd, therefore ordered simd may be either orphaned,
2793	     or ctx->stmt must be simd.  The latter case is already
2794	     handled above.  */
2795 if (ctx != NULL)
2797 error_at (gimple_location (stmt),
2798 "%<ordered%> %<simd%> must be closely nested inside "
2799 "%<simd%> region");
2800 return false;
2803 for (; ctx != NULL; ctx = ctx->outer)
2804 switch (gimple_code (ctx->stmt))
2806 case GIMPLE_OMP_CRITICAL:
2807 case GIMPLE_OMP_TASK:
2808 case GIMPLE_OMP_ORDERED:
2809 ordered_in_taskloop:
2810 error_at (gimple_location (stmt),
2811 "%<ordered%> region may not be closely nested inside "
2812 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2813 "%<taskloop%> region");
2814 return false;
2815 case GIMPLE_OMP_FOR:
2816 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2817 goto ordered_in_taskloop;
2818 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2819 OMP_CLAUSE_ORDERED) == NULL)
2821 error_at (gimple_location (stmt),
2822 "%<ordered%> region must be closely nested inside "
2823 "a loop region with an %<ordered%> clause");
2824 return false;
2826 return true;
2827 case GIMPLE_OMP_TARGET:
2828 if (gimple_omp_target_kind (ctx->stmt)
2829 != GF_OMP_TARGET_KIND_REGION)
2830 break;
2831 /* FALLTHRU */
2832 case GIMPLE_OMP_PARALLEL:
2833 case GIMPLE_OMP_TEAMS:
2834 error_at (gimple_location (stmt),
2835 "%<ordered%> region must be closely nested inside "
2836 "a loop region with an %<ordered%> clause");
2837 return false;
2838 default:
2839 break;
2841 break;
2842 case GIMPLE_OMP_CRITICAL:
2844 tree this_stmt_name
2845 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2846 for (; ctx != NULL; ctx = ctx->outer)
2847 if (gomp_critical *other_crit
2848 = dyn_cast <gomp_critical *> (ctx->stmt))
2849 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2851 error_at (gimple_location (stmt),
2852 "%<critical%> region may not be nested inside "
2853 "a %<critical%> region with the same name");
2854 return false;
2857 break;
2858 case GIMPLE_OMP_TEAMS:
2859 if (ctx == NULL
2860 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2861 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2863 error_at (gimple_location (stmt),
2864 "%<teams%> construct not closely nested inside of "
2865 "%<target%> construct");
2866 return false;
2868 break;
2869 case GIMPLE_OMP_TARGET:
2870 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2871 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2872 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2873 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2875 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2876 error_at (OMP_CLAUSE_LOCATION (c),
2877 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2878 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2879 return false;
2881 if (is_gimple_omp_offloaded (stmt)
2882 && oacc_get_fn_attrib (cfun->decl) != NULL)
2884 error_at (gimple_location (stmt),
2885 "OpenACC region inside of OpenACC routine, nested "
2886 "parallelism not supported yet");
2887 return false;
2889 for (; ctx != NULL; ctx = ctx->outer)
2891 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2893 if (is_gimple_omp (stmt)
2894 && is_gimple_omp_oacc (stmt)
2895 && is_gimple_omp (ctx->stmt))
2897 error_at (gimple_location (stmt),
2898 "OpenACC construct inside of non-OpenACC region");
2899 return false;
2901 continue;
2904 const char *stmt_name, *ctx_stmt_name;
2905 switch (gimple_omp_target_kind (stmt))
2907 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2908 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2909 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2910 case GF_OMP_TARGET_KIND_ENTER_DATA:
2911 stmt_name = "target enter data"; break;
2912 case GF_OMP_TARGET_KIND_EXIT_DATA:
2913 stmt_name = "target exit data"; break;
2914 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2915 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2916 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2917 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2918 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2919 stmt_name = "enter/exit data"; break;
2920 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2921 break;
2922 default: gcc_unreachable ();
2924 switch (gimple_omp_target_kind (ctx->stmt))
2926 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2927 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2928 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2929 ctx_stmt_name = "parallel"; break;
2930 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2931 ctx_stmt_name = "kernels"; break;
2932 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2933 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2934 ctx_stmt_name = "host_data"; break;
2935 default: gcc_unreachable ();
2938 /* OpenACC/OpenMP mismatch? */
2939 if (is_gimple_omp_oacc (stmt)
2940 != is_gimple_omp_oacc (ctx->stmt))
2942 error_at (gimple_location (stmt),
2943 "%s %qs construct inside of %s %qs region",
2944 (is_gimple_omp_oacc (stmt)
2945 ? "OpenACC" : "OpenMP"), stmt_name,
2946 (is_gimple_omp_oacc (ctx->stmt)
2947 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2948 return false;
2950 if (is_gimple_omp_offloaded (ctx->stmt))
2952 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2953 if (is_gimple_omp_oacc (ctx->stmt))
2955 error_at (gimple_location (stmt),
2956 "%qs construct inside of %qs region",
2957 stmt_name, ctx_stmt_name);
2958 return false;
2960 else
2962 warning_at (gimple_location (stmt), 0,
2963 "%qs construct inside of %qs region",
2964 stmt_name, ctx_stmt_name);
2968 break;
2969 default:
2970 break;
2972 return true;
2976 /* Helper function for scan_omp.
2978 Callback for walk_tree or operators in walk_gimple_stmt used to
2979 scan for OMP directives in TP. */
2981 static tree
2982 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
2984 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2985 omp_context *ctx = (omp_context *) wi->info;
2986 tree t = *tp;
2988 switch (TREE_CODE (t))
2990 case VAR_DECL:
2991 case PARM_DECL:
2992 case LABEL_DECL:
2993 case RESULT_DECL:
2994 if (ctx)
2996 tree repl = remap_decl (t, &ctx->cb);
2997 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
2998 *tp = repl;
3000 break;
3002 default:
3003 if (ctx && TYPE_P (t))
3004 *tp = remap_type (t, &ctx->cb);
3005 else if (!DECL_P (t))
3007 *walk_subtrees = 1;
3008 if (ctx)
3010 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3011 if (tem != TREE_TYPE (t))
3013 if (TREE_CODE (t) == INTEGER_CST)
3014 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3015 else
3016 TREE_TYPE (t) = tem;
3020 break;
3023 return NULL_TREE;
3026 /* Return true if FNDECL is a setjmp or a longjmp. */
3028 static bool
3029 setjmp_or_longjmp_p (const_tree fndecl)
3031 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3032 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3033 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3034 return true;
3036 tree declname = DECL_NAME (fndecl);
3037 if (!declname)
3038 return false;
3039 const char *name = IDENTIFIER_POINTER (declname);
3040 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3044 /* Helper function for scan_omp.
3046 Callback for walk_gimple_stmt used to scan for OMP directives in
3047 the current statement in GSI. */
3049 static tree
3050 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3051 struct walk_stmt_info *wi)
3053 gimple *stmt = gsi_stmt (*gsi);
3054 omp_context *ctx = (omp_context *) wi->info;
3056 if (gimple_has_location (stmt))
3057 input_location = gimple_location (stmt);
3059 /* Check the nesting restrictions. */
3060 bool remove = false;
3061 if (is_gimple_omp (stmt))
3062 remove = !check_omp_nesting_restrictions (stmt, ctx);
3063 else if (is_gimple_call (stmt))
3065 tree fndecl = gimple_call_fndecl (stmt);
3066 if (fndecl)
3068 if (setjmp_or_longjmp_p (fndecl)
3069 && ctx
3070 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3071 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3073 remove = true;
3074 error_at (gimple_location (stmt),
3075 "setjmp/longjmp inside simd construct");
3077 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3078 switch (DECL_FUNCTION_CODE (fndecl))
3080 case BUILT_IN_GOMP_BARRIER:
3081 case BUILT_IN_GOMP_CANCEL:
3082 case BUILT_IN_GOMP_CANCELLATION_POINT:
3083 case BUILT_IN_GOMP_TASKYIELD:
3084 case BUILT_IN_GOMP_TASKWAIT:
3085 case BUILT_IN_GOMP_TASKGROUP_START:
3086 case BUILT_IN_GOMP_TASKGROUP_END:
3087 remove = !check_omp_nesting_restrictions (stmt, ctx);
3088 break;
3089 default:
3090 break;
3094 if (remove)
3096 stmt = gimple_build_nop ();
3097 gsi_replace (gsi, stmt, false);
3100 *handled_ops_p = true;
3102 switch (gimple_code (stmt))
3104 case GIMPLE_OMP_PARALLEL:
3105 taskreg_nesting_level++;
3106 scan_omp_parallel (gsi, ctx);
3107 taskreg_nesting_level--;
3108 break;
3110 case GIMPLE_OMP_TASK:
3111 taskreg_nesting_level++;
3112 scan_omp_task (gsi, ctx);
3113 taskreg_nesting_level--;
3114 break;
3116 case GIMPLE_OMP_FOR:
3117 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3118 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3119 && omp_maybe_offloaded_ctx (ctx)
3120 && omp_max_simt_vf ())
3121 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3122 else
3123 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3124 break;
3126 case GIMPLE_OMP_SECTIONS:
3127 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3128 break;
3130 case GIMPLE_OMP_SINGLE:
3131 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3132 break;
3134 case GIMPLE_OMP_SECTION:
3135 case GIMPLE_OMP_MASTER:
3136 case GIMPLE_OMP_TASKGROUP:
3137 case GIMPLE_OMP_ORDERED:
3138 case GIMPLE_OMP_CRITICAL:
3139 case GIMPLE_OMP_GRID_BODY:
3140 ctx = new_omp_context (stmt, ctx);
3141 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3142 break;
3144 case GIMPLE_OMP_TARGET:
3145 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3146 break;
3148 case GIMPLE_OMP_TEAMS:
3149 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3150 break;
3152 case GIMPLE_BIND:
3154 tree var;
3156 *handled_ops_p = false;
3157 if (ctx)
3158 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3159 var ;
3160 var = DECL_CHAIN (var))
3161 insert_decl_map (&ctx->cb, var, var);
3163 break;
3164 default:
3165 *handled_ops_p = false;
3166 break;
3169 return NULL_TREE;
3173 /* Scan all the statements starting at the current statement. CTX
3174 contains context information about the OMP directives and
3175 clauses found during the scan. */
3177 static void
3178 scan_omp (gimple_seq *body_p, omp_context *ctx)
3180 location_t saved_location;
3181 struct walk_stmt_info wi;
3183 memset (&wi, 0, sizeof (wi));
3184 wi.info = ctx;
3185 wi.want_locations = true;
3187 saved_location = input_location;
3188 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3189 input_location = saved_location;
3192 /* Re-gimplification and code generation routines. */
3194 /* If a context was created for STMT when it was scanned, return it. */
3196 static omp_context *
3197 maybe_lookup_ctx (gimple *stmt)
3199 splay_tree_node n;
3200 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3201 return n ? (omp_context *) n->value : NULL;
3205 /* Find the mapping for DECL in CTX or the immediately enclosing
3206 context that has a mapping for DECL.
3208 If CTX is a nested parallel directive, we may have to use the decl
3209 mappings created in CTX's parent context. Suppose that we have the
3210    following parallel nesting (variable UIDs shown for clarity):
3212 iD.1562 = 0;
3213 #omp parallel shared(iD.1562) -> outer parallel
3214 iD.1562 = iD.1562 + 1;
3216 #omp parallel shared (iD.1562) -> inner parallel
3217 iD.1562 = iD.1562 - 1;
3219 Each parallel structure will create a distinct .omp_data_s structure
3220 for copying iD.1562 in/out of the directive:
3222 outer parallel .omp_data_s.1.i -> iD.1562
3223 inner parallel .omp_data_s.2.i -> iD.1562
3225 A shared variable mapping will produce a copy-out operation before
3226 the parallel directive and a copy-in operation after it. So, in
3227 this case we would have:
3229 iD.1562 = 0;
3230 .omp_data_o.1.i = iD.1562;
3231 #omp parallel shared(iD.1562) -> outer parallel
3232 .omp_data_i.1 = &.omp_data_o.1
3233 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3235 .omp_data_o.2.i = iD.1562; -> **
3236 #omp parallel shared(iD.1562) -> inner parallel
3237 .omp_data_i.2 = &.omp_data_o.2
3238 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3241 ** This is a problem. The symbol iD.1562 cannot be referenced
3242 inside the body of the outer parallel region. But since we are
3243 emitting this copy operation while expanding the inner parallel
3244 directive, we need to access the CTX structure of the outer
3245 parallel directive to get the correct mapping:
3247 .omp_data_o.2.i = .omp_data_i.1->i
3249 Since there may be other workshare or parallel directives enclosing
3250 the parallel directive, it may be necessary to walk up the context
3251 parent chain. This is not a problem in general because nested
3252 parallelism happens only rarely. */
3254 static tree
3255 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3257 tree t;
3258 omp_context *up;
3260 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3261 t = maybe_lookup_decl (decl, up);
3263 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3265 return t ? t : decl;
3269 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3270 in outer contexts. */
3272 static tree
3273 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3275 tree t = NULL;
3276 omp_context *up;
3278 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3279 t = maybe_lookup_decl (decl, up);
3281 return t ? t : decl;
3285 /* Construct the initialization value for reduction operation OP. */
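/* For example: '+', '-', '|' and '^' reductions start from 0;
   '*' and '&&' start from 1; '&' starts from ~0; max starts from the
   minimum value of the type (or -inf when infinities are honored),
   and min correspondingly from the maximum value.  */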
3287 tree
3288 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3290 switch (op)
3292 case PLUS_EXPR:
3293 case MINUS_EXPR:
3294 case BIT_IOR_EXPR:
3295 case BIT_XOR_EXPR:
3296 case TRUTH_OR_EXPR:
3297 case TRUTH_ORIF_EXPR:
3298 case TRUTH_XOR_EXPR:
3299 case NE_EXPR:
3300 return build_zero_cst (type);
3302 case MULT_EXPR:
3303 case TRUTH_AND_EXPR:
3304 case TRUTH_ANDIF_EXPR:
3305 case EQ_EXPR:
3306 return fold_convert_loc (loc, type, integer_one_node);
3308 case BIT_AND_EXPR:
3309 return fold_convert_loc (loc, type, integer_minus_one_node);
3311 case MAX_EXPR:
3312 if (SCALAR_FLOAT_TYPE_P (type))
3314 REAL_VALUE_TYPE max, min;
3315 if (HONOR_INFINITIES (type))
3317 real_inf (&max);
3318 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3320 else
3321 real_maxval (&min, 1, TYPE_MODE (type));
3322 return build_real (type, min);
3324 else if (POINTER_TYPE_P (type))
3326 wide_int min
3327 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3328 return wide_int_to_tree (type, min);
3330 else
3332 gcc_assert (INTEGRAL_TYPE_P (type));
3333 return TYPE_MIN_VALUE (type);
3336 case MIN_EXPR:
3337 if (SCALAR_FLOAT_TYPE_P (type))
3339 REAL_VALUE_TYPE max;
3340 if (HONOR_INFINITIES (type))
3341 real_inf (&max);
3342 else
3343 real_maxval (&max, 0, TYPE_MODE (type));
3344 return build_real (type, max);
3346 else if (POINTER_TYPE_P (type))
3348 wide_int max
3349 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3350 return wide_int_to_tree (type, max);
3352 else
3354 gcc_assert (INTEGRAL_TYPE_P (type));
3355 return TYPE_MAX_VALUE (type);
3358 default:
3359 gcc_unreachable ();
3363 /* Construct the initialization value for reduction CLAUSE. */
3365 tree
3366 omp_reduction_init (tree clause, tree type)
3368 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3369 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3372 /* Return alignment to be assumed for var in CLAUSE, which should be
3373 OMP_CLAUSE_ALIGNED. */
3375 static tree
3376 omp_clause_aligned_alignment (tree clause)
3378 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3379 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3381 /* Otherwise return implementation defined alignment. */
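     /* The loop below picks the largest alignment among the preferred
	vector types for the supported scalar modes; e.g. on a target
	whose widest preferred vector is 32 bytes this would typically
	return 32 (illustrative only, the result is target-dependent).  */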
3382 unsigned int al = 1;
3383 opt_scalar_mode mode_iter;
3384 int vs = targetm.vectorize.autovectorize_vector_sizes ();
3385 if (vs)
3386 vs = 1 << floor_log2 (vs);
3387 static enum mode_class classes[]
3388 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3389 for (int i = 0; i < 4; i += 2)
3390 /* The for loop above dictates that we only walk through scalar classes. */
3391 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3393 scalar_mode mode = mode_iter.require ();
3394 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3395 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3396 continue;
3397 while (vs
3398 && GET_MODE_SIZE (vmode) < vs
3399 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3400 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3402 tree type = lang_hooks.types.type_for_mode (mode, 1);
3403 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3404 continue;
3405 type = build_vector_type (type, GET_MODE_SIZE (vmode)
3406 / GET_MODE_SIZE (mode));
3407 if (TYPE_MODE (type) != vmode)
3408 continue;
3409 if (TYPE_ALIGN_UNIT (type) > al)
3410 al = TYPE_ALIGN_UNIT (type);
3412 return build_int_cst (integer_type_node, al);
3416 /* This structure is part of the interface between lower_rec_simd_input_clauses
3417 and lower_rec_input_clauses. */
3419 struct omplow_simd_context {
3420 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3421 tree idx;
3422 tree lane;
3423 vec<tree, va_heap> simt_eargs;
3424 gimple_seq simt_dlist;
3425 poly_uint64_pod max_vf;
3426 bool is_simt;
3429 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3430 privatization. */
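/* A sketch of the effect (illustrative): privatizing 'int x' in a
   simd loop with a max_vf of, say, 8 creates an "omp simd array"

     int x_array[8];

   and IVAR/LVAR become x_array[idx] and x_array[lane], so that each
   SIMD lane gets its own copy.  */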
3432 static bool
3433 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3434 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3436 if (known_eq (sctx->max_vf, 0U))
3438 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3439 if (maybe_gt (sctx->max_vf, 1U))
3441 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3442 OMP_CLAUSE_SAFELEN);
3443 if (c)
3445 poly_uint64 safe_len;
3446 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3447 || maybe_lt (safe_len, 1U))
3448 sctx->max_vf = 1;
3449 else
3450 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3453 if (maybe_gt (sctx->max_vf, 1U))
3455 sctx->idx = create_tmp_var (unsigned_type_node);
3456 sctx->lane = create_tmp_var (unsigned_type_node);
3459 if (known_eq (sctx->max_vf, 1U))
3460 return false;
3462 if (sctx->is_simt)
3464 if (is_gimple_reg (new_var))
3466 ivar = lvar = new_var;
3467 return true;
3469 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3470 ivar = lvar = create_tmp_var (type);
3471 TREE_ADDRESSABLE (ivar) = 1;
3472 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3473 NULL, DECL_ATTRIBUTES (ivar));
3474 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3475 tree clobber = build_constructor (type, NULL);
3476 TREE_THIS_VOLATILE (clobber) = 1;
3477 gimple *g = gimple_build_assign (ivar, clobber);
3478 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3480 else
3482 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3483 tree avar = create_tmp_var_raw (atype);
3484 if (TREE_ADDRESSABLE (new_var))
3485 TREE_ADDRESSABLE (avar) = 1;
3486 DECL_ATTRIBUTES (avar)
3487 = tree_cons (get_identifier ("omp simd array"), NULL,
3488 DECL_ATTRIBUTES (avar));
3489 gimple_add_tmp_var (avar);
3490 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3491 NULL_TREE, NULL_TREE);
3492 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3493 NULL_TREE, NULL_TREE);
3495 if (DECL_P (new_var))
3497 SET_DECL_VALUE_EXPR (new_var, lvar);
3498 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3500 return true;
3503 /* Helper function of lower_rec_input_clauses. For a reference
3504 in simd reduction, add an underlying variable it will reference. */
3506 static void
3507 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3509 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3510 if (TREE_CONSTANT (z))
3512 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3513 get_name (new_vard));
3514 gimple_add_tmp_var (z);
3515 TREE_ADDRESSABLE (z) = 1;
3516 z = build_fold_addr_expr_loc (loc, z);
3517 gimplify_assign (new_vard, z, ilist);
3521 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3522 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3523 private variables. Initialization statements go in ILIST, while calls
3524 to destructors go in DLIST. */
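/* For instance (illustrative), for

     #pragma omp parallel firstprivate (x)

   ILIST receives, roughly, 'x = .omp_data_i->x;' on the child side,
   and for a C++ type with a destructor the matching destructor call
   is appended to DLIST.  */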
3526 static void
3527 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3528 omp_context *ctx, struct omp_for_data *fd)
3530 tree c, dtor, copyin_seq, x, ptr;
3531 bool copyin_by_ref = false;
3532 bool lastprivate_firstprivate = false;
3533 bool reduction_omp_orig_ref = false;
3534 int pass;
3535 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3536 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3537 omplow_simd_context sctx = omplow_simd_context ();
3538 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3539 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3540 gimple_seq llist[3] = { };
3542 copyin_seq = NULL;
3543 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3545 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3546 with data sharing clauses referencing variable sized vars. That
3547 is unnecessarily hard to support and very unlikely to result in
3548 vectorized code anyway. */
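     /* E.g. (illustrative)

	  int vla[n];
	  #pragma omp simd private (vla)

	is forced to max_vf == 1, and hence safelen (1), below.  */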
3549 if (is_simd)
3550 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3551 switch (OMP_CLAUSE_CODE (c))
3553 case OMP_CLAUSE_LINEAR:
3554 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3555 sctx.max_vf = 1;
3556 /* FALLTHRU */
3557 case OMP_CLAUSE_PRIVATE:
3558 case OMP_CLAUSE_FIRSTPRIVATE:
3559 case OMP_CLAUSE_LASTPRIVATE:
3560 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3561 sctx.max_vf = 1;
3562 break;
3563 case OMP_CLAUSE_REDUCTION:
3564 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3565 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3566 sctx.max_vf = 1;
3567 break;
3568 default:
3569 continue;
3572 /* Add a placeholder for simduid. */
3573 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3574 sctx.simt_eargs.safe_push (NULL_TREE);
3576 /* Do all the fixed sized types in the first pass, and the variable sized
3577 types in the second pass. This makes sure that the scalar arguments to
3578 the variable sized types are processed before we use them in the
3579 variable sized operations. */
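  /* E.g. for 'int n; int vla[n];' with both privatized, 'n' must be
     remapped in pass 0 so that the VLA's size expression already
     refers to the private 'n' when the VLA itself is handled in
     pass 1 (illustrative).  */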
3580 for (pass = 0; pass < 2; ++pass)
3582 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3584 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3585 tree var, new_var;
3586 bool by_ref;
3587 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3589 switch (c_kind)
3591 case OMP_CLAUSE_PRIVATE:
3592 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3593 continue;
3594 break;
3595 case OMP_CLAUSE_SHARED:
3596 /* Ignore shared directives in teams construct. */
3597 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3598 continue;
3599 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3601 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3602 || is_global_var (OMP_CLAUSE_DECL (c)));
3603 continue;
3605 case OMP_CLAUSE_FIRSTPRIVATE:
3606 case OMP_CLAUSE_COPYIN:
3607 break;
3608 case OMP_CLAUSE_LINEAR:
3609 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3610 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3611 lastprivate_firstprivate = true;
3612 break;
3613 case OMP_CLAUSE_REDUCTION:
3614 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3615 reduction_omp_orig_ref = true;
3616 break;
3617 case OMP_CLAUSE__LOOPTEMP_:
3618 /* Handle _looptemp_ clauses only on parallel/task. */
3619 if (fd)
3620 continue;
3621 break;
3622 case OMP_CLAUSE_LASTPRIVATE:
3623 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3625 lastprivate_firstprivate = true;
3626 if (pass != 0 || is_taskloop_ctx (ctx))
3627 continue;
3629 /* Even without corresponding firstprivate, if
3630 decl is Fortran allocatable, it needs outer var
3631 reference. */
3632 else if (pass == 0
3633 && lang_hooks.decls.omp_private_outer_ref
3634 (OMP_CLAUSE_DECL (c)))
3635 lastprivate_firstprivate = true;
3636 break;
3637 case OMP_CLAUSE_ALIGNED:
3638 if (pass == 0)
3639 continue;
3640 var = OMP_CLAUSE_DECL (c);
3641 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3642 && !is_global_var (var))
3644 new_var = maybe_lookup_decl (var, ctx);
3645 if (new_var == NULL_TREE)
3646 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3647 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3648 tree alarg = omp_clause_aligned_alignment (c);
3649 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3650 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3651 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3652 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3653 gimplify_and_add (x, ilist);
3655 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3656 && is_global_var (var))
3658 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3659 new_var = lookup_decl (var, ctx);
3660 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3661 t = build_fold_addr_expr_loc (clause_loc, t);
3662 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3663 tree alarg = omp_clause_aligned_alignment (c);
3664 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3665 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3666 t = fold_convert_loc (clause_loc, ptype, t);
3667 x = create_tmp_var (ptype);
3668 t = build2 (MODIFY_EXPR, ptype, x, t);
3669 gimplify_and_add (t, ilist);
3670 t = build_simple_mem_ref_loc (clause_loc, x);
3671 SET_DECL_VALUE_EXPR (new_var, t);
3672 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3674 continue;
3675 default:
3676 continue;
3679 new_var = var = OMP_CLAUSE_DECL (c);
3680 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3682 var = TREE_OPERAND (var, 0);
3683 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3684 var = TREE_OPERAND (var, 0);
3685 if (TREE_CODE (var) == INDIRECT_REF
3686 || TREE_CODE (var) == ADDR_EXPR)
3687 var = TREE_OPERAND (var, 0);
3688 if (is_variable_sized (var))
3690 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3691 var = DECL_VALUE_EXPR (var);
3692 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3693 var = TREE_OPERAND (var, 0);
3694 gcc_assert (DECL_P (var));
3696 new_var = var;
3698 if (c_kind != OMP_CLAUSE_COPYIN)
3699 new_var = lookup_decl (var, ctx);
3701 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3703 if (pass != 0)
3704 continue;
3706 /* C/C++ array section reductions. */
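	  /* E.g. (illustrative) 'reduction (+: p[0:n])': OMP_CLAUSE_DECL
	     is a MEM_REF with a bias, a private buffer of n elements is
	     allocated below and initialized element by element in a loop,
	     and, when needed, a second loop in DLIST merges the partial
	     results back.  */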
3707 else if (c_kind == OMP_CLAUSE_REDUCTION
3708 && var != OMP_CLAUSE_DECL (c))
3710 if (pass == 0)
3711 continue;
3713 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3714 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3715 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3717 tree b = TREE_OPERAND (orig_var, 1);
3718 b = maybe_lookup_decl (b, ctx);
3719 if (b == NULL)
3721 b = TREE_OPERAND (orig_var, 1);
3722 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3724 if (integer_zerop (bias))
3725 bias = b;
3726 else
3728 bias = fold_convert_loc (clause_loc,
3729 TREE_TYPE (b), bias);
3730 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3731 TREE_TYPE (b), b, bias);
3733 orig_var = TREE_OPERAND (orig_var, 0);
3735 if (TREE_CODE (orig_var) == INDIRECT_REF
3736 || TREE_CODE (orig_var) == ADDR_EXPR)
3737 orig_var = TREE_OPERAND (orig_var, 0);
3738 tree d = OMP_CLAUSE_DECL (c);
3739 tree type = TREE_TYPE (d);
3740 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3741 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3742 const char *name = get_name (orig_var);
3743 if (TREE_CONSTANT (v))
3745 x = create_tmp_var_raw (type, name);
3746 gimple_add_tmp_var (x);
3747 TREE_ADDRESSABLE (x) = 1;
3748 x = build_fold_addr_expr_loc (clause_loc, x);
3750 else
3752 tree atmp
3753 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3754 tree t = maybe_lookup_decl (v, ctx);
3755 if (t)
3756 v = t;
3757 else
3758 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3759 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3760 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3761 TREE_TYPE (v), v,
3762 build_int_cst (TREE_TYPE (v), 1));
3763 t = fold_build2_loc (clause_loc, MULT_EXPR,
3764 TREE_TYPE (v), t,
3765 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3766 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3767 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3770 tree ptype = build_pointer_type (TREE_TYPE (type));
3771 x = fold_convert_loc (clause_loc, ptype, x);
3772 tree y = create_tmp_var (ptype, name);
3773 gimplify_assign (y, x, ilist);
3774 x = y;
3775 tree yb = y;
3777 if (!integer_zerop (bias))
3779 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3780 bias);
3781		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3782					 x);
3783 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3784 pointer_sized_int_node, yb, bias);
3785 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3786 yb = create_tmp_var (ptype, name);
3787 gimplify_assign (yb, x, ilist);
3788 x = yb;
3791 d = TREE_OPERAND (d, 0);
3792 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3793 d = TREE_OPERAND (d, 0);
3794 if (TREE_CODE (d) == ADDR_EXPR)
3796 if (orig_var != var)
3798 gcc_assert (is_variable_sized (orig_var));
3799		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3800					    x);
3801 gimplify_assign (new_var, x, ilist);
3802 tree new_orig_var = lookup_decl (orig_var, ctx);
3803 tree t = build_fold_indirect_ref (new_var);
3804 DECL_IGNORED_P (new_var) = 0;
3805		      TREE_THIS_NOTRAP (t) = 1;
3806 SET_DECL_VALUE_EXPR (new_orig_var, t);
3807 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3809 else
3811 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3812 build_int_cst (ptype, 0));
3813 SET_DECL_VALUE_EXPR (new_var, x);
3814 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3817 else
3819 gcc_assert (orig_var == var);
3820 if (TREE_CODE (d) == INDIRECT_REF)
3822 x = create_tmp_var (ptype, name);
3823 TREE_ADDRESSABLE (x) = 1;
3824 gimplify_assign (x, yb, ilist);
3825 x = build_fold_addr_expr_loc (clause_loc, x);
3827 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3828 gimplify_assign (new_var, x, ilist);
3830 tree y1 = create_tmp_var (ptype, NULL);
3831 gimplify_assign (y1, y, ilist);
3832 tree i2 = NULL_TREE, y2 = NULL_TREE;
3833 tree body2 = NULL_TREE, end2 = NULL_TREE;
3834 tree y3 = NULL_TREE, y4 = NULL_TREE;
3835 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3837 y2 = create_tmp_var (ptype, NULL);
3838 gimplify_assign (y2, y, ilist);
3839 tree ref = build_outer_var_ref (var, ctx);
3840		  /* For references, build_outer_var_ref already performs this.  */
3841 if (TREE_CODE (d) == INDIRECT_REF)
3842 gcc_assert (omp_is_reference (var));
3843 else if (TREE_CODE (d) == ADDR_EXPR)
3844 ref = build_fold_addr_expr (ref);
3845 else if (omp_is_reference (var))
3846 ref = build_fold_addr_expr (ref);
3847 ref = fold_convert_loc (clause_loc, ptype, ref);
3848 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3849 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3851 y3 = create_tmp_var (ptype, NULL);
3852 gimplify_assign (y3, unshare_expr (ref), ilist);
3854 if (is_simd)
3856 y4 = create_tmp_var (ptype, NULL);
3857 gimplify_assign (y4, ref, dlist);
3860 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3861 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3862 tree body = create_artificial_label (UNKNOWN_LOCATION);
3863 tree end = create_artificial_label (UNKNOWN_LOCATION);
3864 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3865 if (y2)
3867 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3868 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3869 body2 = create_artificial_label (UNKNOWN_LOCATION);
3870 end2 = create_artificial_label (UNKNOWN_LOCATION);
3871 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3873 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3875 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3876 tree decl_placeholder
3877 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3878 SET_DECL_VALUE_EXPR (decl_placeholder,
3879 build_simple_mem_ref (y1));
3880 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3881 SET_DECL_VALUE_EXPR (placeholder,
3882 y3 ? build_simple_mem_ref (y3)
3883 : error_mark_node);
3884 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3885 x = lang_hooks.decls.omp_clause_default_ctor
3886 (c, build_simple_mem_ref (y1),
3887 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3888 if (x)
3889 gimplify_and_add (x, ilist);
3890 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3892 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3893 lower_omp (&tseq, ctx);
3894 gimple_seq_add_seq (ilist, tseq);
3896 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3897 if (is_simd)
3899 SET_DECL_VALUE_EXPR (decl_placeholder,
3900 build_simple_mem_ref (y2));
3901 SET_DECL_VALUE_EXPR (placeholder,
3902 build_simple_mem_ref (y4));
3903 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3904 lower_omp (&tseq, ctx);
3905 gimple_seq_add_seq (dlist, tseq);
3906 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3908 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3909 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3910 x = lang_hooks.decls.omp_clause_dtor
3911 (c, build_simple_mem_ref (y2));
3912 if (x)
3914 gimple_seq tseq = NULL;
3915 dtor = x;
3916 gimplify_stmt (&dtor, &tseq);
3917 gimple_seq_add_seq (dlist, tseq);
3920 else
3922 x = omp_reduction_init (c, TREE_TYPE (type));
3923 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3925 /* reduction(-:var) sums up the partial results, so it
3926 acts identically to reduction(+:var). */
3927 if (code == MINUS_EXPR)
3928 code = PLUS_EXPR;
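	      /* Illustration (not part of the generated code): for a user
		 loop such as

		   #pragma omp parallel for reduction(-:s)
		   for (i = 0; i < n; i++)
		     s -= a[i];

		 every thread's partial result must be folded back into the
		 original S with '+', which is why MINUS_EXPR is
		 canonicalized to PLUS_EXPR before the merge code below is
		 built.  */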
3930 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3931 if (is_simd)
3933 x = build2 (code, TREE_TYPE (type),
3934 build_simple_mem_ref (y4),
3935 build_simple_mem_ref (y2));
3936 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3939 gimple *g
3940 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3941 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3942 gimple_seq_add_stmt (ilist, g);
3943 if (y3)
3945 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3946 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3947 gimple_seq_add_stmt (ilist, g);
3949 g = gimple_build_assign (i, PLUS_EXPR, i,
3950 build_int_cst (TREE_TYPE (i), 1));
3951 gimple_seq_add_stmt (ilist, g);
3952 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3953 gimple_seq_add_stmt (ilist, g);
3954 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3955 if (y2)
3957 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3958 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3959 gimple_seq_add_stmt (dlist, g);
3960 if (y4)
3962 g = gimple_build_assign
3963 (y4, POINTER_PLUS_EXPR, y4,
3964 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3965 gimple_seq_add_stmt (dlist, g);
3967 g = gimple_build_assign (i2, PLUS_EXPR, i2,
3968 build_int_cst (TREE_TYPE (i2), 1));
3969 gimple_seq_add_stmt (dlist, g);
3970 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
3971 gimple_seq_add_stmt (dlist, g);
3972 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
3974 continue;
3976 else if (is_variable_sized (var))
3978 /* For variable sized types, we need to allocate the
3979 actual storage here. Call alloca and store the
3980 result in the pointer decl that we created elsewhere. */
3981 if (pass == 0)
3982 continue;
3984 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3986 gcall *stmt;
3987 tree tmp, atmp;
3989 ptr = DECL_VALUE_EXPR (new_var);
3990 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3991 ptr = TREE_OPERAND (ptr, 0);
3992 gcc_assert (DECL_P (ptr));
3993 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
3995 /* void *tmp = __builtin_alloca_with_align (size, align); */
3996 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3997 stmt = gimple_build_call (atmp, 2, x,
3998 size_int (DECL_ALIGN (var)));
3999 tmp = create_tmp_var_raw (ptr_type_node);
4000 gimple_add_tmp_var (tmp);
4001 gimple_call_set_lhs (stmt, tmp);
4003 gimple_seq_add_stmt (ilist, stmt);
4005 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4006 gimplify_assign (ptr, x, ilist);
4009 else if (omp_is_reference (var))
4011 /* For references that are being privatized for Fortran,
4012 allocate new backing storage for the new pointer
4013 variable. This allows us to avoid changing all the
4014 code that expects a pointer to something that expects
4015 a direct variable. */
4016 if (pass == 0)
4017 continue;
4019 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4020 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4022 x = build_receiver_ref (var, false, ctx);
4023 x = build_fold_addr_expr_loc (clause_loc, x);
4025 else if (TREE_CONSTANT (x))
4027 /* For a reduction in a SIMD loop, defer adding the
4028 initialization of the reference, because if we decide
4029 to use a SIMD array for it, the initialization could
4030 cause an expansion ICE. */
4031 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4032 x = NULL_TREE;
4033 else
4035 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4036 get_name (var));
4037 gimple_add_tmp_var (x);
4038 TREE_ADDRESSABLE (x) = 1;
4039 x = build_fold_addr_expr_loc (clause_loc, x);
4042 else
4044 tree atmp
4045 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4046 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4047 tree al = size_int (TYPE_ALIGN (rtype));
4048 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4051 if (x)
4053 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4054 gimplify_assign (new_var, x, ilist);
4057 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4059 else if (c_kind == OMP_CLAUSE_REDUCTION
4060 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4062 if (pass == 0)
4063 continue;
4065 else if (pass != 0)
4066 continue;
4068 switch (OMP_CLAUSE_CODE (c))
4070 case OMP_CLAUSE_SHARED:
4071 /* Ignore shared directives in teams construct. */
4072 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4073 continue;
4074 /* Shared global vars are just accessed directly. */
4075 if (is_global_var (new_var))
4076 break;
4077 /* For taskloop firstprivate/lastprivate, represented
4078 as firstprivate and shared clause on the task, new_var
4079 is the firstprivate var. */
4080 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4081 break;
4082 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4083 needs to be delayed until after fixup_child_record_type so
4084 that we get the correct type during the dereference. */
4085 by_ref = use_pointer_for_field (var, ctx);
4086 x = build_receiver_ref (var, by_ref, ctx);
4087 SET_DECL_VALUE_EXPR (new_var, x);
4088 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4090 /* ??? If VAR is not passed by reference, and the variable
4091 hasn't been initialized yet, then we'll get a warning for
4092 the store into the omp_data_s structure. Ideally, we'd be
4093 able to notice this and not store anything at all, but
4094 we're generating code too early. Suppress the warning. */
4095 if (!by_ref)
4096 TREE_NO_WARNING (var) = 1;
4097 break;
4099 case OMP_CLAUSE_LASTPRIVATE:
4100 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4101 break;
4102 /* FALLTHRU */
4104 case OMP_CLAUSE_PRIVATE:
4105 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4106 x = build_outer_var_ref (var, ctx);
4107 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4109 if (is_task_ctx (ctx))
4110 x = build_receiver_ref (var, false, ctx);
4111 else
4112 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4114 else
4115 x = NULL;
4116 do_private:
4117 tree nx;
4118 nx = lang_hooks.decls.omp_clause_default_ctor
4119 (c, unshare_expr (new_var), x);
4120 if (is_simd)
4122 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4123 if ((TREE_ADDRESSABLE (new_var) || nx || y
4124 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4125 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4126 ivar, lvar))
4128 if (nx)
4129 x = lang_hooks.decls.omp_clause_default_ctor
4130 (c, unshare_expr (ivar), x);
4131 if (nx && x)
4132 gimplify_and_add (x, &llist[0]);
4133 if (y)
4135 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4136 if (y)
4138 gimple_seq tseq = NULL;
4140 dtor = y;
4141 gimplify_stmt (&dtor, &tseq);
4142 gimple_seq_add_seq (&llist[1], tseq);
4145 break;
4148 if (nx)
4149 gimplify_and_add (nx, ilist);
4150 /* FALLTHRU */
4152 do_dtor:
4153 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4154 if (x)
4156 gimple_seq tseq = NULL;
4158 dtor = x;
4159 gimplify_stmt (&dtor, &tseq);
4160 gimple_seq_add_seq (dlist, tseq);
4162 break;
4164 case OMP_CLAUSE_LINEAR:
4165 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4166 goto do_firstprivate;
4167 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4168 x = NULL;
4169 else
4170 x = build_outer_var_ref (var, ctx);
4171 goto do_private;
4173 case OMP_CLAUSE_FIRSTPRIVATE:
4174 if (is_task_ctx (ctx))
4176 if (omp_is_reference (var) || is_variable_sized (var))
4177 goto do_dtor;
4178 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4179 ctx))
4180 || use_pointer_for_field (var, NULL))
4182 x = build_receiver_ref (var, false, ctx);
4183 SET_DECL_VALUE_EXPR (new_var, x);
4184 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4185 goto do_dtor;
4188 do_firstprivate:
4189 x = build_outer_var_ref (var, ctx);
4190 if (is_simd)
4192 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4193 && gimple_omp_for_combined_into_p (ctx->stmt))
4195 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4196 tree stept = TREE_TYPE (t);
4197 tree ct = omp_find_clause (clauses,
4198 OMP_CLAUSE__LOOPTEMP_);
4199 gcc_assert (ct);
4200 tree l = OMP_CLAUSE_DECL (ct);
4201 tree n1 = fd->loop.n1;
4202 tree step = fd->loop.step;
4203 tree itype = TREE_TYPE (l);
4204 if (POINTER_TYPE_P (itype))
4205 itype = signed_type_for (itype);
4206 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4207 if (TYPE_UNSIGNED (itype)
4208 && fd->loop.cond_code == GT_EXPR)
4209 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4210 fold_build1 (NEGATE_EXPR, itype, l),
4211 fold_build1 (NEGATE_EXPR,
4212 itype, step));
4213 else
4214 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4215 t = fold_build2 (MULT_EXPR, stept,
4216 fold_convert (stept, l), t);
4218 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4220 x = lang_hooks.decls.omp_clause_linear_ctor
4221 (c, new_var, x, t);
4222 gimplify_and_add (x, ilist);
4223 goto do_dtor;
4226 if (POINTER_TYPE_P (TREE_TYPE (x)))
4227 x = fold_build2 (POINTER_PLUS_EXPR,
4228 TREE_TYPE (x), x, t);
4229 else
4230 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4233 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4234 || TREE_ADDRESSABLE (new_var))
4235 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4236 ivar, lvar))
4238 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4240 tree iv = create_tmp_var (TREE_TYPE (new_var));
4241 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4242 gimplify_and_add (x, ilist);
4243 gimple_stmt_iterator gsi
4244 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4245 gassign *g
4246 = gimple_build_assign (unshare_expr (lvar), iv);
4247 gsi_insert_before_without_update (&gsi, g,
4248 GSI_SAME_STMT);
4249 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4250 enum tree_code code = PLUS_EXPR;
4251 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4252 code = POINTER_PLUS_EXPR;
4253 g = gimple_build_assign (iv, code, iv, t);
4254 gsi_insert_before_without_update (&gsi, g,
4255 GSI_SAME_STMT);
4256 break;
4258 x = lang_hooks.decls.omp_clause_copy_ctor
4259 (c, unshare_expr (ivar), x);
4260 gimplify_and_add (x, &llist[0]);
4261 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4262 if (x)
4264 gimple_seq tseq = NULL;
4266 dtor = x;
4267 gimplify_stmt (&dtor, &tseq);
4268 gimple_seq_add_seq (&llist[1], tseq);
4270 break;
4273 x = lang_hooks.decls.omp_clause_copy_ctor
4274 (c, unshare_expr (new_var), x);
4275 gimplify_and_add (x, ilist);
4276 goto do_dtor;
4278 case OMP_CLAUSE__LOOPTEMP_:
4279 gcc_assert (is_taskreg_ctx (ctx));
4280 x = build_outer_var_ref (var, ctx);
4281 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4282 gimplify_and_add (x, ilist);
4283 break;
4285 case OMP_CLAUSE_COPYIN:
4286 by_ref = use_pointer_for_field (var, NULL);
4287 x = build_receiver_ref (var, by_ref, ctx);
4288 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4289 append_to_statement_list (x, &copyin_seq);
4290 copyin_by_ref |= by_ref;
4291 break;
4293 case OMP_CLAUSE_REDUCTION:
4294 /* OpenACC reductions are initialized using the
4295 GOACC_REDUCTION internal function. */
4296 if (is_gimple_omp_oacc (ctx->stmt))
4297 break;
4298 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4300 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4301 gimple *tseq;
4302 x = build_outer_var_ref (var, ctx);
4304 if (omp_is_reference (var)
4305 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4306 TREE_TYPE (x)))
4307 x = build_fold_addr_expr_loc (clause_loc, x);
4308 SET_DECL_VALUE_EXPR (placeholder, x);
4309 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4310 tree new_vard = new_var;
4311 if (omp_is_reference (var))
4313 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4314 new_vard = TREE_OPERAND (new_var, 0);
4315 gcc_assert (DECL_P (new_vard));
4317 if (is_simd
4318 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4319 ivar, lvar))
4321 if (new_vard == new_var)
4323 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4324 SET_DECL_VALUE_EXPR (new_var, ivar);
4326 else
4328 SET_DECL_VALUE_EXPR (new_vard,
4329 build_fold_addr_expr (ivar));
4330 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4332 x = lang_hooks.decls.omp_clause_default_ctor
4333 (c, unshare_expr (ivar),
4334 build_outer_var_ref (var, ctx));
4335 if (x)
4336 gimplify_and_add (x, &llist[0]);
4337 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4339 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4340 lower_omp (&tseq, ctx);
4341 gimple_seq_add_seq (&llist[0], tseq);
4343 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4344 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4345 lower_omp (&tseq, ctx);
4346 gimple_seq_add_seq (&llist[1], tseq);
4347 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4348 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4349 if (new_vard == new_var)
4350 SET_DECL_VALUE_EXPR (new_var, lvar);
4351 else
4352 SET_DECL_VALUE_EXPR (new_vard,
4353 build_fold_addr_expr (lvar));
4354 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4355 if (x)
4357 tseq = NULL;
4358 dtor = x;
4359 gimplify_stmt (&dtor, &tseq);
4360 gimple_seq_add_seq (&llist[1], tseq);
4362 break;
4364 /* If this is a reference to constant size reduction var
4365 with placeholder, we haven't emitted the initializer
4366 for it because it is undesirable if SIMD arrays are used.
4367 But if they aren't used, we need to emit the deferred
4368 initialization now. */
4369 else if (omp_is_reference (var) && is_simd)
4370 handle_simd_reference (clause_loc, new_vard, ilist);
4371 x = lang_hooks.decls.omp_clause_default_ctor
4372 (c, unshare_expr (new_var),
4373 build_outer_var_ref (var, ctx));
4374 if (x)
4375 gimplify_and_add (x, ilist);
4376 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4378 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4379 lower_omp (&tseq, ctx);
4380 gimple_seq_add_seq (ilist, tseq);
4382 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4383 if (is_simd)
4385 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4386 lower_omp (&tseq, ctx);
4387 gimple_seq_add_seq (dlist, tseq);
4388 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4390 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4391 goto do_dtor;
4393 else
4395 x = omp_reduction_init (c, TREE_TYPE (new_var));
4396 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4397 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4399 /* reduction(-:var) sums up the partial results, so it
4400 acts identically to reduction(+:var). */
4401 if (code == MINUS_EXPR)
4402 code = PLUS_EXPR;
4404 tree new_vard = new_var;
4405 if (is_simd && omp_is_reference (var))
4407 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4408 new_vard = TREE_OPERAND (new_var, 0);
4409 gcc_assert (DECL_P (new_vard));
4411 if (is_simd
4412 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4413 ivar, lvar))
4415 tree ref = build_outer_var_ref (var, ctx);
4417 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4419 if (sctx.is_simt)
4421 if (!simt_lane)
4422 simt_lane = create_tmp_var (unsigned_type_node);
4423 x = build_call_expr_internal_loc
4424 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4425 TREE_TYPE (ivar), 2, ivar, simt_lane);
4426 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4427 gimplify_assign (ivar, x, &llist[2]);
4429 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4430 ref = build_outer_var_ref (var, ctx);
4431 gimplify_assign (ref, x, &llist[1]);
4433 if (new_vard != new_var)
4435 SET_DECL_VALUE_EXPR (new_vard,
4436 build_fold_addr_expr (lvar));
4437 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4440 else
4442 if (omp_is_reference (var) && is_simd)
4443 handle_simd_reference (clause_loc, new_vard, ilist);
4444 gimplify_assign (new_var, x, ilist);
4445 if (is_simd)
4447 tree ref = build_outer_var_ref (var, ctx);
4449 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4450 ref = build_outer_var_ref (var, ctx);
4451 gimplify_assign (ref, x, dlist);
4455 break;
4457 default:
4458 gcc_unreachable ();
4463 if (known_eq (sctx.max_vf, 1U))
4464 sctx.is_simt = false;
4466 if (sctx.lane || sctx.is_simt)
4468 uid = create_tmp_var (ptr_type_node, "simduid");
4469 /* Don't warn about simduid being uninitialized; it always is,
4470 since we use it only for its DECL_UID, never for its value. */
4471 TREE_NO_WARNING (uid) = 1;
4472 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4473 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4474 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4475 gimple_omp_for_set_clauses (ctx->stmt, c);
4477 /* Emit calls denoting privatized variables and initializing a pointer to
4478 the structure that holds private variables as fields (expanded after the ompdevlow pass). */
4479 if (sctx.is_simt)
4481 sctx.simt_eargs[0] = uid;
4482 gimple *g
4483 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4484 gimple_call_set_lhs (g, uid);
4485 gimple_seq_add_stmt (ilist, g);
4486 sctx.simt_eargs.release ();
4488 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4489 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4490 gimple_call_set_lhs (g, simtrec);
4491 gimple_seq_add_stmt (ilist, g);
4493 if (sctx.lane)
4495 gimple *g
4496 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4497 gimple_call_set_lhs (g, sctx.lane);
4498 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4499 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4500 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4501 build_int_cst (unsigned_type_node, 0));
4502 gimple_seq_add_stmt (ilist, g);
4503 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4504 if (llist[2])
4506 tree simt_vf = create_tmp_var (unsigned_type_node);
4507 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4508 gimple_call_set_lhs (g, simt_vf);
4509 gimple_seq_add_stmt (dlist, g);
4511 tree t = build_int_cst (unsigned_type_node, 1);
4512 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4513 gimple_seq_add_stmt (dlist, g);
4515 t = build_int_cst (unsigned_type_node, 0);
4516 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4517 gimple_seq_add_stmt (dlist, g);
4519 tree body = create_artificial_label (UNKNOWN_LOCATION);
4520 tree header = create_artificial_label (UNKNOWN_LOCATION);
4521 tree end = create_artificial_label (UNKNOWN_LOCATION);
4522 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4523 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4525 gimple_seq_add_seq (dlist, llist[2]);
4527 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4528 gimple_seq_add_stmt (dlist, g);
4530 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4531 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4532 gimple_seq_add_stmt (dlist, g);
4534 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4536 for (int i = 0; i < 2; i++)
4537 if (llist[i])
4539 tree vf = create_tmp_var (unsigned_type_node);
4540 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4541 gimple_call_set_lhs (g, vf);
4542 gimple_seq *seq = i == 0 ? ilist : dlist;
4543 gimple_seq_add_stmt (seq, g);
4544 tree t = build_int_cst (unsigned_type_node, 0);
4545 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4546 gimple_seq_add_stmt (seq, g);
4547 tree body = create_artificial_label (UNKNOWN_LOCATION);
4548 tree header = create_artificial_label (UNKNOWN_LOCATION);
4549 tree end = create_artificial_label (UNKNOWN_LOCATION);
4550 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4551 gimple_seq_add_stmt (seq, gimple_build_label (body));
4552 gimple_seq_add_seq (seq, llist[i]);
4553 t = build_int_cst (unsigned_type_node, 1);
4554 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4555 gimple_seq_add_stmt (seq, g);
4556 gimple_seq_add_stmt (seq, gimple_build_label (header));
4557 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4558 gimple_seq_add_stmt (seq, g);
4559 gimple_seq_add_stmt (seq, gimple_build_label (end));
4562 if (sctx.is_simt)
4564 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4565 gimple *g
4566 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4567 gimple_seq_add_stmt (dlist, g);
4570 /* The copyin sequence is not to be executed by the main thread, since
4571 that would result in self-copies. A self-copy may be invisible for
4572 scalars, but it certainly is visible to a C++ operator=. */
4573 if (copyin_seq)
4575 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM), 0);
4577 x = build2 (NE_EXPR, boolean_type_node, x,
4578 build_int_cst (TREE_TYPE (x), 0));
4579 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4580 gimplify_and_add (x, ilist);
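   /* A minimal sketch of the guard built just above (thread 0 already
      holds the original threadprivate values, so it must not self-copy):

	if (__builtin_omp_get_thread_num () != 0)
	  <copyin_seq>;  */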
4583 /* If any copyin variable is passed by reference, we must ensure the
4584 master thread doesn't modify it before it is copied over in all
4585 threads. Similarly, for variables in both firstprivate and
4586 lastprivate clauses we need to ensure that the lastprivate copying
4587 happens after the firstprivate copying in all threads, and
4588 similarly for UDRs whose initializer expression refers to omp_orig. */
4589 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4591 /* Don't add any barrier for #pragma omp simd or
4592 #pragma omp distribute. */
4593 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4594 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4595 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4598 /* If max_vf is non-zero, then we can use only a vectorization factor
4599 up to the max_vf we chose. So stick it into the safelen clause. */
4600 if (maybe_ne (sctx.max_vf, 0U))
4602 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4603 OMP_CLAUSE_SAFELEN);
4604 poly_uint64 safe_len;
4605 if (c == NULL_TREE
4606 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4607 && maybe_gt (safe_len, sctx.max_vf)))
4609 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4610 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4611 sctx.max_vf);
4612 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4613 gimple_omp_for_set_clauses (ctx->stmt, c);
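   /* For example, if the privatization code above settled on
      sctx.max_vf == 16 and the user wrote no smaller safelen, the loop
      ends up carrying an implicit

	safelen(16)

      clause, capping the vectorization factor accordingly.  */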
4619 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4620 both parallel and workshare constructs. PREDICATE may be NULL if it's
4621 always true. */
4623 static void
4624 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4625 omp_context *ctx)
4627 tree x, c, label = NULL, orig_clauses = clauses;
4628 bool par_clauses = false;
4629 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4631 /* Early exit if there are no lastprivate or linear clauses. */
4632 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4633 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4634 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4635 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4636 break;
4637 if (clauses == NULL)
4639 /* If this was a workshare clause, see if it had been combined
4640 with its parallel. In that case, look for the clauses on the
4641 parallel statement itself. */
4642 if (is_parallel_ctx (ctx))
4643 return;
4645 ctx = ctx->outer;
4646 if (ctx == NULL || !is_parallel_ctx (ctx))
4647 return;
4649 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4650 OMP_CLAUSE_LASTPRIVATE);
4651 if (clauses == NULL)
4652 return;
4653 par_clauses = true;
4656 bool maybe_simt = false;
4657 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4658 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4660 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4661 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4662 if (simduid)
4663 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4666 if (predicate)
4668 gcond *stmt;
4669 tree label_true, arm1, arm2;
4670 enum tree_code pred_code = TREE_CODE (predicate);
4672 label = create_artificial_label (UNKNOWN_LOCATION);
4673 label_true = create_artificial_label (UNKNOWN_LOCATION);
4674 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4676 arm1 = TREE_OPERAND (predicate, 0);
4677 arm2 = TREE_OPERAND (predicate, 1);
4678 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4679 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4681 else
4683 arm1 = predicate;
4684 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4685 arm2 = boolean_false_node;
4686 pred_code = NE_EXPR;
4688 if (maybe_simt)
4690 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4691 c = fold_convert (integer_type_node, c);
4692 simtcond = create_tmp_var (integer_type_node);
4693 gimplify_assign (simtcond, c, stmt_list);
4694 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4695 1, simtcond);
4696 c = create_tmp_var (integer_type_node);
4697 gimple_call_set_lhs (g, c);
4698 gimple_seq_add_stmt (stmt_list, g);
4699 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4700 label_true, label);
4702 else
4703 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4704 gimple_seq_add_stmt (stmt_list, stmt);
4705 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4708 for (c = clauses; c ;)
4710 tree var, new_var;
4711 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4713 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4714 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4715 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4717 var = OMP_CLAUSE_DECL (c);
4718 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4719 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4720 && is_taskloop_ctx (ctx))
4722 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4723 new_var = lookup_decl (var, ctx->outer);
4725 else
4727 new_var = lookup_decl (var, ctx);
4728 /* Avoid uninitialized warnings for lastprivate and
4729 for linear iterators. */
4730 if (predicate
4731 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4732 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4733 TREE_NO_WARNING (new_var) = 1;
4736 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4738 tree val = DECL_VALUE_EXPR (new_var);
4739 if (TREE_CODE (val) == ARRAY_REF
4740 && VAR_P (TREE_OPERAND (val, 0))
4741 && lookup_attribute ("omp simd array",
4742 DECL_ATTRIBUTES (TREE_OPERAND (val,
4743 0))))
4745 if (lastlane == NULL)
4747 lastlane = create_tmp_var (unsigned_type_node);
4748 gcall *g
4749 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4750 2, simduid,
4751 TREE_OPERAND (val, 1));
4752 gimple_call_set_lhs (g, lastlane);
4753 gimple_seq_add_stmt (stmt_list, g);
4755 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4756 TREE_OPERAND (val, 0), lastlane,
4757 NULL_TREE, NULL_TREE);
4760 else if (maybe_simt)
4762 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4763 ? DECL_VALUE_EXPR (new_var)
4764 : new_var);
4765 if (simtlast == NULL)
4767 simtlast = create_tmp_var (unsigned_type_node);
4768 gcall *g = gimple_build_call_internal
4769 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4770 gimple_call_set_lhs (g, simtlast);
4771 gimple_seq_add_stmt (stmt_list, g);
4773 x = build_call_expr_internal_loc
4774 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4775 TREE_TYPE (val), 2, val, simtlast);
4776 new_var = unshare_expr (new_var);
4777 gimplify_assign (new_var, x, stmt_list);
4778 new_var = unshare_expr (new_var);
4781 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4782 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4784 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4785 gimple_seq_add_seq (stmt_list,
4786 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4787 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4789 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4790 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4792 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4793 gimple_seq_add_seq (stmt_list,
4794 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4795 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4798 x = NULL_TREE;
4799 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4800 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4802 gcc_checking_assert (is_taskloop_ctx (ctx));
4803 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4804 ctx->outer->outer);
4805 if (is_global_var (ovar))
4806 x = ovar;
4808 if (!x)
4809 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4810 if (omp_is_reference (var))
4811 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4812 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4813 gimplify_and_add (x, stmt_list);
4815 c = OMP_CLAUSE_CHAIN (c);
4816 if (c == NULL && !par_clauses)
4818 /* If this was a workshare clause, see if it had been combined
4819 with its parallel. In that case, continue looking for the
4820 clauses also on the parallel statement itself. */
4821 if (is_parallel_ctx (ctx))
4822 break;
4824 ctx = ctx->outer;
4825 if (ctx == NULL || !is_parallel_ctx (ctx))
4826 break;
4828 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4829 OMP_CLAUSE_LASTPRIVATE);
4830 par_clauses = true;
4834 if (label)
4835 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
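/* A rough sketch of what lower_lastprivate_clauses emits for a non-SIMD
   loop (names illustrative):

     if (<predicate>)			// this thread ran the last iteration
       {
	 x_orig = x_priv;		// one copy per lastprivate/linear var
       }
     label:				// skip target otherwise  */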
4838 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4839 (which might be a placeholder). INNER is true if this is an inner
4840 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4841 join markers. Generate the before-loop forking sequence in
4842 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4843 general form of these sequences is
4845 GOACC_REDUCTION_SETUP
4846 GOACC_FORK
4847 GOACC_REDUCTION_INIT
4849 GOACC_REDUCTION_FINI
4850 GOACC_JOIN
4851 GOACC_REDUCTION_TEARDOWN. */
4853 static void
4854 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4855 gcall *fork, gcall *join, gimple_seq *fork_seq,
4856 gimple_seq *join_seq, omp_context *ctx)
4858 gimple_seq before_fork = NULL;
4859 gimple_seq after_fork = NULL;
4860 gimple_seq before_join = NULL;
4861 gimple_seq after_join = NULL;
4862 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4863 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4864 unsigned offset = 0;
4866 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4867 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4869 tree orig = OMP_CLAUSE_DECL (c);
4870 tree var = maybe_lookup_decl (orig, ctx);
4871 tree ref_to_res = NULL_TREE;
4872 tree incoming, outgoing, v1, v2, v3;
4873 bool is_private = false;
4875 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4876 if (rcode == MINUS_EXPR)
4877 rcode = PLUS_EXPR;
4878 else if (rcode == TRUTH_ANDIF_EXPR)
4879 rcode = BIT_AND_EXPR;
4880 else if (rcode == TRUTH_ORIF_EXPR)
4881 rcode = BIT_IOR_EXPR;
4882 tree op = build_int_cst (unsigned_type_node, rcode);
4884 if (!var)
4885 var = orig;
4887 incoming = outgoing = var;
4889 if (!inner)
4891 /* See if an outer construct also reduces this variable. */
4892 omp_context *outer = ctx;
4894 while (omp_context *probe = outer->outer)
4896 enum gimple_code type = gimple_code (probe->stmt);
4897 tree cls;
4899 switch (type)
4901 case GIMPLE_OMP_FOR:
4902 cls = gimple_omp_for_clauses (probe->stmt);
4903 break;
4905 case GIMPLE_OMP_TARGET:
4906 if (gimple_omp_target_kind (probe->stmt)
4907 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4908 goto do_lookup;
4910 cls = gimple_omp_target_clauses (probe->stmt);
4911 break;
4913 default:
4914 goto do_lookup;
4917 outer = probe;
4918 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4919 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4920 && orig == OMP_CLAUSE_DECL (cls))
4922 incoming = outgoing = lookup_decl (orig, probe);
4923 goto has_outer_reduction;
4925 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4926 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4927 && orig == OMP_CLAUSE_DECL (cls))
4929 is_private = true;
4930 goto do_lookup;
4934 do_lookup:
4935 /* This is the outermost construct with this reduction,
4936 see if there's a mapping for it. */
4937 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4938 && maybe_lookup_field (orig, outer) && !is_private)
4940 ref_to_res = build_receiver_ref (orig, false, outer);
4941 if (omp_is_reference (orig))
4942 ref_to_res = build_simple_mem_ref (ref_to_res);
4944 tree type = TREE_TYPE (var);
4945 if (POINTER_TYPE_P (type))
4946 type = TREE_TYPE (type);
4948 outgoing = var;
4949 incoming = omp_reduction_init_op (loc, rcode, type);
4951 else
4953 /* Try to look at enclosing contexts for reduction var,
4954 use original if no mapping found. */
4955 tree t = NULL_TREE;
4956 omp_context *c = ctx->outer;
4957 while (c && !t)
4959 t = maybe_lookup_decl (orig, c);
4960 c = c->outer;
4962 incoming = outgoing = (t ? t : orig);
4965 has_outer_reduction:;
4968 if (!ref_to_res)
4969 ref_to_res = integer_zero_node;
4971 if (omp_is_reference (orig))
4973 tree type = TREE_TYPE (var);
4974 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
4976 if (!inner)
4978 tree x = create_tmp_var (TREE_TYPE (type), id);
4979 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
4982 v1 = create_tmp_var (type, id);
4983 v2 = create_tmp_var (type, id);
4984 v3 = create_tmp_var (type, id);
4986 gimplify_assign (v1, var, fork_seq);
4987 gimplify_assign (v2, var, fork_seq);
4988 gimplify_assign (v3, var, fork_seq);
4990 var = build_simple_mem_ref (var);
4991 v1 = build_simple_mem_ref (v1);
4992 v2 = build_simple_mem_ref (v2);
4993 v3 = build_simple_mem_ref (v3);
4994 outgoing = build_simple_mem_ref (outgoing);
4996 if (!TREE_CONSTANT (incoming))
4997 incoming = build_simple_mem_ref (incoming);
4999 else
5000 v1 = v2 = v3 = var;
5002 /* Determine the position in the reduction buffer, which may
5003 be used by the target. The parser has ensured that this is
5004 not a variable-sized type. */
5005 fixed_size_mode mode
5006 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
5007 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5008 offset = (offset + align - 1) & ~(align - 1);
5009 tree off = build_int_cst (sizetype, offset);
5010 offset += GET_MODE_SIZE (mode);
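	/* Worked example of the rounding above: with offset == 6 and a
	   mode aligned to 4 bytes, (6 + 4 - 1) & ~3 == 8, so this
	   variable's slot begins at byte 8, and OFFSET then advances by
	   the mode size.  */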
5012 if (!init_code)
5014 init_code = build_int_cst (integer_type_node,
5015 IFN_GOACC_REDUCTION_INIT);
5016 fini_code = build_int_cst (integer_type_node,
5017 IFN_GOACC_REDUCTION_FINI);
5018 setup_code = build_int_cst (integer_type_node,
5019 IFN_GOACC_REDUCTION_SETUP);
5020 teardown_code = build_int_cst (integer_type_node,
5021 IFN_GOACC_REDUCTION_TEARDOWN);
5024 tree setup_call
5025 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5026 TREE_TYPE (var), 6, setup_code,
5027 unshare_expr (ref_to_res),
5028 incoming, level, op, off);
5029 tree init_call
5030 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5031 TREE_TYPE (var), 6, init_code,
5032 unshare_expr (ref_to_res),
5033 v1, level, op, off);
5034 tree fini_call
5035 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5036 TREE_TYPE (var), 6, fini_code,
5037 unshare_expr (ref_to_res),
5038 v2, level, op, off);
5039 tree teardown_call
5040 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5041 TREE_TYPE (var), 6, teardown_code,
5042 ref_to_res, v3, level, op, off);
5044 gimplify_assign (v1, setup_call, &before_fork);
5045 gimplify_assign (v2, init_call, &after_fork);
5046 gimplify_assign (v3, fini_call, &before_join);
5047 gimplify_assign (outgoing, teardown_call, &after_join);
5050 /* Now stitch things together. */
5051 gimple_seq_add_seq (fork_seq, before_fork);
5052 if (fork)
5053 gimple_seq_add_stmt (fork_seq, fork);
5054 gimple_seq_add_seq (fork_seq, after_fork);
5056 gimple_seq_add_seq (join_seq, before_join);
5057 if (join)
5058 gimple_seq_add_stmt (join_seq, join);
5059 gimple_seq_add_seq (join_seq, after_join);
5062 /* Generate code to implement the REDUCTION clauses. */
5064 static void
5065 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5067 gimple_seq sub_seq = NULL;
5068 gimple *stmt;
5069 tree x, c;
5070 int count = 0;
5072 /* OpenACC loop reductions are handled elsewhere. */
5073 if (is_gimple_omp_oacc (ctx->stmt))
5074 return;
5076 /* SIMD reductions are handled in lower_rec_input_clauses. */
5077 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5078 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5079 return;
5081 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5082 update in that case, otherwise use a lock. */
5083 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5084 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5086 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5087 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5089 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5090 count = -1;
5091 break;
5093 count++;
5096 if (count == 0)
5097 return;
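  /* From here on, COUNT == 1 means a single scalar reduction that can be
     merged with one atomic update, conceptually

       #pragma omp atomic
       s += s_priv;

     while COUNT > 1 or COUNT == -1 (arrays/UDRs) brackets all the merge
     statements with GOMP_atomic_start ()/GOMP_atomic_end () instead.  */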
5099 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5101 tree var, ref, new_var, orig_var;
5102 enum tree_code code;
5103 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5105 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5106 continue;
5108 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5109 orig_var = var = OMP_CLAUSE_DECL (c);
5110 if (TREE_CODE (var) == MEM_REF)
5112 var = TREE_OPERAND (var, 0);
5113 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5114 var = TREE_OPERAND (var, 0);
5115 if (TREE_CODE (var) == ADDR_EXPR)
5116 var = TREE_OPERAND (var, 0);
5117 else
5119 /* If this is a pointer- or reference-based array
5120 section, the variable could be private in the outer
5121 context, e.g. in an orphaned loop construct. Pretend
5122 this is the private variable's outer reference. */
5123 ccode = OMP_CLAUSE_PRIVATE;
5124 if (TREE_CODE (var) == INDIRECT_REF)
5125 var = TREE_OPERAND (var, 0);
5127 orig_var = var;
5128 if (is_variable_sized (var))
5130 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5131 var = DECL_VALUE_EXPR (var);
5132 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5133 var = TREE_OPERAND (var, 0);
5134 gcc_assert (DECL_P (var));
5137 new_var = lookup_decl (var, ctx);
5138 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5139 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5140 ref = build_outer_var_ref (var, ctx, ccode);
5141 code = OMP_CLAUSE_REDUCTION_CODE (c);
5143 /* reduction(-:var) sums up the partial results, so it acts
5144 identically to reduction(+:var). */
5145 if (code == MINUS_EXPR)
5146 code = PLUS_EXPR;
5148 if (count == 1)
5150 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5152 addr = save_expr (addr);
5153 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5154 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5155 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5156 gimplify_and_add (x, stmt_seqp);
5157 return;
5159 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5161 tree d = OMP_CLAUSE_DECL (c);
5162 tree type = TREE_TYPE (d);
5163 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5164 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5165 tree ptype = build_pointer_type (TREE_TYPE (type));
5166 tree bias = TREE_OPERAND (d, 1);
5167 d = TREE_OPERAND (d, 0);
5168 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5170 tree b = TREE_OPERAND (d, 1);
5171 b = maybe_lookup_decl (b, ctx);
5172 if (b == NULL)
5174 b = TREE_OPERAND (d, 1);
5175 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5177 if (integer_zerop (bias))
5178 bias = b;
5179 else
5181 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5182 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5183 TREE_TYPE (b), b, bias);
5185 d = TREE_OPERAND (d, 0);
5187 /* For references, build_outer_var_ref already performs the
5188 dereference, so only new_var needs one. */
5189 if (TREE_CODE (d) == INDIRECT_REF)
5191 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5192 gcc_assert (omp_is_reference (var) && var == orig_var);
5194 else if (TREE_CODE (d) == ADDR_EXPR)
5196 if (orig_var == var)
5198 new_var = build_fold_addr_expr (new_var);
5199 ref = build_fold_addr_expr (ref);
5202 else
5204 gcc_assert (orig_var == var);
5205 if (omp_is_reference (var))
5206 ref = build_fold_addr_expr (ref);
5208 if (DECL_P (v))
5210 tree t = maybe_lookup_decl (v, ctx);
5211 if (t)
5212 v = t;
5213 else
5214 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5215 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5217 if (!integer_zerop (bias))
5219 bias = fold_convert_loc (clause_loc, sizetype, bias);
5220 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5221 TREE_TYPE (new_var), new_var,
5222 unshare_expr (bias));
5223 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5224 TREE_TYPE (ref), ref, bias);
5226 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5227 ref = fold_convert_loc (clause_loc, ptype, ref);
5228 tree m = create_tmp_var (ptype, NULL);
5229 gimplify_assign (m, new_var, stmt_seqp);
5230 new_var = m;
5231 m = create_tmp_var (ptype, NULL);
5232 gimplify_assign (m, ref, stmt_seqp);
5233 ref = m;
5234 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5235 tree body = create_artificial_label (UNKNOWN_LOCATION);
5236 tree end = create_artificial_label (UNKNOWN_LOCATION);
5237 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5238 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5239 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5240 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5242 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5243 tree decl_placeholder
5244 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5245 SET_DECL_VALUE_EXPR (placeholder, out);
5246 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5247 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5248 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5249 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5250 gimple_seq_add_seq (&sub_seq,
5251 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5252 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5253 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5254 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5256 else
5258 x = build2 (code, TREE_TYPE (out), out, priv);
5259 out = unshare_expr (out);
5260 gimplify_assign (out, x, &sub_seq);
5262 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5263 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5264 gimple_seq_add_stmt (&sub_seq, g);
5265 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5266 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5267 gimple_seq_add_stmt (&sub_seq, g);
5268 g = gimple_build_assign (i, PLUS_EXPR, i,
5269 build_int_cst (TREE_TYPE (i), 1));
5270 gimple_seq_add_stmt (&sub_seq, g);
5271 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5272 gimple_seq_add_stmt (&sub_seq, g);
5273 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5275 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5277 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5279 if (omp_is_reference (var)
5280 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5281 TREE_TYPE (ref)))
5282 ref = build_fold_addr_expr_loc (clause_loc, ref);
5283 SET_DECL_VALUE_EXPR (placeholder, ref);
5284 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5285 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5286 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5287 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5288 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5290 else
5292 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5293 ref = build_outer_var_ref (var, ctx);
5294 gimplify_assign (ref, x, &sub_seq);
5298 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START), 0);
5300 gimple_seq_add_stmt (stmt_seqp, stmt);
5302 gimple_seq_add_seq (stmt_seqp, sub_seq);
5304 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END), 0);
5306 gimple_seq_add_stmt (stmt_seqp, stmt);
5310 /* Generate code to implement the COPYPRIVATE clauses. */
5312 static void
5313 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5314 omp_context *ctx)
5316 tree c;
5318 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5320 tree var, new_var, ref, x;
5321 bool by_ref;
5322 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5324 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5325 continue;
5327 var = OMP_CLAUSE_DECL (c);
5328 by_ref = use_pointer_for_field (var, NULL);
5330 ref = build_sender_ref (var, ctx);
5331 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5332 if (by_ref)
5334 x = build_fold_addr_expr_loc (clause_loc, new_var);
5335 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5337 gimplify_assign (ref, x, slist);
5339 ref = build_receiver_ref (var, false, ctx);
5340 if (by_ref)
5342 ref = fold_convert_loc (clause_loc,
5343 build_pointer_type (TREE_TYPE (new_var)),
5344 ref);
5345 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5347 if (omp_is_reference (var))
5349 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5350 ref = build_simple_mem_ref_loc (clause_loc, ref);
5351 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5353 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5354 gimplify_and_add (x, rlist);
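/* Sketch of the effect, e.g. for "#pragma omp single copyprivate (x)":
   the thread that executed the single region stores X (or &X when the
   field is a pointer) into the broadcast record in SLIST, and every
   thread copies the value back out of it in RLIST.  */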
5359 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5360 and REDUCTION from the sender (aka parent) side. */
5362 static void
5363 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5364 omp_context *ctx)
5366 tree c, t;
5367 int ignored_looptemp = 0;
5368 bool is_taskloop = false;
5370 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5371 by GOMP_taskloop. */
5372 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5374 ignored_looptemp = 2;
5375 is_taskloop = true;
5378 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5380 tree val, ref, x, var;
5381 bool by_ref, do_in = false, do_out = false;
5382 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5384 switch (OMP_CLAUSE_CODE (c))
5386 case OMP_CLAUSE_PRIVATE:
5387 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5388 break;
5389 continue;
5390 case OMP_CLAUSE_FIRSTPRIVATE:
5391 case OMP_CLAUSE_COPYIN:
5392 case OMP_CLAUSE_LASTPRIVATE:
5393 case OMP_CLAUSE_REDUCTION:
5394 break;
5395 case OMP_CLAUSE_SHARED:
5396 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5397 break;
5398 continue;
5399 case OMP_CLAUSE__LOOPTEMP_:
5400 if (ignored_looptemp)
5402 ignored_looptemp--;
5403 continue;
5405 break;
5406 default:
5407 continue;
5410 val = OMP_CLAUSE_DECL (c);
5411 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5412 && TREE_CODE (val) == MEM_REF)
5414 val = TREE_OPERAND (val, 0);
5415 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5416 val = TREE_OPERAND (val, 0);
5417 if (TREE_CODE (val) == INDIRECT_REF
5418 || TREE_CODE (val) == ADDR_EXPR)
5419 val = TREE_OPERAND (val, 0);
5420 if (is_variable_sized (val))
5421 continue;
5424 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5425 outer taskloop region. */
5426 omp_context *ctx_for_o = ctx;
5427 if (is_taskloop
5428 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5429 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5430 ctx_for_o = ctx->outer;
5432 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5434 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5435 && is_global_var (var))
5436 continue;
5438 t = omp_member_access_dummy_var (var);
5439 if (t)
5441 var = DECL_VALUE_EXPR (var);
5442 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5443 if (o != t)
5444 var = unshare_and_remap (var, t, o);
5445 else
5446 var = unshare_expr (var);
5449 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5451 /* Handle taskloop firstprivate/lastprivate, where the
5452 lastprivate on GIMPLE_OMP_TASK is represented as
5453 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5454 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5455 x = omp_build_component_ref (ctx->sender_decl, f);
5456 if (use_pointer_for_field (val, ctx))
5457 var = build_fold_addr_expr (var);
5458 gimplify_assign (x, var, ilist);
5459 DECL_ABSTRACT_ORIGIN (f) = NULL;
5460 continue;
5463 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5464 || val == OMP_CLAUSE_DECL (c))
5465 && is_variable_sized (val))
5466 continue;
5467 by_ref = use_pointer_for_field (val, NULL);
5469 switch (OMP_CLAUSE_CODE (c))
5471 case OMP_CLAUSE_FIRSTPRIVATE:
5472 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5473 && !by_ref
5474 && is_task_ctx (ctx))
5475 TREE_NO_WARNING (var) = 1;
5476 do_in = true;
5477 break;
5479 case OMP_CLAUSE_PRIVATE:
5480 case OMP_CLAUSE_COPYIN:
5481 case OMP_CLAUSE__LOOPTEMP_:
5482 do_in = true;
5483 break;
5485 case OMP_CLAUSE_LASTPRIVATE:
5486 if (by_ref || omp_is_reference (val))
5488 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5489 continue;
5490 do_in = true;
5492 else
5494 do_out = true;
5495 if (lang_hooks.decls.omp_private_outer_ref (val))
5496 do_in = true;
5498 break;
5500 case OMP_CLAUSE_REDUCTION:
5501 do_in = true;
5502 if (val == OMP_CLAUSE_DECL (c))
5503 do_out = !(by_ref || omp_is_reference (val));
5504 else
5505 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5506 break;
5508 default:
5509 gcc_unreachable ();
5512 if (do_in)
5514 ref = build_sender_ref (val, ctx);
5515 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5516 gimplify_assign (ref, x, ilist);
5517 if (is_task_ctx (ctx))
5518 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5521 if (do_out)
5523 ref = build_sender_ref (val, ctx);
5524 gimplify_assign (var, ref, olist);
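/* Illustrative shape of the sender-side copies built above (field names
   hypothetical):

     .omp_data_o.a = a;		// do_in: e.g. firstprivate (a)
     .omp_data_o.b = &b;	// do_in, passed by reference
     ...
     a = .omp_data_o.a;		// do_out: e.g. lastprivate (a)  */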
5529 /* Generate code to implement SHARED from the sender (aka parent)
5530 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5531 list things that got automatically shared. */
5533 static void
5534 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5536 tree var, ovar, nvar, t, f, x, record_type;
5538 if (ctx->record_type == NULL)
5539 return;
5541 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5542 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5544 ovar = DECL_ABSTRACT_ORIGIN (f);
5545 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5546 continue;
5548 nvar = maybe_lookup_decl (ovar, ctx);
5549 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5550 continue;
5552 /* If CTX is a nested parallel directive, find the immediately
5553 enclosing parallel or workshare construct that contains a
5554 mapping for OVAR. */
5555 var = lookup_decl_in_outer_ctx (ovar, ctx);
5557 t = omp_member_access_dummy_var (var);
5558 if (t)
5560 var = DECL_VALUE_EXPR (var);
5561 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5562 if (o != t)
5563 var = unshare_and_remap (var, t, o);
5564 else
5565 var = unshare_expr (var);
5568 if (use_pointer_for_field (ovar, ctx))
5570 x = build_sender_ref (ovar, ctx);
5571 var = build_fold_addr_expr (var);
5572 gimplify_assign (x, var, ilist);
5574 else
5576 x = build_sender_ref (ovar, ctx);
5577 gimplify_assign (x, var, ilist);
5579 if (!TREE_READONLY (var)
5580 /* We don't need to receive a new reference to a result
5581 or parm decl. In fact we may not store to it as we will
5582 invalidate any pending RSO and generate wrong gimple
5583 during inlining. */
5584 && !((TREE_CODE (var) == RESULT_DECL
5585 || TREE_CODE (var) == PARM_DECL)
5586 && DECL_BY_REFERENCE (var)))
5588 x = build_sender_ref (ovar, ctx);
5589 gimplify_assign (var, x, olist);
5595 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5596 other information that must be processed by the target compiler.
5597 Return the maximum number of dimensions the associated loop might
5598 be partitioned over. */
5600 static unsigned
5601 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5602 gimple_seq *seq, omp_context *ctx)
5604 unsigned levels = 0;
5605 unsigned tag = 0;
5606 tree gang_static = NULL_TREE;
5607 auto_vec<tree, 5> args;
5609 args.quick_push (build_int_cst
5610 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5611 args.quick_push (ddvar);
5612 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5614 switch (OMP_CLAUSE_CODE (c))
5616 case OMP_CLAUSE_GANG:
5617 tag |= OLF_DIM_GANG;
5618 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5619 /* static:* is represented by -1, and we can ignore it, as
5620 scheduling is always static. */
5621 if (gang_static && integer_minus_onep (gang_static))
5622 gang_static = NULL_TREE;
5623 levels++;
5624 break;
5626 case OMP_CLAUSE_WORKER:
5627 tag |= OLF_DIM_WORKER;
5628 levels++;
5629 break;
5631 case OMP_CLAUSE_VECTOR:
5632 tag |= OLF_DIM_VECTOR;
5633 levels++;
5634 break;
5636 case OMP_CLAUSE_SEQ:
5637 tag |= OLF_SEQ;
5638 break;
5640 case OMP_CLAUSE_AUTO:
5641 tag |= OLF_AUTO;
5642 break;
5644 case OMP_CLAUSE_INDEPENDENT:
5645 tag |= OLF_INDEPENDENT;
5646 break;
5648 case OMP_CLAUSE_TILE:
5649 tag |= OLF_TILE;
5650 break;
5652 default:
5653 continue;
5657 if (gang_static)
5659 if (DECL_P (gang_static))
5660 gang_static = build_outer_var_ref (gang_static, ctx);
5661 tag |= OLF_GANG_STATIC;
5664 /* In a parallel region, loops are implicitly INDEPENDENT. */
5665 omp_context *tgt = enclosing_target_ctx (ctx);
5666 if (!tgt || is_oacc_parallel (tgt))
5667 tag |= OLF_INDEPENDENT;
5669 if (tag & OLF_TILE)
5670 /* Tiling could use all 3 levels. */
5671 levels = 3;
5672 else
5674 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5675 Ensure at least one level, or 2 for possible auto
5676 partitioning. */
5677 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5678 << OLF_DIM_BASE) | OLF_SEQ));
5680 if (levels < 1u + maybe_auto)
5681 levels = 1u + maybe_auto;
5684 args.quick_push (build_int_cst (integer_type_node, levels));
5685 args.quick_push (build_int_cst (integer_type_node, tag));
5686 if (gang_static)
5687 args.quick_push (gang_static);
5689 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5690 gimple_set_location (call, loc);
5691 gimple_set_lhs (call, ddvar);
5692 gimple_seq_add_stmt (seq, call);
5694 return levels;
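/* In GIMPLE dumps the marker built above appears roughly as

     ddvar = .UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag [, gang_static]);

   matching the argument vector assembled in this function.  */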
5697 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
5698 partitioning level of the enclosed region. */
5700 static void
5701 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5702 tree tofollow, gimple_seq *seq)
5704 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5705 : IFN_UNIQUE_OACC_TAIL_MARK);
5706 tree marker = build_int_cst (integer_type_node, marker_kind);
5707 int nargs = 2 + (tofollow != NULL_TREE);
5708 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5709 marker, ddvar, tofollow);
5710 gimple_set_location (call, loc);
5711 gimple_set_lhs (call, ddvar);
5712 gimple_seq_add_stmt (seq, call);
5715 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5716 the loop clauses, from which we extract reductions. Initialize
5717 HEAD and TAIL. */
5719 static void
5720 lower_oacc_head_tail (location_t loc, tree clauses,
5721 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5723 bool inner = false;
5724 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5725 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5727 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5728 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5729 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5731 gcc_assert (count);
5732 for (unsigned done = 1; count; count--, done++)
5734 gimple_seq fork_seq = NULL;
5735 gimple_seq join_seq = NULL;
5737 tree place = build_int_cst (integer_type_node, -1);
5738 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5739 fork_kind, ddvar, place);
5740 gimple_set_location (fork, loc);
5741 gimple_set_lhs (fork, ddvar);
5743 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5744 join_kind, ddvar, place);
5745 gimple_set_location (join, loc);
5746 gimple_set_lhs (join, ddvar);
5748 /* Mark the beginning of this level sequence. */
5749 if (inner)
5750 lower_oacc_loop_marker (loc, ddvar, true,
5751 build_int_cst (integer_type_node, count),
5752 &fork_seq);
5753 lower_oacc_loop_marker (loc, ddvar, false,
5754 build_int_cst (integer_type_node, done),
5755 &join_seq);
5757 lower_oacc_reductions (loc, clauses, place, inner,
5758 fork, join, &fork_seq, &join_seq, ctx);
5760 /* Append this level to head. */
5761 gimple_seq_add_seq (head, fork_seq);
5762 /* Prepend it to tail. */
5763 gimple_seq_add_seq (&join_seq, *tail);
5764 *tail = join_seq;
5766 inner = true;
5769 /* Mark the end of the sequence. */
5770 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5771 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
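/* Rough shape of the two sequences for a two-level loop (a sketch for
   exposition; the real statements are IFN_UNIQUE internal calls threaded
   through the .data_dep variable):

     HEAD: .data_dep = 0; OACC_HEAD_MARK (levels, tag);
           OACC_FORK (outer); OACC_HEAD_MARK; OACC_FORK (inner);
           OACC_HEAD_MARK (end)
     TAIL: OACC_TAIL_MARK; OACC_JOIN (inner);
           OACC_TAIL_MARK; OACC_JOIN (outer); OACC_TAIL_MARK (end)

   with reduction setup and teardown interleaved around each fork/join by
   lower_oacc_reductions.  */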
5774 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5775 catch handler and return it. This prevents programs from violating the
5776 structured block semantics with throws. */
5778 static gimple_seq
5779 maybe_catch_exception (gimple_seq body)
5781 gimple *g;
5782 tree decl;
5784 if (!flag_exceptions)
5785 return body;
5787 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5788 decl = lang_hooks.eh_protect_cleanup_actions ();
5789 else
5790 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5792 g = gimple_build_eh_must_not_throw (decl);
5793 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5794 GIMPLE_TRY_CATCH);
5796 return gimple_seq_alloc_with_stmt (g);
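/* Sketch of the effect (illustration): for a body B this builds the GIMPLE
   equivalent of

     try { B } catch { MUST_NOT_THROW: <eh_protect_cleanup_actions
                                        or __builtin_trap> }

   so an exception thrown out of the structured block runs the language's
   cleanup action (or traps) instead of unwinding across the OMP region.  */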
5800 /* Routines to lower OMP directives into OMP-GIMPLE. */
5802 /* If ctx is a worksharing context inside of a cancellable parallel
5803 region and it isn't nowait, add an LHS to its GIMPLE_OMP_RETURN
5804 and a conditional branch to the parallel's cancel_label to handle
5805 cancellation in the implicit barrier. */
5807 static void
5808 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5810 gimple *omp_return = gimple_seq_last_stmt (*body);
5811 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5812 if (gimple_omp_return_nowait_p (omp_return))
5813 return;
5814 if (ctx->outer
5815 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5816 && ctx->outer->cancellable)
5818 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5819 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5820 tree lhs = create_tmp_var (c_bool_type);
5821 gimple_omp_return_set_lhs (omp_return, lhs);
5822 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5823 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5824 fold_convert (c_bool_type,
5825 boolean_false_node),
5826 ctx->outer->cancel_label, fallthru_label);
5827 gimple_seq_add_stmt (body, g);
5828 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
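/* The resulting tail of *BODY is roughly (sketch):

     <lhs> = GIMPLE_OMP_RETURN;           -- barrier reports cancel status
     if (<lhs> != 0) goto <cancel_label>;
   <fallthru_label>:

   so a thread that observes cancellation at the implicit barrier jumps
   straight to the parallel region's cancellation handling.  */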
5832 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5833 CTX is the enclosing OMP context for the current statement. */
5835 static void
5836 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5838 tree block, control;
5839 gimple_stmt_iterator tgsi;
5840 gomp_sections *stmt;
5841 gimple *t;
5842 gbind *new_stmt, *bind;
5843 gimple_seq ilist, dlist, olist, new_body;
5845 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5847 push_gimplify_context ();
5849 dlist = NULL;
5850 ilist = NULL;
5851 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5852 &ilist, &dlist, ctx, NULL);
5854 new_body = gimple_omp_body (stmt);
5855 gimple_omp_set_body (stmt, NULL);
5856 tgsi = gsi_start (new_body);
5857 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5859 omp_context *sctx;
5860 gimple *sec_start;
5862 sec_start = gsi_stmt (tgsi);
5863 sctx = maybe_lookup_ctx (sec_start);
5864 gcc_assert (sctx);
5866 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5867 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5868 GSI_CONTINUE_LINKING);
5869 gimple_omp_set_body (sec_start, NULL);
5871 if (gsi_one_before_end_p (tgsi))
5873 gimple_seq l = NULL;
5874 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5875 &l, ctx);
5876 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5877 gimple_omp_section_set_last (sec_start);
5880 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5881 GSI_CONTINUE_LINKING);
5884 block = make_node (BLOCK);
5885 bind = gimple_build_bind (NULL, new_body, block);
5887 olist = NULL;
5888 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5890 block = make_node (BLOCK);
5891 new_stmt = gimple_build_bind (NULL, NULL, block);
5892 gsi_replace (gsi_p, new_stmt, true);
5894 pop_gimplify_context (new_stmt);
5895 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5896 BLOCK_VARS (block) = gimple_bind_vars (bind);
5897 if (BLOCK_VARS (block))
5898 TREE_USED (block) = 1;
5900 new_body = NULL;
5901 gimple_seq_add_seq (&new_body, ilist);
5902 gimple_seq_add_stmt (&new_body, stmt);
5903 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5904 gimple_seq_add_stmt (&new_body, bind);
5906 control = create_tmp_var (unsigned_type_node, ".section");
5907 t = gimple_build_omp_continue (control, control);
5908 gimple_omp_sections_set_control (stmt, control);
5909 gimple_seq_add_stmt (&new_body, t);
5911 gimple_seq_add_seq (&new_body, olist);
5912 if (ctx->cancellable)
5913 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5914 gimple_seq_add_seq (&new_body, dlist);
5916 new_body = maybe_catch_exception (new_body);
5918 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5919 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5920 t = gimple_build_omp_return (nowait);
5921 gimple_seq_add_stmt (&new_body, t);
5922 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5924 gimple_bind_set_body (new_stmt, new_body);
5928 /* A subroutine of lower_omp_single. Expand the simple form of
5929 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5931 if (GOMP_single_start ())
5932 BODY;
5933 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5935 FIXME. It may be better to delay expanding the logic of this until
5936 pass_expand_omp. The expanded logic may make the job more difficult
5937 for a synchronization analysis pass. */
5939 static void
5940 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5942 location_t loc = gimple_location (single_stmt);
5943 tree tlabel = create_artificial_label (loc);
5944 tree flabel = create_artificial_label (loc);
5945 gimple *call, *cond;
5946 tree lhs, decl;
5948 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5949 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5950 call = gimple_build_call (decl, 0);
5951 gimple_call_set_lhs (call, lhs);
5952 gimple_seq_add_stmt (pre_p, call);
5954 cond = gimple_build_cond (EQ_EXPR, lhs,
5955 fold_convert_loc (loc, TREE_TYPE (lhs),
5956 boolean_true_node),
5957 tlabel, flabel);
5958 gimple_seq_add_stmt (pre_p, cond);
5959 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5960 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5961 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
5965 /* A subroutine of lower_omp_single. Expand the simple form of
5966 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
5968 #pragma omp single copyprivate (a, b, c)
5970 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5973 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5975 BODY;
5976 copyout.a = a;
5977 copyout.b = b;
5978 copyout.c = c;
5979 GOMP_single_copy_end (&copyout);
5981 else
5983 a = copyout_p->a;
5984 b = copyout_p->b;
5985 c = copyout_p->c;
5987 GOMP_barrier ();
5990 FIXME. It may be better to delay expanding the logic of this until
5991 pass_expand_omp. The expanded logic may make the job more difficult
5992 for a synchronization analysis pass. */
5994 static void
5995 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
5996 omp_context *ctx)
5998 tree ptr_type, t, l0, l1, l2, bfn_decl;
5999 gimple_seq copyin_seq;
6000 location_t loc = gimple_location (single_stmt);
6002 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6004 ptr_type = build_pointer_type (ctx->record_type);
6005 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6007 l0 = create_artificial_label (loc);
6008 l1 = create_artificial_label (loc);
6009 l2 = create_artificial_label (loc);
6011 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6012 t = build_call_expr_loc (loc, bfn_decl, 0);
6013 t = fold_convert_loc (loc, ptr_type, t);
6014 gimplify_assign (ctx->receiver_decl, t, pre_p);
6016 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6017 build_int_cst (ptr_type, 0));
6018 t = build3 (COND_EXPR, void_type_node, t,
6019 build_and_jump (&l0), build_and_jump (&l1));
6020 gimplify_and_add (t, pre_p);
6022 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6024 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6026 copyin_seq = NULL;
6027 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6028 &copyin_seq, ctx);
6030 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6031 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6032 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6033 gimplify_and_add (t, pre_p);
6035 t = build_and_jump (&l2);
6036 gimplify_and_add (t, pre_p);
6038 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6040 gimple_seq_add_seq (pre_p, copyin_seq);
6042 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6046 /* Expand code for an OpenMP single directive. */
6048 static void
6049 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6051 tree block;
6052 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6053 gbind *bind;
6054 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6056 push_gimplify_context ();
6058 block = make_node (BLOCK);
6059 bind = gimple_build_bind (NULL, NULL, block);
6060 gsi_replace (gsi_p, bind, true);
6061 bind_body = NULL;
6062 dlist = NULL;
6063 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6064 &bind_body, &dlist, ctx, NULL);
6065 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6067 gimple_seq_add_stmt (&bind_body, single_stmt);
6069 if (ctx->record_type)
6070 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6071 else
6072 lower_omp_single_simple (single_stmt, &bind_body);
6074 gimple_omp_set_body (single_stmt, NULL);
6076 gimple_seq_add_seq (&bind_body, dlist);
6078 bind_body = maybe_catch_exception (bind_body);
6080 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6081 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6082 gimple *g = gimple_build_omp_return (nowait);
6083 gimple_seq_add_stmt (&bind_body_tail, g);
6084 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6085 if (ctx->record_type)
6087 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6088 tree clobber = build_constructor (ctx->record_type, NULL);
6089 TREE_THIS_VOLATILE (clobber) = 1;
6090 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6091 clobber), GSI_SAME_STMT);
6093 gimple_seq_add_seq (&bind_body, bind_body_tail);
6094 gimple_bind_set_body (bind, bind_body);
6096 pop_gimplify_context (bind);
6098 gimple_bind_append_vars (bind, ctx->block_vars);
6099 BLOCK_VARS (block) = ctx->block_vars;
6100 if (BLOCK_VARS (block))
6101 TREE_USED (block) = 1;
6105 /* Expand code for an OpenMP master directive. */
6107 static void
6108 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6110 tree block, lab = NULL, x, bfn_decl;
6111 gimple *stmt = gsi_stmt (*gsi_p);
6112 gbind *bind;
6113 location_t loc = gimple_location (stmt);
6114 gimple_seq tseq;
6116 push_gimplify_context ();
6118 block = make_node (BLOCK);
6119 bind = gimple_build_bind (NULL, NULL, block);
6120 gsi_replace (gsi_p, bind, true);
6121 gimple_bind_add_stmt (bind, stmt);
6123 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6124 x = build_call_expr_loc (loc, bfn_decl, 0);
6125 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6126 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6127 tseq = NULL;
6128 gimplify_and_add (x, &tseq);
6129 gimple_bind_add_seq (bind, tseq);
6131 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6132 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6133 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6134 gimple_omp_set_body (stmt, NULL);
6136 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6138 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6140 pop_gimplify_context (bind);
6142 gimple_bind_append_vars (bind, ctx->block_vars);
6143 BLOCK_VARS (block) = ctx->block_vars;
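/* Net effect (sketch): "#pragma omp master BODY" becomes roughly

     if (omp_get_thread_num () != 0) goto <lab>;
     BODY;                               -- wrapped by maybe_catch_exception
   <lab>:
     GIMPLE_OMP_RETURN (nowait);

   i.e. only the master thread runs the body, and there is no implied
   barrier at the end.  */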
6147 /* Expand code for an OpenMP taskgroup directive. */
6149 static void
6150 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6152 gimple *stmt = gsi_stmt (*gsi_p);
6153 gcall *x;
6154 gbind *bind;
6155 tree block = make_node (BLOCK);
6157 bind = gimple_build_bind (NULL, NULL, block);
6158 gsi_replace (gsi_p, bind, true);
6159 gimple_bind_add_stmt (bind, stmt);
6161 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6162 0);
6163 gimple_bind_add_stmt (bind, x);
6165 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6166 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6167 gimple_omp_set_body (stmt, NULL);
6169 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6171 gimple_bind_append_vars (bind, ctx->block_vars);
6172 BLOCK_VARS (block) = ctx->block_vars;
6176 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6178 static void
6179 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6180 omp_context *ctx)
6182 struct omp_for_data fd;
6183 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6184 return;
6186 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6187 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6188 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6189 if (!fd.ordered)
6190 return;
6192 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6193 tree c = gimple_omp_ordered_clauses (ord_stmt);
6194 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6195 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6197 /* Merge depend clauses from multiple adjacent
6198 #pragma omp ordered depend(sink:...) constructs
6199 into one #pragma omp ordered depend(sink:...), so that
6200 we can optimize them together. */
6201 gimple_stmt_iterator gsi = *gsi_p;
6202 gsi_next (&gsi);
6203 while (!gsi_end_p (gsi))
6205 gimple *stmt = gsi_stmt (gsi);
6206 if (is_gimple_debug (stmt)
6207 || gimple_code (stmt) == GIMPLE_NOP)
6209 gsi_next (&gsi);
6210 continue;
6212 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6213 break;
6214 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6215 c = gimple_omp_ordered_clauses (ord_stmt2);
6216 if (c == NULL_TREE
6217 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6218 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6219 break;
6220 while (*list_p)
6221 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6222 *list_p = c;
6223 gsi_remove (&gsi, true);
6227 /* Canonicalize sink dependence clauses into one folded clause if
6228 possible.
6230 The basic algorithm is to create a sink vector whose first
6231 element is the GCD of all the first elements, and whose remaining
6232 elements are the minimum of the subsequent columns.
6234 We ignore dependence vectors whose first element is zero because
6235 such dependencies are known to be executed by the same thread.
6237 We take into account the direction of the loop, so a minimum
6238 becomes a maximum if the loop is iterating forwards. We also
6239 ignore sink clauses where the loop direction is unknown, or where
6240 the offsets are clearly invalid because they are not a multiple
6241 of the loop increment.
6243 For example:
6245 #pragma omp for ordered(2)
6246 for (i=0; i < N; ++i)
6247 for (j=0; j < M; ++j)
6249 #pragma omp ordered \
6250 depend(sink:i-8,j-2) \
6251 depend(sink:i,j-1) \ // Completely ignored because i+0.
6252 depend(sink:i-4,j-3) \
6253 depend(sink:i-6,j-4)
6254 #pragma omp ordered depend(source)
6257 Folded clause is:
6259 depend(sink:-gcd(8,4,6),-min(2,3,4))
6260 -or-
6261 depend(sink:-2,-2)
6262 */
6264 /* FIXME: Computing GCD's where the first element is zero is
6265 non-trivial in the presence of collapsed loops. Do this later. */
6266 if (fd.collapse > 1)
6267 return;
6269 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6271 /* wide_int is not a POD so it must be default-constructed. */
6272 for (unsigned i = 0; i != 2 * len - 1; ++i)
6273 new (static_cast<void*>(folded_deps + i)) wide_int ();
6275 tree folded_dep = NULL_TREE;
6276 /* TRUE if the first dimension's offset is negative. */
6277 bool neg_offset_p = false;
6279 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6280 unsigned int i;
6281 while ((c = *list_p) != NULL)
6283 bool remove = false;
6285 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6286 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6287 goto next_ordered_clause;
6289 tree vec;
6290 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6291 vec && TREE_CODE (vec) == TREE_LIST;
6292 vec = TREE_CHAIN (vec), ++i)
6294 gcc_assert (i < len);
6296 /* omp_extract_for_data has canonicalized the condition. */
6297 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6298 || fd.loops[i].cond_code == GT_EXPR);
6299 bool forward = fd.loops[i].cond_code == LT_EXPR;
6300 bool maybe_lexically_later = true;
6302 /* While the committee makes up its mind, bail if we have any
6303 non-constant steps. */
6304 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6305 goto lower_omp_ordered_ret;
6307 tree itype = TREE_TYPE (TREE_VALUE (vec));
6308 if (POINTER_TYPE_P (itype))
6309 itype = sizetype;
6310 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
6311 TYPE_PRECISION (itype),
6312 TYPE_SIGN (itype));
6314 /* Ignore invalid offsets that are not multiples of the step. */
6315 if (!wi::multiple_of_p (wi::abs (offset),
6316 wi::abs (wi::to_wide (fd.loops[i].step)),
6317 UNSIGNED))
6319 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6320 "ignoring sink clause with offset that is not "
6321 "a multiple of the loop step");
6322 remove = true;
6323 goto next_ordered_clause;
6326 /* Calculate the first dimension. The first dimension of
6327 the folded dependency vector is the GCD of the first
6328 elements, while ignoring any first elements whose offset
6329 is 0. */
6330 if (i == 0)
6332 /* Ignore dependence vectors whose first dimension is 0. */
6333 if (offset == 0)
6335 remove = true;
6336 goto next_ordered_clause;
6338 else
6340 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6342 error_at (OMP_CLAUSE_LOCATION (c),
6343 "first offset must be in opposite direction "
6344 "of loop iterations");
6345 goto lower_omp_ordered_ret;
6347 if (forward)
6348 offset = -offset;
6349 neg_offset_p = forward;
6350 /* Initialize the first time around. */
6351 if (folded_dep == NULL_TREE)
6353 folded_dep = c;
6354 folded_deps[0] = offset;
6356 else
6357 folded_deps[0] = wi::gcd (folded_deps[0],
6358 offset, UNSIGNED);
6361 /* Calculate minimum for the remaining dimensions. */
6362 else
6364 folded_deps[len + i - 1] = offset;
6365 if (folded_dep == c)
6366 folded_deps[i] = offset;
6367 else if (maybe_lexically_later
6368 && !wi::eq_p (folded_deps[i], offset))
6370 if (forward ^ wi::gts_p (folded_deps[i], offset))
6372 unsigned int j;
6373 folded_dep = c;
6374 for (j = 1; j <= i; j++)
6375 folded_deps[j] = folded_deps[len + j - 1];
6377 else
6378 maybe_lexically_later = false;
6382 gcc_assert (i == len);
6384 remove = true;
6386 next_ordered_clause:
6387 if (remove)
6388 *list_p = OMP_CLAUSE_CHAIN (c);
6389 else
6390 list_p = &OMP_CLAUSE_CHAIN (c);
6393 if (folded_dep)
6395 if (neg_offset_p)
6396 folded_deps[0] = -folded_deps[0];
6398 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6399 if (POINTER_TYPE_P (itype))
6400 itype = sizetype;
6402 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6403 = wide_int_to_tree (itype, folded_deps[0]);
6404 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6405 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6408 lower_omp_ordered_ret:
6410 /* Ordered without clauses is #pragma omp ordered threads, while we want
6411 a nop instead if we remove all clauses. */
6412 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6413 gsi_replace (gsi_p, gimple_build_nop (), true);
6417 /* Expand code for an OpenMP ordered directive. */
6419 static void
6420 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6422 tree block;
6423 gimple *stmt = gsi_stmt (*gsi_p), *g;
6424 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6425 gcall *x;
6426 gbind *bind;
6427 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6428 OMP_CLAUSE_SIMD);
6429 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6430 loop. */
6431 bool maybe_simt
6432 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6433 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6434 OMP_CLAUSE_THREADS);
6436 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6437 OMP_CLAUSE_DEPEND))
6439 /* FIXME: This needs to be moved to the expansion to verify various
6440 conditions only testable on a cfg with dominators computed, and also
6441 all the depend clauses to be merged might still need to be available
6442 for the runtime checks. */
6443 if (0)
6444 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6445 return;
6448 push_gimplify_context ();
6450 block = make_node (BLOCK);
6451 bind = gimple_build_bind (NULL, NULL, block);
6452 gsi_replace (gsi_p, bind, true);
6453 gimple_bind_add_stmt (bind, stmt);
6455 if (simd)
6457 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6458 build_int_cst (NULL_TREE, threads));
6459 cfun->has_simduid_loops = true;
6461 else
6462 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6463 0);
6464 gimple_bind_add_stmt (bind, x);
6466 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6467 if (maybe_simt)
6469 counter = create_tmp_var (integer_type_node);
6470 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6471 gimple_call_set_lhs (g, counter);
6472 gimple_bind_add_stmt (bind, g);
6474 body = create_artificial_label (UNKNOWN_LOCATION);
6475 test = create_artificial_label (UNKNOWN_LOCATION);
6476 gimple_bind_add_stmt (bind, gimple_build_label (body));
6478 tree simt_pred = create_tmp_var (integer_type_node);
6479 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6480 gimple_call_set_lhs (g, simt_pred);
6481 gimple_bind_add_stmt (bind, g);
6483 tree t = create_artificial_label (UNKNOWN_LOCATION);
6484 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6485 gimple_bind_add_stmt (bind, g);
6487 gimple_bind_add_stmt (bind, gimple_build_label (t));
6489 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6490 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6491 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6492 gimple_omp_set_body (stmt, NULL);
6494 if (maybe_simt)
6496 gimple_bind_add_stmt (bind, gimple_build_label (test));
6497 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6498 gimple_bind_add_stmt (bind, g);
6500 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6501 tree nonneg = create_tmp_var (integer_type_node);
6502 gimple_seq tseq = NULL;
6503 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6504 gimple_bind_add_seq (bind, tseq);
6506 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6507 gimple_call_set_lhs (g, nonneg);
6508 gimple_bind_add_stmt (bind, g);
6510 tree end = create_artificial_label (UNKNOWN_LOCATION);
6511 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6512 gimple_bind_add_stmt (bind, g);
6514 gimple_bind_add_stmt (bind, gimple_build_label (end));
6516 if (simd)
6517 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6518 build_int_cst (NULL_TREE, threads));
6519 else
6520 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6521 0);
6522 gimple_bind_add_stmt (bind, x);
6524 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6526 pop_gimplify_context (bind);
6528 gimple_bind_append_vars (bind, ctx->block_vars);
6529 BLOCK_VARS (block) = gimple_bind_vars (bind);
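/* For the maybe_simt case the skeleton built above is roughly (sketch):

     counter = GOMP_SIMT_LANE ();
   <body>:
     pred = GOMP_SIMT_ORDERED_PRED (counter);
     if (pred == 0) BODY;                -- only the lane whose turn it is
   <test>:
     counter = counter - 1;
     nonneg = GOMP_SIMT_VOTE_ANY (counter >= 0);
     if (nonneg != 0) goto <body>;
   <end>:

   so the lanes of a SIMT group execute the ordered body one after another,
   in lane order.  */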
6533 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6534 substitution of a couple of function calls. But the NAMED case
6535 requires that languages coordinate a symbol name. It is therefore
6536 best put here in common code. */
6538 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6540 static void
6541 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6543 tree block;
6544 tree name, lock, unlock;
6545 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6546 gbind *bind;
6547 location_t loc = gimple_location (stmt);
6548 gimple_seq tbody;
6550 name = gimple_omp_critical_name (stmt);
6551 if (name)
6553 tree decl;
6555 if (!critical_name_mutexes)
6556 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6558 tree *n = critical_name_mutexes->get (name);
6559 if (n == NULL)
6561 char *new_str;
6563 decl = create_tmp_var_raw (ptr_type_node);
6565 new_str = ACONCAT ((".gomp_critical_user_",
6566 IDENTIFIER_POINTER (name), NULL));
6567 DECL_NAME (decl) = get_identifier (new_str);
6568 TREE_PUBLIC (decl) = 1;
6569 TREE_STATIC (decl) = 1;
6570 DECL_COMMON (decl) = 1;
6571 DECL_ARTIFICIAL (decl) = 1;
6572 DECL_IGNORED_P (decl) = 1;
6574 varpool_node::finalize_decl (decl);
6576 critical_name_mutexes->put (name, decl);
6578 else
6579 decl = *n;
6581 /* If '#pragma omp critical' is inside offloaded region or
6582 inside function marked as offloadable, the symbol must be
6583 marked as offloadable too. */
6584 omp_context *octx;
6585 if (cgraph_node::get (current_function_decl)->offloadable)
6586 varpool_node::get_create (decl)->offloadable = 1;
6587 else
6588 for (octx = ctx->outer; octx; octx = octx->outer)
6589 if (is_gimple_omp_offloaded (octx->stmt))
6591 varpool_node::get_create (decl)->offloadable = 1;
6592 break;
6595 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6596 lock = build_call_expr_loc (loc, lock, 1,
6597 build_fold_addr_expr_loc (loc, decl));
6599 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6600 unlock = build_call_expr_loc (loc, unlock, 1,
6601 build_fold_addr_expr_loc (loc, decl));
6603 else
6605 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6606 lock = build_call_expr_loc (loc, lock, 0);
6608 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6609 unlock = build_call_expr_loc (loc, unlock, 0);
6612 push_gimplify_context ();
6614 block = make_node (BLOCK);
6615 bind = gimple_build_bind (NULL, NULL, block);
6616 gsi_replace (gsi_p, bind, true);
6617 gimple_bind_add_stmt (bind, stmt);
6619 tbody = gimple_bind_body (bind);
6620 gimplify_and_add (lock, &tbody);
6621 gimple_bind_set_body (bind, tbody);
6623 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6624 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6625 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6626 gimple_omp_set_body (stmt, NULL);
6628 tbody = gimple_bind_body (bind);
6629 gimplify_and_add (unlock, &tbody);
6630 gimple_bind_set_body (bind, tbody);
6632 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6634 pop_gimplify_context (bind);
6635 gimple_bind_append_vars (bind, ctx->block_vars);
6636 BLOCK_VARS (block) = gimple_bind_vars (bind);
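/* Sketch of the result: an unnamed critical lowers to

     GOMP_critical_start (); BODY; GOMP_critical_end ();

   while "#pragma omp critical (foo)" locks the shared, common symbol
   .gomp_critical_user_foo through GOMP_critical_name_start/end.  Every
   translation unit (and language) must agree on that symbol, which is why
   the naming is done here in common code.  */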
6639 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6640 for a lastprivate clause. Given a loop control predicate of (V
6641 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6642 is appended to *DLIST, iterator initialization is appended to
6643 *BODY_P. */
6645 static void
6646 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6647 gimple_seq *dlist, struct omp_context *ctx)
6649 tree clauses, cond, vinit;
6650 enum tree_code cond_code;
6651 gimple_seq stmts;
6653 cond_code = fd->loop.cond_code;
6654 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6656 /* When possible, use a strict equality expression. This can let
6657 VRP-style optimizations deduce the value and remove a copy. */
6658 if (tree_fits_shwi_p (fd->loop.step))
6660 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6661 if (step == 1 || step == -1)
6662 cond_code = EQ_EXPR;
6665 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6666 || gimple_omp_for_grid_phony (fd->for_stmt))
6667 cond = omp_grid_lastprivate_predicate (fd);
6668 else
6670 tree n2 = fd->loop.n2;
6671 if (fd->collapse > 1
6672 && TREE_CODE (n2) != INTEGER_CST
6673 && gimple_omp_for_combined_into_p (fd->for_stmt))
6675 struct omp_context *taskreg_ctx = NULL;
6676 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6678 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6679 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6680 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6682 if (gimple_omp_for_combined_into_p (gfor))
6684 gcc_assert (ctx->outer->outer
6685 && is_parallel_ctx (ctx->outer->outer));
6686 taskreg_ctx = ctx->outer->outer;
6688 else
6690 struct omp_for_data outer_fd;
6691 omp_extract_for_data (gfor, &outer_fd, NULL);
6692 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6695 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6696 taskreg_ctx = ctx->outer->outer;
6698 else if (is_taskreg_ctx (ctx->outer))
6699 taskreg_ctx = ctx->outer;
6700 if (taskreg_ctx)
6702 int i;
6703 tree taskreg_clauses
6704 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6705 tree innerc = omp_find_clause (taskreg_clauses,
6706 OMP_CLAUSE__LOOPTEMP_);
6707 gcc_assert (innerc);
6708 for (i = 0; i < fd->collapse; i++)
6710 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6711 OMP_CLAUSE__LOOPTEMP_);
6712 gcc_assert (innerc);
6714 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6715 OMP_CLAUSE__LOOPTEMP_);
6716 if (innerc)
6717 n2 = fold_convert (TREE_TYPE (n2),
6718 lookup_decl (OMP_CLAUSE_DECL (innerc),
6719 taskreg_ctx));
6722 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6725 clauses = gimple_omp_for_clauses (fd->for_stmt);
6726 stmts = NULL;
6727 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6728 if (!gimple_seq_empty_p (stmts))
6730 gimple_seq_add_seq (&stmts, *dlist);
6731 *dlist = stmts;
6733 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6734 vinit = fd->loop.n1;
6735 if (cond_code == EQ_EXPR
6736 && tree_fits_shwi_p (fd->loop.n2)
6737 && ! integer_zerop (fd->loop.n2))
6738 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6739 else
6740 vinit = unshare_expr (vinit);
6742 /* Initialize the iterator variable, so that threads that don't execute
6743 any iterations don't execute the lastprivate clauses by accident. */
6744 gimplify_assign (fd->loop.v, vinit, body_p);
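/* Example (illustration): for

     #pragma omp for lastprivate (x)
     for (i = 0; i < n; i++) ...

   the step is 1, so the guard becomes the strict test "i == n" rather than
   "i >= n", and *BODY_P gains an "i = 0;" initialization so that a thread
   which executes no iterations cannot satisfy the guard by accident.  */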
6749 /* Lower code for an OMP loop directive. */
6751 static void
6752 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6754 tree *rhs_p, block;
6755 struct omp_for_data fd, *fdp = NULL;
6756 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6757 gbind *new_stmt;
6758 gimple_seq omp_for_body, body, dlist;
6759 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6760 size_t i;
6762 push_gimplify_context ();
6764 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6766 block = make_node (BLOCK);
6767 new_stmt = gimple_build_bind (NULL, NULL, block);
6768 /* Replace at gsi right away, so that 'stmt' is no longer a member
6769 of a sequence, as we're going to add it to a different
6770 one below. */
6771 gsi_replace (gsi_p, new_stmt, true);
6773 /* Move declaration of temporaries in the loop body before we make
6774 it go away. */
6775 omp_for_body = gimple_omp_body (stmt);
6776 if (!gimple_seq_empty_p (omp_for_body)
6777 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6779 gbind *inner_bind
6780 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6781 tree vars = gimple_bind_vars (inner_bind);
6782 gimple_bind_append_vars (new_stmt, vars);
6783 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6784 keep them on the inner_bind and its block. */
6785 gimple_bind_set_vars (inner_bind, NULL_TREE);
6786 if (gimple_bind_block (inner_bind))
6787 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6790 if (gimple_omp_for_combined_into_p (stmt))
6792 omp_extract_for_data (stmt, &fd, NULL);
6793 fdp = &fd;
6795 /* We need two temporaries with fd.loop.v type (istart/iend)
6796 and then (fd.collapse - 1) temporaries with the same
6797 type for count2 ... countN-1 vars if not constant. */
6798 size_t count = 2;
6799 tree type = fd.iter_type;
6800 if (fd.collapse > 1
6801 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6802 count += fd.collapse - 1;
6803 bool taskreg_for
6804 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6805 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6806 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6807 tree simtc = NULL;
6808 tree clauses = *pc;
6809 if (taskreg_for)
6810 outerc
6811 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6812 OMP_CLAUSE__LOOPTEMP_);
6813 if (ctx->simt_stmt)
6814 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6815 OMP_CLAUSE__LOOPTEMP_);
6816 for (i = 0; i < count; i++)
6818 tree temp;
6819 if (taskreg_for)
6821 gcc_assert (outerc);
6822 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6823 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6824 OMP_CLAUSE__LOOPTEMP_);
6826 else
6828 /* If there are 2 adjacent SIMD stmts, one with _simt_
6829 clause, another without, make sure they have the same
6830 decls in _looptemp_ clauses, because the outer stmt
6831 they are combined into will look up just one inner_stmt. */
6832 if (ctx->simt_stmt)
6833 temp = OMP_CLAUSE_DECL (simtc);
6834 else
6835 temp = create_tmp_var (type);
6836 insert_decl_map (&ctx->outer->cb, temp, temp);
6838 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6839 OMP_CLAUSE_DECL (*pc) = temp;
6840 pc = &OMP_CLAUSE_CHAIN (*pc);
6841 if (ctx->simt_stmt)
6842 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6843 OMP_CLAUSE__LOOPTEMP_);
6845 *pc = clauses;
6848 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6849 dlist = NULL;
6850 body = NULL;
6851 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6852 fdp);
6853 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6855 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6857 /* Lower the header expressions. At this point, we can assume that
6858 the header is of the form:
6860 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6862 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6863 using the .omp_data_s mapping, if needed. */
6864 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6866 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6867 if (!is_gimple_min_invariant (*rhs_p))
6868 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6869 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6870 recompute_tree_invariant_for_addr_expr (*rhs_p);
6872 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6873 if (!is_gimple_min_invariant (*rhs_p))
6874 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6875 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6876 recompute_tree_invariant_for_addr_expr (*rhs_p);
6878 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6879 if (!is_gimple_min_invariant (*rhs_p))
6880 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6883 /* Once lowered, extract the bounds and clauses. */
6884 omp_extract_for_data (stmt, &fd, NULL);
6886 if (is_gimple_omp_oacc (ctx->stmt)
6887 && !ctx_in_oacc_kernels_region (ctx))
6888 lower_oacc_head_tail (gimple_location (stmt),
6889 gimple_omp_for_clauses (stmt),
6890 &oacc_head, &oacc_tail, ctx);
6892 /* Add OpenACC partitioning and reduction markers just before the loop. */
6893 if (oacc_head)
6894 gimple_seq_add_seq (&body, oacc_head);
6896 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6898 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6899 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6900 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6901 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6903 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6904 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6905 OMP_CLAUSE_LINEAR_STEP (c)
6906 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6907 ctx);
6910 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6911 && gimple_omp_for_grid_phony (stmt));
6912 if (!phony_loop)
6913 gimple_seq_add_stmt (&body, stmt);
6914 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6916 if (!phony_loop)
6917 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6918 fd.loop.v));
6920 /* After the loop, add exit clauses. */
6921 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6923 if (ctx->cancellable)
6924 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6926 gimple_seq_add_seq (&body, dlist);
6928 body = maybe_catch_exception (body);
6930 if (!phony_loop)
6932 /* Region exit marker goes at the end of the loop body. */
6933 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6934 maybe_add_implicit_barrier_cancel (ctx, &body);
6937 /* Add OpenACC joining and reduction markers just after the loop. */
6938 if (oacc_tail)
6939 gimple_seq_add_seq (&body, oacc_tail);
6941 pop_gimplify_context (new_stmt);
6943 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6944 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6945 if (BLOCK_VARS (block))
6946 TREE_USED (block) = 1;
6948 gimple_bind_set_body (new_stmt, body);
6949 gimple_omp_set_body (stmt, NULL);
6950 gimple_omp_for_set_pre_body (stmt, NULL);
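/* Overall shape of the lowered result (a sketch of the ordering produced
   above): the new GIMPLE_BIND holds

     [rec-input clauses]  [pre-body]  [lowered bound temporaries]
     [OpenACC head]  [lastprivate vinit]  GIMPLE_OMP_FOR  BODY
     GIMPLE_OMP_CONTINUE  [reductions]  [cancel label]  [dlist]
     GIMPLE_OMP_RETURN  [implicit barrier cancel check]  [OpenACC tail]

   where everything up to and including the dlist is first wrapped by
   maybe_catch_exception.  */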
6953 /* Callback for walk_stmts. Check if the current statement only contains
6954 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
6956 static tree
6957 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6958 bool *handled_ops_p,
6959 struct walk_stmt_info *wi)
6961 int *info = (int *) wi->info;
6962 gimple *stmt = gsi_stmt (*gsi_p);
6964 *handled_ops_p = true;
6965 switch (gimple_code (stmt))
6967 WALK_SUBSTMTS;
6969 case GIMPLE_DEBUG:
6970 break;
6971 case GIMPLE_OMP_FOR:
6972 case GIMPLE_OMP_SECTIONS:
6973 *info = *info == 0 ? 1 : -1;
6974 break;
6975 default:
6976 *info = -1;
6977 break;
6979 return NULL;
6982 struct omp_taskcopy_context
6984 /* This field must be at the beginning, as we do "inheritance": Some
6985 callback functions for tree-inline.c (e.g., omp_copy_decl)
6986 receive a copy_body_data pointer that is up-casted to an
6987 omp_context pointer. */
6988 copy_body_data cb;
6989 omp_context *ctx;
6992 static tree
6993 task_copyfn_copy_decl (tree var, copy_body_data *cb)
6995 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
6997 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
6998 return create_tmp_var (TREE_TYPE (var));
7000 return var;
7003 static tree
7004 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7006 tree name, new_fields = NULL, type, f;
7008 type = lang_hooks.types.make_type (RECORD_TYPE);
7009 name = DECL_NAME (TYPE_NAME (orig_type));
7010 name = build_decl (gimple_location (tcctx->ctx->stmt),
7011 TYPE_DECL, name, type);
7012 TYPE_NAME (type) = name;
7014 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7016 tree new_f = copy_node (f);
7017 DECL_CONTEXT (new_f) = type;
7018 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7019 TREE_CHAIN (new_f) = new_fields;
7020 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7021 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7022 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7023 &tcctx->cb, NULL);
7024 new_fields = new_f;
7025 tcctx->cb.decl_map->put (f, new_f);
7027 TYPE_FIELDS (type) = nreverse (new_fields);
7028 layout_type (type);
7029 return type;
7032 /* Create task copyfn. */
7034 static void
7035 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7037 struct function *child_cfun;
7038 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7039 tree record_type, srecord_type, bind, list;
7040 bool record_needs_remap = false, srecord_needs_remap = false;
7041 splay_tree_node n;
7042 struct omp_taskcopy_context tcctx;
7043 location_t loc = gimple_location (task_stmt);
7045 child_fn = gimple_omp_task_copy_fn (task_stmt);
7046 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7047 gcc_assert (child_cfun->cfg == NULL);
7048 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7050 /* Reset DECL_CONTEXT on function arguments. */
7051 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7052 DECL_CONTEXT (t) = child_fn;
7054 /* Populate the function. */
7055 push_gimplify_context ();
7056 push_cfun (child_cfun);
7058 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7059 TREE_SIDE_EFFECTS (bind) = 1;
7060 list = NULL;
7061 DECL_SAVED_TREE (child_fn) = bind;
7062 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7064 /* Remap src and dst argument types if needed. */
7065 record_type = ctx->record_type;
7066 srecord_type = ctx->srecord_type;
7067 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7068 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7070 record_needs_remap = true;
7071 break;
7073 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7074 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7076 srecord_needs_remap = true;
7077 break;
7080 if (record_needs_remap || srecord_needs_remap)
7082 memset (&tcctx, '\0', sizeof (tcctx));
7083 tcctx.cb.src_fn = ctx->cb.src_fn;
7084 tcctx.cb.dst_fn = child_fn;
7085 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7086 gcc_checking_assert (tcctx.cb.src_node);
7087 tcctx.cb.dst_node = tcctx.cb.src_node;
7088 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7089 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7090 tcctx.cb.eh_lp_nr = 0;
7091 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7092 tcctx.cb.decl_map = new hash_map<tree, tree>;
7093 tcctx.ctx = ctx;
7095 if (record_needs_remap)
7096 record_type = task_copyfn_remap_type (&tcctx, record_type);
7097 if (srecord_needs_remap)
7098 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7100 else
7101 tcctx.cb.decl_map = NULL;
7103 arg = DECL_ARGUMENTS (child_fn);
7104 TREE_TYPE (arg) = build_pointer_type (record_type);
7105 sarg = DECL_CHAIN (arg);
7106 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7108 /* First pass: initialize temporaries used in record_type and srecord_type
7109 sizes and field offsets. */
7110 if (tcctx.cb.decl_map)
7111 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7112 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7114 tree *p;
7116 decl = OMP_CLAUSE_DECL (c);
7117 p = tcctx.cb.decl_map->get (decl);
7118 if (p == NULL)
7119 continue;
7120 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7121 sf = (tree) n->value;
7122 sf = *tcctx.cb.decl_map->get (sf);
7123 src = build_simple_mem_ref_loc (loc, sarg);
7124 src = omp_build_component_ref (src, sf);
7125 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7126 append_to_statement_list (t, &list);
7129 /* Second pass: copy shared var pointers and copy construct non-VLA
7130 firstprivate vars. */
7131 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7132 switch (OMP_CLAUSE_CODE (c))
7134 splay_tree_key key;
7135 case OMP_CLAUSE_SHARED:
7136 decl = OMP_CLAUSE_DECL (c);
7137 key = (splay_tree_key) decl;
7138 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7139 key = (splay_tree_key) &DECL_UID (decl);
7140 n = splay_tree_lookup (ctx->field_map, key);
7141 if (n == NULL)
7142 break;
7143 f = (tree) n->value;
7144 if (tcctx.cb.decl_map)
7145 f = *tcctx.cb.decl_map->get (f);
7146 n = splay_tree_lookup (ctx->sfield_map, key);
7147 sf = (tree) n->value;
7148 if (tcctx.cb.decl_map)
7149 sf = *tcctx.cb.decl_map->get (sf);
7150 src = build_simple_mem_ref_loc (loc, sarg);
7151 src = omp_build_component_ref (src, sf);
7152 dst = build_simple_mem_ref_loc (loc, arg);
7153 dst = omp_build_component_ref (dst, f);
7154 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7155 append_to_statement_list (t, &list);
7156 break;
7157 case OMP_CLAUSE_FIRSTPRIVATE:
7158 decl = OMP_CLAUSE_DECL (c);
7159 if (is_variable_sized (decl))
7160 break;
7161 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7162 if (n == NULL)
7163 break;
7164 f = (tree) n->value;
7165 if (tcctx.cb.decl_map)
7166 f = *tcctx.cb.decl_map->get (f);
7167 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7168 if (n != NULL)
7170 sf = (tree) n->value;
7171 if (tcctx.cb.decl_map)
7172 sf = *tcctx.cb.decl_map->get (sf);
7173 src = build_simple_mem_ref_loc (loc, sarg);
7174 src = omp_build_component_ref (src, sf);
7175 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7176 src = build_simple_mem_ref_loc (loc, src);
7178 else
7179 src = decl;
7180 dst = build_simple_mem_ref_loc (loc, arg);
7181 dst = omp_build_component_ref (dst, f);
7182 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7183 append_to_statement_list (t, &list);
7184 break;
7185 case OMP_CLAUSE_PRIVATE:
7186 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7187 break;
7188 decl = OMP_CLAUSE_DECL (c);
7189 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7190 f = (tree) n->value;
7191 if (tcctx.cb.decl_map)
7192 f = *tcctx.cb.decl_map->get (f);
7193 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7194 if (n != NULL)
7196 sf = (tree) n->value;
7197 if (tcctx.cb.decl_map)
7198 sf = *tcctx.cb.decl_map->get (sf);
7199 src = build_simple_mem_ref_loc (loc, sarg);
7200 src = omp_build_component_ref (src, sf);
7201 if (use_pointer_for_field (decl, NULL))
7202 src = build_simple_mem_ref_loc (loc, src);
7204 else
7205 src = decl;
7206 dst = build_simple_mem_ref_loc (loc, arg);
7207 dst = omp_build_component_ref (dst, f);
7208 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7209 append_to_statement_list (t, &list);
7210 break;
7211 default:
7212 break;
7215 /* Last pass: handle VLA firstprivates. */
7216 if (tcctx.cb.decl_map)
7217 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7218 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7220 tree ind, ptr, df;
7222 decl = OMP_CLAUSE_DECL (c);
7223 if (!is_variable_sized (decl))
7224 continue;
7225 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7226 if (n == NULL)
7227 continue;
7228 f = (tree) n->value;
7229 f = *tcctx.cb.decl_map->get (f);
7230 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7231 ind = DECL_VALUE_EXPR (decl);
7232 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7233 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7234 n = splay_tree_lookup (ctx->sfield_map,
7235 (splay_tree_key) TREE_OPERAND (ind, 0));
7236 sf = (tree) n->value;
7237 sf = *tcctx.cb.decl_map->get (sf);
7238 src = build_simple_mem_ref_loc (loc, sarg);
7239 src = omp_build_component_ref (src, sf);
7240 src = build_simple_mem_ref_loc (loc, src);
7241 dst = build_simple_mem_ref_loc (loc, arg);
7242 dst = omp_build_component_ref (dst, f);
7243 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7244 append_to_statement_list (t, &list);
7245 n = splay_tree_lookup (ctx->field_map,
7246 (splay_tree_key) TREE_OPERAND (ind, 0));
7247 df = (tree) n->value;
7248 df = *tcctx.cb.decl_map->get (df);
7249 ptr = build_simple_mem_ref_loc (loc, arg);
7250 ptr = omp_build_component_ref (ptr, df);
7251 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7252 build_fold_addr_expr_loc (loc, dst));
7253 append_to_statement_list (t, &list);
7256 t = build1 (RETURN_EXPR, void_type_node, NULL);
7257 append_to_statement_list (t, &list);
7259 if (tcctx.cb.decl_map)
7260 delete tcctx.cb.decl_map;
7261 pop_gimplify_context (NULL);
7262 BIND_EXPR_BODY (bind) = list;
7263 pop_cfun ();
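/* Rough shape of the function built here (an illustration; the field names
   are made up):

     void .omp_task_copy_fn (struct .omp_data_t *dst,
                             struct .omp_data_s *src)
     {
       dst->shared_p = src->shared_p;    -- shared vars: copy the pointer
       dst->fp = copy-ctor (src->fp);    -- firstprivate: copy-construct
       ... VLA firstprivates are copied last, and their pointer fields are
       redirected to the new copies ...
     }

   The runtime invokes it when a task's firstprivate block must be copied
   after the encountering thread has continued past the task construct.  */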
7266 static void
7267 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7269 tree c, clauses;
7270 gimple *g;
7271 size_t n_in = 0, n_out = 0, idx = 2, i;
7273 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7274 gcc_assert (clauses);
7275 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7276 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7277 switch (OMP_CLAUSE_DEPEND_KIND (c))
7279 case OMP_CLAUSE_DEPEND_IN:
7280 n_in++;
7281 break;
7282 case OMP_CLAUSE_DEPEND_OUT:
7283 case OMP_CLAUSE_DEPEND_INOUT:
7284 n_out++;
7285 break;
7286 case OMP_CLAUSE_DEPEND_SOURCE:
7287 case OMP_CLAUSE_DEPEND_SINK:
7288 /* FALLTHRU */
7289 default:
7290 gcc_unreachable ();
7292 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7293 tree array = create_tmp_var (type);
7294 TREE_ADDRESSABLE (array) = 1;
7295 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7296 NULL_TREE);
7297 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7298 gimple_seq_add_stmt (iseq, g);
7299 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7300 NULL_TREE);
7301 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7302 gimple_seq_add_stmt (iseq, g);
7303 for (i = 0; i < 2; i++)
7305 if ((i ? n_in : n_out) == 0)
7306 continue;
7307 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7308 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7309 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7311 tree t = OMP_CLAUSE_DECL (c);
7312 t = fold_convert (ptr_type_node, t);
7313 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7314 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7315 NULL_TREE, NULL_TREE);
7316 g = gimple_build_assign (r, t);
7317 gimple_seq_add_stmt (iseq, g);
7320 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7321 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7322 OMP_CLAUSE_CHAIN (c) = *pclauses;
7323 *pclauses = c;
7324 tree clobber = build_constructor (type, NULL);
7325 TREE_THIS_VOLATILE (clobber) = 1;
7326 g = gimple_build_assign (array, clobber);
7327 gimple_seq_add_stmt (oseq, g);
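/* E.g. (illustration; D.tmp is a made-up temporary name): for
   "depend(out: a) depend(in: b, c)" the array built above is

     void *D.tmp[5] = { (void *) 3, (void *) 1, &a, &b, &c };

   element 0 holding the total number of dependences, element 1 the number
   of out/inout entries, followed by the out/inout addresses and then the
   in addresses.  The array's address reaches the runtime through the new
   OMP_CLAUSE_DEPEND, and the array is clobbered in *OSEQ once the
   construct is done with it.  */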
7330 /* Lower the OpenMP parallel or task directive in the current statement
7331 in GSI_P. CTX holds context information for the directive. */
7333 static void
7334 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7336 tree clauses;
7337 tree child_fn, t;
7338 gimple *stmt = gsi_stmt (*gsi_p);
7339 gbind *par_bind, *bind, *dep_bind = NULL;
7340 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7341 location_t loc = gimple_location (stmt);
7343 clauses = gimple_omp_taskreg_clauses (stmt);
7344 par_bind
7345 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7346 par_body = gimple_bind_body (par_bind);
7347 child_fn = ctx->cb.dst_fn;
7348 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7349 && !gimple_omp_parallel_combined_p (stmt))
7351 struct walk_stmt_info wi;
7352 int ws_num = 0;
7354 memset (&wi, 0, sizeof (wi));
7355 wi.info = &ws_num;
7356 wi.val_only = true;
7357 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7358 if (ws_num == 1)
7359 gimple_omp_parallel_set_combined_p (stmt, true);
7361 gimple_seq dep_ilist = NULL;
7362 gimple_seq dep_olist = NULL;
7363 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7364 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7366 push_gimplify_context ();
7367 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7368 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7369 &dep_ilist, &dep_olist);
7372 if (ctx->srecord_type)
7373 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7375 push_gimplify_context ();
7377 par_olist = NULL;
7378 par_ilist = NULL;
7379 par_rlist = NULL;
7380 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7381 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7382 if (phony_construct && ctx->record_type)
7384 gcc_checking_assert (!ctx->receiver_decl);
7385 ctx->receiver_decl = create_tmp_var
7386 (build_reference_type (ctx->record_type), ".omp_rec");
7388 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7389 lower_omp (&par_body, ctx);
7390 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7391 lower_reduction_clauses (clauses, &par_rlist, ctx);
7393 /* Declare all the variables created by mapping and the variables
7394 declared in the scope of the parallel body. */
7395 record_vars_into (ctx->block_vars, child_fn);
7396 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7398 if (ctx->record_type)
7400 ctx->sender_decl
7401 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7402 : ctx->record_type, ".omp_data_o");
7403 DECL_NAMELESS (ctx->sender_decl) = 1;
7404 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7405 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7408 olist = NULL;
7409 ilist = NULL;
7410 lower_send_clauses (clauses, &ilist, &olist, ctx);
7411 lower_send_shared_vars (&ilist, &olist, ctx);
7413 if (ctx->record_type)
7415 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7416 TREE_THIS_VOLATILE (clobber) = 1;
7417 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7418 clobber));
7421 /* Once all the expansions are done, sequence all the different
7422 fragments inside gimple_omp_body. */
7424 new_body = NULL;
7426 if (ctx->record_type)
7428 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7429 /* fixup_child_record_type might have changed receiver_decl's type. */
7430 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7431 gimple_seq_add_stmt (&new_body,
7432 gimple_build_assign (ctx->receiver_decl, t));
7435 gimple_seq_add_seq (&new_body, par_ilist);
7436 gimple_seq_add_seq (&new_body, par_body);
7437 gimple_seq_add_seq (&new_body, par_rlist);
7438 if (ctx->cancellable)
7439 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7440 gimple_seq_add_seq (&new_body, par_olist);
7441 new_body = maybe_catch_exception (new_body);
7442 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7443 gimple_seq_add_stmt (&new_body,
7444 gimple_build_omp_continue (integer_zero_node,
7445 integer_zero_node));
7446 if (!phony_construct)
7448 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7449 gimple_omp_set_body (stmt, new_body);
7452 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7453 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7454 gimple_bind_add_seq (bind, ilist);
7455 if (!phony_construct)
7456 gimple_bind_add_stmt (bind, stmt);
7457 else
7458 gimple_bind_add_seq (bind, new_body);
7459 gimple_bind_add_seq (bind, olist);
7461 pop_gimplify_context (NULL);
7463 if (dep_bind)
7465 gimple_bind_add_seq (dep_bind, dep_ilist);
7466 gimple_bind_add_stmt (dep_bind, bind);
7467 gimple_bind_add_seq (dep_bind, dep_olist);
7468 pop_gimplify_context (dep_bind);
7472 /* Lower the GIMPLE_OMP_TARGET in the current statement
7473 in GSI_P. CTX holds context information for the directive. */
7475 static void
7476 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7478 tree clauses;
7479 tree child_fn, t, c;
7480 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7481 gbind *tgt_bind, *bind, *dep_bind = NULL;
7482 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7483 location_t loc = gimple_location (stmt);
7484 bool offloaded, data_region;
7485 unsigned int map_cnt = 0;
7487 offloaded = is_gimple_omp_offloaded (stmt);
7488 switch (gimple_omp_target_kind (stmt))
7490 case GF_OMP_TARGET_KIND_REGION:
7491 case GF_OMP_TARGET_KIND_UPDATE:
7492 case GF_OMP_TARGET_KIND_ENTER_DATA:
7493 case GF_OMP_TARGET_KIND_EXIT_DATA:
7494 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7495 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7496 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7497 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7498 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7499 data_region = false;
7500 break;
7501 case GF_OMP_TARGET_KIND_DATA:
7502 case GF_OMP_TARGET_KIND_OACC_DATA:
7503 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7504 data_region = true;
7505 break;
7506 default:
7507 gcc_unreachable ();
7510 clauses = gimple_omp_target_clauses (stmt);
7512 gimple_seq dep_ilist = NULL;
7513 gimple_seq dep_olist = NULL;
7514 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7516 push_gimplify_context ();
7517 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7518 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7519 &dep_ilist, &dep_olist);
7522 tgt_bind = NULL;
7523 tgt_body = NULL;
7524 if (offloaded)
7526 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7527 tgt_body = gimple_bind_body (tgt_bind);
7529 else if (data_region)
7530 tgt_body = gimple_omp_body (stmt);
7531 child_fn = ctx->cb.dst_fn;
7533 push_gimplify_context ();
7534 fplist = NULL;
7536 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7537 switch (OMP_CLAUSE_CODE (c))
7539 tree var, x;
7541 default:
7542 break;
7543 case OMP_CLAUSE_MAP:
7544 #if CHECKING_P
7545 /* First check what we're prepared to handle in the following. */
7546 switch (OMP_CLAUSE_MAP_KIND (c))
7548 case GOMP_MAP_ALLOC:
7549 case GOMP_MAP_TO:
7550 case GOMP_MAP_FROM:
7551 case GOMP_MAP_TOFROM:
7552 case GOMP_MAP_POINTER:
7553 case GOMP_MAP_TO_PSET:
7554 case GOMP_MAP_DELETE:
7555 case GOMP_MAP_RELEASE:
7556 case GOMP_MAP_ALWAYS_TO:
7557 case GOMP_MAP_ALWAYS_FROM:
7558 case GOMP_MAP_ALWAYS_TOFROM:
7559 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7560 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7561 case GOMP_MAP_STRUCT:
7562 case GOMP_MAP_ALWAYS_POINTER:
7563 break;
7564 case GOMP_MAP_FORCE_ALLOC:
7565 case GOMP_MAP_FORCE_TO:
7566 case GOMP_MAP_FORCE_FROM:
7567 case GOMP_MAP_FORCE_TOFROM:
7568 case GOMP_MAP_FORCE_PRESENT:
7569 case GOMP_MAP_FORCE_DEVICEPTR:
7570 case GOMP_MAP_DEVICE_RESIDENT:
7571 case GOMP_MAP_LINK:
7572 gcc_assert (is_gimple_omp_oacc (stmt));
7573 break;
7574 default:
7575 gcc_unreachable ();
7577 #endif
7578 /* FALLTHRU */
7579 case OMP_CLAUSE_TO:
7580 case OMP_CLAUSE_FROM:
7581 oacc_firstprivate:
7582 var = OMP_CLAUSE_DECL (c);
7583 if (!DECL_P (var))
7585 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7586 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7587 && (OMP_CLAUSE_MAP_KIND (c)
7588 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7589 map_cnt++;
7590 continue;
7593 if (DECL_SIZE (var)
7594 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7596 tree var2 = DECL_VALUE_EXPR (var);
7597 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7598 var2 = TREE_OPERAND (var2, 0);
7599 gcc_assert (DECL_P (var2));
7600 var = var2;
7603 if (offloaded
7604 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7605 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7606 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7608 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7610 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7611 && varpool_node::get_create (var)->offloadable)
7612 continue;
7614 tree type = build_pointer_type (TREE_TYPE (var));
7615 tree new_var = lookup_decl (var, ctx);
7616 x = create_tmp_var_raw (type, get_name (new_var));
7617 gimple_add_tmp_var (x);
7618 x = build_simple_mem_ref (x);
7619 SET_DECL_VALUE_EXPR (new_var, x);
7620 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7622 continue;
7625 if (!maybe_lookup_field (var, ctx))
7626 continue;
7628 /* Don't remap oacc parallel reduction variables, because the
7629 intermediate result must be local to each gang. */
7630 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7631 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7633 x = build_receiver_ref (var, true, ctx);
7634 tree new_var = lookup_decl (var, ctx);
7636 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7637 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7638 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7639 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7640 x = build_simple_mem_ref (x);
7641 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7643 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7644 if (omp_is_reference (new_var))
7646 /* Create a local object to hold the instance
7647 value. */
7648 tree type = TREE_TYPE (TREE_TYPE (new_var));
7649 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7650 tree inst = create_tmp_var (type, id);
7651 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7652 x = build_fold_addr_expr (inst);
7654 gimplify_assign (new_var, x, &fplist);
7656 else if (DECL_P (new_var))
7658 SET_DECL_VALUE_EXPR (new_var, x);
7659 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7661 else
7662 gcc_unreachable ();
7664 map_cnt++;
7665 break;
7667 case OMP_CLAUSE_FIRSTPRIVATE:
7668 if (is_oacc_parallel (ctx))
7669 goto oacc_firstprivate;
7670 map_cnt++;
7671 var = OMP_CLAUSE_DECL (c);
7672 if (!omp_is_reference (var)
7673 && !is_gimple_reg_type (TREE_TYPE (var)))
7675 tree new_var = lookup_decl (var, ctx);
7676 if (is_variable_sized (var))
7678 tree pvar = DECL_VALUE_EXPR (var);
7679 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7680 pvar = TREE_OPERAND (pvar, 0);
7681 gcc_assert (DECL_P (pvar));
7682 tree new_pvar = lookup_decl (pvar, ctx);
7683 x = build_fold_indirect_ref (new_pvar);
7684 TREE_THIS_NOTRAP (x) = 1;
7686 else
7687 x = build_receiver_ref (var, true, ctx);
7688 SET_DECL_VALUE_EXPR (new_var, x);
7689 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7691 break;
7693 case OMP_CLAUSE_PRIVATE:
7694 if (is_gimple_omp_oacc (ctx->stmt))
7695 break;
7696 var = OMP_CLAUSE_DECL (c);
7697 if (is_variable_sized (var))
7699 tree new_var = lookup_decl (var, ctx);
7700 tree pvar = DECL_VALUE_EXPR (var);
7701 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7702 pvar = TREE_OPERAND (pvar, 0);
7703 gcc_assert (DECL_P (pvar));
7704 tree new_pvar = lookup_decl (pvar, ctx);
7705 x = build_fold_indirect_ref (new_pvar);
7706 TREE_THIS_NOTRAP (x) = 1;
7707 SET_DECL_VALUE_EXPR (new_var, x);
7708 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7710 break;
7712 case OMP_CLAUSE_USE_DEVICE_PTR:
7713 case OMP_CLAUSE_IS_DEVICE_PTR:
7714 var = OMP_CLAUSE_DECL (c);
7715 map_cnt++;
7716 if (is_variable_sized (var))
7718 tree new_var = lookup_decl (var, ctx);
7719 tree pvar = DECL_VALUE_EXPR (var);
7720 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7721 pvar = TREE_OPERAND (pvar, 0);
7722 gcc_assert (DECL_P (pvar));
7723 tree new_pvar = lookup_decl (pvar, ctx);
7724 x = build_fold_indirect_ref (new_pvar);
7725 TREE_THIS_NOTRAP (x) = 1;
7726 SET_DECL_VALUE_EXPR (new_var, x);
7727 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7729 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7731 tree new_var = lookup_decl (var, ctx);
7732 tree type = build_pointer_type (TREE_TYPE (var));
7733 x = create_tmp_var_raw (type, get_name (new_var));
7734 gimple_add_tmp_var (x);
7735 x = build_simple_mem_ref (x);
7736 SET_DECL_VALUE_EXPR (new_var, x);
7737 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7739 else
7741 tree new_var = lookup_decl (var, ctx);
7742 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7743 gimple_add_tmp_var (x);
7744 SET_DECL_VALUE_EXPR (new_var, x);
7745 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7747 break;
7750 if (offloaded)
7752 target_nesting_level++;
7753 lower_omp (&tgt_body, ctx);
7754 target_nesting_level--;
7756 else if (data_region)
7757 lower_omp (&tgt_body, ctx);
7759 if (offloaded)
7761 /* Declare all the variables created by mapping and the variables
7762 declared in the scope of the target body. */
7763 record_vars_into (ctx->block_vars, child_fn);
7764 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7767 olist = NULL;
7768 ilist = NULL;
7769 if (ctx->record_type)
7771 ctx->sender_decl
7772 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7773 DECL_NAMELESS (ctx->sender_decl) = 1;
7774 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7775 t = make_tree_vec (3);
7776 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7777 TREE_VEC_ELT (t, 1)
7778 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7779 ".omp_data_sizes");
7780 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7781 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7782 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7783 tree tkind_type = short_unsigned_type_node;
7784 int talign_shift = 8;
7785 TREE_VEC_ELT (t, 2)
7786 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7787 ".omp_data_kinds");
7788 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7789 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7790 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7791 gimple_omp_target_set_data_arg (stmt, t);
7793 vec<constructor_elt, va_gc> *vsize;
7794 vec<constructor_elt, va_gc> *vkind;
7795 vec_alloc (vsize, map_cnt);
7796 vec_alloc (vkind, map_cnt);
7797 unsigned int map_idx = 0;
7799 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7800 switch (OMP_CLAUSE_CODE (c))
7802 tree ovar, nc, s, purpose, var, x, type;
7803 unsigned int talign;
7805 default:
7806 break;
7808 case OMP_CLAUSE_MAP:
7809 case OMP_CLAUSE_TO:
7810 case OMP_CLAUSE_FROM:
7811 oacc_firstprivate_map:
7812 nc = c;
7813 ovar = OMP_CLAUSE_DECL (c);
7814 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7815 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7816 || (OMP_CLAUSE_MAP_KIND (c)
7817 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7818 break;
7819 if (!DECL_P (ovar))
7821 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7822 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7824 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7825 == get_base_address (ovar));
7826 nc = OMP_CLAUSE_CHAIN (c);
7827 ovar = OMP_CLAUSE_DECL (nc);
7829 else
7831 tree x = build_sender_ref (ovar, ctx);
7832 tree v
7833 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7834 gimplify_assign (x, v, &ilist);
7835 nc = NULL_TREE;
7838 else
7840 if (DECL_SIZE (ovar)
7841 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7843 tree ovar2 = DECL_VALUE_EXPR (ovar);
7844 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7845 ovar2 = TREE_OPERAND (ovar2, 0);
7846 gcc_assert (DECL_P (ovar2));
7847 ovar = ovar2;
7849 if (!maybe_lookup_field (ovar, ctx))
7850 continue;
7853 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7854 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7855 talign = DECL_ALIGN_UNIT (ovar);
7856 if (nc)
7858 var = lookup_decl_in_outer_ctx (ovar, ctx);
7859 x = build_sender_ref (ovar, ctx);
7861 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7862 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7863 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7864 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7866 gcc_assert (offloaded);
7867 tree avar
7868 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7869 mark_addressable (avar);
7870 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7871 talign = DECL_ALIGN_UNIT (avar);
7872 avar = build_fold_addr_expr (avar);
7873 gimplify_assign (x, avar, &ilist);
7875 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7877 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7878 if (!omp_is_reference (var))
7880 if (is_gimple_reg (var)
7881 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7882 TREE_NO_WARNING (var) = 1;
7883 var = build_fold_addr_expr (var);
7885 else
7886 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7887 gimplify_assign (x, var, &ilist);
7889 else if (is_gimple_reg (var))
7891 gcc_assert (offloaded);
7892 tree avar = create_tmp_var (TREE_TYPE (var));
7893 mark_addressable (avar);
7894 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7895 if (GOMP_MAP_COPY_TO_P (map_kind)
7896 || map_kind == GOMP_MAP_POINTER
7897 || map_kind == GOMP_MAP_TO_PSET
7898 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7900 /* If we need to initialize a temporary
7901 with VAR because it is not addressable, and
7902 the variable hasn't been initialized yet, then
7903 we'll get a warning for the store to avar.
7904 Don't warn in that case, the mapping might
7905 be implicit. */
7906 TREE_NO_WARNING (var) = 1;
7907 gimplify_assign (avar, var, &ilist);
7909 avar = build_fold_addr_expr (avar);
7910 gimplify_assign (x, avar, &ilist);
7911 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7912 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7913 && !TYPE_READONLY (TREE_TYPE (var)))
7915 x = unshare_expr (x);
7916 x = build_simple_mem_ref (x);
7917 gimplify_assign (var, x, &olist);
7920 else
7922 var = build_fold_addr_expr (var);
7923 gimplify_assign (x, var, &ilist);
7926 s = NULL_TREE;
7927 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7929 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7930 s = TREE_TYPE (ovar);
7931 if (TREE_CODE (s) == REFERENCE_TYPE)
7932 s = TREE_TYPE (s);
7933 s = TYPE_SIZE_UNIT (s);
7935 else
7936 s = OMP_CLAUSE_SIZE (c);
7937 if (s == NULL_TREE)
7938 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7939 s = fold_convert (size_type_node, s);
7940 purpose = size_int (map_idx++);
7941 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7942 if (TREE_CODE (s) != INTEGER_CST)
7943 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7945 unsigned HOST_WIDE_INT tkind, tkind_zero;
7946 switch (OMP_CLAUSE_CODE (c))
7948 case OMP_CLAUSE_MAP:
7949 tkind = OMP_CLAUSE_MAP_KIND (c);
7950 tkind_zero = tkind;
7951 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7952 switch (tkind)
7954 case GOMP_MAP_ALLOC:
7955 case GOMP_MAP_TO:
7956 case GOMP_MAP_FROM:
7957 case GOMP_MAP_TOFROM:
7958 case GOMP_MAP_ALWAYS_TO:
7959 case GOMP_MAP_ALWAYS_FROM:
7960 case GOMP_MAP_ALWAYS_TOFROM:
7961 case GOMP_MAP_RELEASE:
7962 case GOMP_MAP_FORCE_TO:
7963 case GOMP_MAP_FORCE_FROM:
7964 case GOMP_MAP_FORCE_TOFROM:
7965 case GOMP_MAP_FORCE_PRESENT:
7966 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7967 break;
7968 case GOMP_MAP_DELETE:
7969 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7970 default:
7971 break;
7973 if (tkind_zero != tkind)
7975 if (integer_zerop (s))
7976 tkind = tkind_zero;
7977 else if (integer_nonzerop (s))
7978 tkind_zero = tkind;
7980 break;
7981 case OMP_CLAUSE_FIRSTPRIVATE:
7982 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7983 tkind = GOMP_MAP_TO;
7984 tkind_zero = tkind;
7985 break;
7986 case OMP_CLAUSE_TO:
7987 tkind = GOMP_MAP_TO;
7988 tkind_zero = tkind;
7989 break;
7990 case OMP_CLAUSE_FROM:
7991 tkind = GOMP_MAP_FROM;
7992 tkind_zero = tkind;
7993 break;
7994 default:
7995 gcc_unreachable ();
7997 gcc_checking_assert (tkind
7998 < (HOST_WIDE_INT_C (1U) << talign_shift));
7999 gcc_checking_assert (tkind_zero
8000 < (HOST_WIDE_INT_C (1U) << talign_shift));
8001 talign = ceil_log2 (talign);
8002 tkind |= talign << talign_shift;
8003 tkind_zero |= talign << talign_shift;
8004 gcc_checking_assert (tkind
8005 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8006 gcc_checking_assert (tkind_zero
8007 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8008 if (tkind == tkind_zero)
8009 x = build_int_cstu (tkind_type, tkind);
8010 else
8012 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8013 x = build3 (COND_EXPR, tkind_type,
8014 fold_build2 (EQ_EXPR, boolean_type_node,
8015 unshare_expr (s), size_zero_node),
8016 build_int_cstu (tkind_type, tkind_zero),
8017 build_int_cstu (tkind_type, tkind));
8019 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8020 if (nc && nc != c)
8021 c = nc;
8022 break;
8024 case OMP_CLAUSE_FIRSTPRIVATE:
8025 if (is_oacc_parallel (ctx))
8026 goto oacc_firstprivate_map;
8027 ovar = OMP_CLAUSE_DECL (c);
8028 if (omp_is_reference (ovar))
8029 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8030 else
8031 talign = DECL_ALIGN_UNIT (ovar);
8032 var = lookup_decl_in_outer_ctx (ovar, ctx);
8033 x = build_sender_ref (ovar, ctx);
8034 tkind = GOMP_MAP_FIRSTPRIVATE;
8035 type = TREE_TYPE (ovar);
8036 if (omp_is_reference (ovar))
8037 type = TREE_TYPE (type);
8038 if ((INTEGRAL_TYPE_P (type)
8039 && TYPE_PRECISION (type) <= POINTER_SIZE)
8040 || TREE_CODE (type) == POINTER_TYPE)
8042 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8043 tree t = var;
8044 if (omp_is_reference (var))
8045 t = build_simple_mem_ref (var);
8046 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8047 TREE_NO_WARNING (var) = 1;
8048 if (TREE_CODE (type) != POINTER_TYPE)
8049 t = fold_convert (pointer_sized_int_node, t);
8050 t = fold_convert (TREE_TYPE (x), t);
8051 gimplify_assign (x, t, &ilist);
8053 else if (omp_is_reference (var))
8054 gimplify_assign (x, var, &ilist);
8055 else if (is_gimple_reg (var))
8057 tree avar = create_tmp_var (TREE_TYPE (var));
8058 mark_addressable (avar);
8059 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8060 TREE_NO_WARNING (var) = 1;
8061 gimplify_assign (avar, var, &ilist);
8062 avar = build_fold_addr_expr (avar);
8063 gimplify_assign (x, avar, &ilist);
8065 else
8067 var = build_fold_addr_expr (var);
8068 gimplify_assign (x, var, &ilist);
8070 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8071 s = size_int (0);
8072 else if (omp_is_reference (ovar))
8073 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8074 else
8075 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8076 s = fold_convert (size_type_node, s);
8077 purpose = size_int (map_idx++);
8078 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8079 if (TREE_CODE (s) != INTEGER_CST)
8080 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8082 gcc_checking_assert (tkind
8083 < (HOST_WIDE_INT_C (1U) << talign_shift));
8084 talign = ceil_log2 (talign);
8085 tkind |= talign << talign_shift;
8086 gcc_checking_assert (tkind
8087 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8088 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8089 build_int_cstu (tkind_type, tkind));
8090 break;
8092 case OMP_CLAUSE_USE_DEVICE_PTR:
8093 case OMP_CLAUSE_IS_DEVICE_PTR:
8094 ovar = OMP_CLAUSE_DECL (c);
8095 var = lookup_decl_in_outer_ctx (ovar, ctx);
8096 x = build_sender_ref (ovar, ctx);
8097 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8098 tkind = GOMP_MAP_USE_DEVICE_PTR;
8099 else
8100 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8101 type = TREE_TYPE (ovar);
8102 if (TREE_CODE (type) == ARRAY_TYPE)
8103 var = build_fold_addr_expr (var);
8104 else
8106 if (omp_is_reference (ovar))
8108 type = TREE_TYPE (type);
8109 if (TREE_CODE (type) != ARRAY_TYPE)
8110 var = build_simple_mem_ref (var);
8111 var = fold_convert (TREE_TYPE (x), var);
8114 gimplify_assign (x, var, &ilist);
8115 s = size_int (0);
8116 purpose = size_int (map_idx++);
8117 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8118 gcc_checking_assert (tkind
8119 < (HOST_WIDE_INT_C (1U) << talign_shift));
8120 gcc_checking_assert (tkind
8121 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8122 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8123 build_int_cstu (tkind_type, tkind));
8124 break;
8127 gcc_assert (map_idx == map_cnt);
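/* A worked example of the encoding just computed: map(tofrom: d) of a
   double with 8-byte alignment gets tkind = GOMP_MAP_TOFROM (3) and
   talign = ceil_log2 (8) = 3, so its .omp_data_kinds entry is
   3 | (3 << talign_shift) = 0x303 in the short unsigned kinds array.
   libgomp then decodes the low byte as the map kind and the high byte
   (roughly) as the log2 alignment.  */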
8129 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8130 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8131 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8132 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8133 for (int i = 1; i <= 2; i++)
8134 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8136 gimple_seq initlist = NULL;
8137 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8138 TREE_VEC_ELT (t, i)),
8139 &initlist, true, NULL_TREE);
8140 gimple_seq_add_seq (&ilist, initlist);
8142 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8143 NULL);
8144 TREE_THIS_VOLATILE (clobber) = 1;
8145 gimple_seq_add_stmt (&olist,
8146 gimple_build_assign (TREE_VEC_ELT (t, i),
8147 clobber));
8150 tree clobber = build_constructor (ctx->record_type, NULL);
8151 TREE_THIS_VOLATILE (clobber) = 1;
8152 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8153 clobber));
8156 /* Once all the expansions are done, sequence all the different
8157 fragments inside gimple_omp_body. */
8159 new_body = NULL;
8161 if (offloaded
8162 && ctx->record_type)
8164 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8165 /* fixup_child_record_type might have changed receiver_decl's type. */
8166 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8167 gimple_seq_add_stmt (&new_body,
8168 gimple_build_assign (ctx->receiver_decl, t));
8170 gimple_seq_add_seq (&new_body, fplist);
8172 if (offloaded || data_region)
8174 tree prev = NULL_TREE;
8175 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8176 switch (OMP_CLAUSE_CODE (c))
8178 tree var, x;
8179 default:
8180 break;
8181 case OMP_CLAUSE_FIRSTPRIVATE:
8182 if (is_gimple_omp_oacc (ctx->stmt))
8183 break;
8184 var = OMP_CLAUSE_DECL (c);
8185 if (omp_is_reference (var)
8186 || is_gimple_reg_type (TREE_TYPE (var)))
8188 tree new_var = lookup_decl (var, ctx);
8189 tree type;
8190 type = TREE_TYPE (var);
8191 if (omp_is_reference (var))
8192 type = TREE_TYPE (type);
8193 if ((INTEGRAL_TYPE_P (type)
8194 && TYPE_PRECISION (type) <= POINTER_SIZE)
8195 || TREE_CODE (type) == POINTER_TYPE)
8197 x = build_receiver_ref (var, false, ctx);
8198 if (TREE_CODE (type) != POINTER_TYPE)
8199 x = fold_convert (pointer_sized_int_node, x);
8200 x = fold_convert (type, x);
8201 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8202 fb_rvalue);
8203 if (omp_is_reference (var))
8205 tree v = create_tmp_var_raw (type, get_name (var));
8206 gimple_add_tmp_var (v);
8207 TREE_ADDRESSABLE (v) = 1;
8208 gimple_seq_add_stmt (&new_body,
8209 gimple_build_assign (v, x));
8210 x = build_fold_addr_expr (v);
8212 gimple_seq_add_stmt (&new_body,
8213 gimple_build_assign (new_var, x));
8215 else
8217 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8218 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8219 fb_rvalue);
8220 gimple_seq_add_stmt (&new_body,
8221 gimple_build_assign (new_var, x));
8224 else if (is_variable_sized (var))
8226 tree pvar = DECL_VALUE_EXPR (var);
8227 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8228 pvar = TREE_OPERAND (pvar, 0);
8229 gcc_assert (DECL_P (pvar));
8230 tree new_var = lookup_decl (pvar, ctx);
8231 x = build_receiver_ref (var, false, ctx);
8232 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8233 gimple_seq_add_stmt (&new_body,
8234 gimple_build_assign (new_var, x));
8236 break;
8237 case OMP_CLAUSE_PRIVATE:
8238 if (is_gimple_omp_oacc (ctx->stmt))
8239 break;
8240 var = OMP_CLAUSE_DECL (c);
8241 if (omp_is_reference (var))
8243 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8244 tree new_var = lookup_decl (var, ctx);
8245 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8246 if (TREE_CONSTANT (x))
8248 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8249 get_name (var));
8250 gimple_add_tmp_var (x);
8251 TREE_ADDRESSABLE (x) = 1;
8252 x = build_fold_addr_expr_loc (clause_loc, x);
8254 else
8255 break;
8257 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8258 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8259 gimple_seq_add_stmt (&new_body,
8260 gimple_build_assign (new_var, x));
8262 break;
8263 case OMP_CLAUSE_USE_DEVICE_PTR:
8264 case OMP_CLAUSE_IS_DEVICE_PTR:
8265 var = OMP_CLAUSE_DECL (c);
8266 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8267 x = build_sender_ref (var, ctx);
8268 else
8269 x = build_receiver_ref (var, false, ctx);
8270 if (is_variable_sized (var))
8272 tree pvar = DECL_VALUE_EXPR (var);
8273 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8274 pvar = TREE_OPERAND (pvar, 0);
8275 gcc_assert (DECL_P (pvar));
8276 tree new_var = lookup_decl (pvar, ctx);
8277 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8278 gimple_seq_add_stmt (&new_body,
8279 gimple_build_assign (new_var, x));
8281 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8283 tree new_var = lookup_decl (var, ctx);
8284 new_var = DECL_VALUE_EXPR (new_var);
8285 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8286 new_var = TREE_OPERAND (new_var, 0);
8287 gcc_assert (DECL_P (new_var));
8288 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8289 gimple_seq_add_stmt (&new_body,
8290 gimple_build_assign (new_var, x));
8292 else
8294 tree type = TREE_TYPE (var);
8295 tree new_var = lookup_decl (var, ctx);
8296 if (omp_is_reference (var))
8298 type = TREE_TYPE (type);
8299 if (TREE_CODE (type) != ARRAY_TYPE)
8301 tree v = create_tmp_var_raw (type, get_name (var));
8302 gimple_add_tmp_var (v);
8303 TREE_ADDRESSABLE (v) = 1;
8304 x = fold_convert (type, x);
8305 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8306 fb_rvalue);
8307 gimple_seq_add_stmt (&new_body,
8308 gimple_build_assign (v, x));
8309 x = build_fold_addr_expr (v);
8312 new_var = DECL_VALUE_EXPR (new_var);
8313 x = fold_convert (TREE_TYPE (new_var), x);
8314 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8315 gimple_seq_add_stmt (&new_body,
8316 gimple_build_assign (new_var, x));
8318 break;
8320 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8321 so that any firstprivate vars that OMP_CLAUSE_SIZE may refer to
8322 have already been handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8323 or references to VLAs. */
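/* Illustrative example: for

     #pragma omp target map(a[10:n])

   the transfer starts at &a[10], and the trailing
   GOMP_MAP_FIRSTPRIVATE_POINTER clause records that offset as a bias
   in OMP_CLAUSE_SIZE; the loop below re-derives the original base
   pointer with a POINTER_PLUS_EXPR of the negated bias.  */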
8324 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8325 switch (OMP_CLAUSE_CODE (c))
8327 tree var;
8328 default:
8329 break;
8330 case OMP_CLAUSE_MAP:
8331 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8332 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8334 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8335 poly_int64 offset = 0;
8336 gcc_assert (prev);
8337 var = OMP_CLAUSE_DECL (c);
8338 if (DECL_P (var)
8339 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8340 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8341 ctx))
8342 && varpool_node::get_create (var)->offloadable)
8343 break;
8344 if (TREE_CODE (var) == INDIRECT_REF
8345 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8346 var = TREE_OPERAND (var, 0);
8347 if (TREE_CODE (var) == COMPONENT_REF)
8349 var = get_addr_base_and_unit_offset (var, &offset);
8350 gcc_assert (var != NULL_TREE && DECL_P (var));
8352 else if (DECL_SIZE (var)
8353 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8355 tree var2 = DECL_VALUE_EXPR (var);
8356 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8357 var2 = TREE_OPERAND (var2, 0);
8358 gcc_assert (DECL_P (var2));
8359 var = var2;
8361 tree new_var = lookup_decl (var, ctx), x;
8362 tree type = TREE_TYPE (new_var);
8363 bool is_ref;
8364 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8365 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8366 == COMPONENT_REF))
8368 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8369 is_ref = true;
8370 new_var = build2 (MEM_REF, type,
8371 build_fold_addr_expr (new_var),
8372 build_int_cst (build_pointer_type (type),
8373 offset));
8375 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8377 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8378 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8379 new_var = build2 (MEM_REF, type,
8380 build_fold_addr_expr (new_var),
8381 build_int_cst (build_pointer_type (type),
8382 offset));
8384 else
8385 is_ref = omp_is_reference (var);
8386 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8387 is_ref = false;
8388 bool ref_to_array = false;
8389 if (is_ref)
8391 type = TREE_TYPE (type);
8392 if (TREE_CODE (type) == ARRAY_TYPE)
8394 type = build_pointer_type (type);
8395 ref_to_array = true;
8398 else if (TREE_CODE (type) == ARRAY_TYPE)
8400 tree decl2 = DECL_VALUE_EXPR (new_var);
8401 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8402 decl2 = TREE_OPERAND (decl2, 0);
8403 gcc_assert (DECL_P (decl2));
8404 new_var = decl2;
8405 type = TREE_TYPE (new_var);
8407 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8408 x = fold_convert_loc (clause_loc, type, x);
8409 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8411 tree bias = OMP_CLAUSE_SIZE (c);
8412 if (DECL_P (bias))
8413 bias = lookup_decl (bias, ctx);
8414 bias = fold_convert_loc (clause_loc, sizetype, bias);
8415 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8416 bias);
8417 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8418 TREE_TYPE (x), x, bias);
8420 if (ref_to_array)
8421 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8422 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8423 if (is_ref && !ref_to_array)
8425 tree t = create_tmp_var_raw (type, get_name (var));
8426 gimple_add_tmp_var (t);
8427 TREE_ADDRESSABLE (t) = 1;
8428 gimple_seq_add_stmt (&new_body,
8429 gimple_build_assign (t, x));
8430 x = build_fold_addr_expr_loc (clause_loc, t);
8432 gimple_seq_add_stmt (&new_body,
8433 gimple_build_assign (new_var, x));
8434 prev = NULL_TREE;
8436 else if (OMP_CLAUSE_CHAIN (c)
8437 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8438 == OMP_CLAUSE_MAP
8439 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8440 == GOMP_MAP_FIRSTPRIVATE_POINTER
8441 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8442 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8443 prev = c;
8444 break;
8445 case OMP_CLAUSE_PRIVATE:
8446 var = OMP_CLAUSE_DECL (c);
8447 if (is_variable_sized (var))
8449 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8450 tree new_var = lookup_decl (var, ctx);
8451 tree pvar = DECL_VALUE_EXPR (var);
8452 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8453 pvar = TREE_OPERAND (pvar, 0);
8454 gcc_assert (DECL_P (pvar));
8455 tree new_pvar = lookup_decl (pvar, ctx);
8456 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8457 tree al = size_int (DECL_ALIGN (var));
8458 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8459 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8460 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8461 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8462 gimple_seq_add_stmt (&new_body,
8463 gimple_build_assign (new_pvar, x));
8465 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8467 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8468 tree new_var = lookup_decl (var, ctx);
8469 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8470 if (TREE_CONSTANT (x))
8471 break;
8472 else
8474 tree atmp
8475 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8476 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8477 tree al = size_int (TYPE_ALIGN (rtype));
8478 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8481 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8482 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8483 gimple_seq_add_stmt (&new_body,
8484 gimple_build_assign (new_var, x));
8486 break;
8489 gimple_seq fork_seq = NULL;
8490 gimple_seq join_seq = NULL;
8492 if (is_oacc_parallel (ctx))
8494 /* If there are reductions on the offloaded region itself, treat
8495 them as a dummy GANG loop. */
8496 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8498 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8499 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8502 gimple_seq_add_seq (&new_body, fork_seq);
8503 gimple_seq_add_seq (&new_body, tgt_body);
8504 gimple_seq_add_seq (&new_body, join_seq);
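/* Illustrative example of the dummy GANG loop treatment: for

     #pragma acc parallel copy(sum) reduction(+:sum)
       ...

   fork_seq initializes the private reduction copy before tgt_body runs
   and join_seq folds the partial results back into sum afterwards, as
   if the region itself were a one-trip gang loop.  */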
8506 if (offloaded)
8507 new_body = maybe_catch_exception (new_body);
8509 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8510 gimple_omp_set_body (stmt, new_body);
8513 bind = gimple_build_bind (NULL, NULL,
8514 tgt_bind ? gimple_bind_block (tgt_bind)
8515 : NULL_TREE);
8516 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8517 gimple_bind_add_seq (bind, ilist);
8518 gimple_bind_add_stmt (bind, stmt);
8519 gimple_bind_add_seq (bind, olist);
8521 pop_gimplify_context (NULL);
8523 if (dep_bind)
8525 gimple_bind_add_seq (dep_bind, dep_ilist);
8526 gimple_bind_add_stmt (dep_bind, bind);
8527 gimple_bind_add_seq (dep_bind, dep_olist);
8528 pop_gimplify_context (dep_bind);
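/* Putting the pieces together, an illustrative sketch (hypothetical
   user code) of what this function produces for

     #pragma omp target map(tofrom: x)
       x += 1;

   is roughly

     .omp_data_arr.x = &x;
     .omp_data_sizes = { sizeof (x) };
     .omp_data_kinds = { GOMP_MAP_TOFROM | alignment bits };
     GIMPLE_OMP_TARGET [data arg: <.omp_data_arr, .omp_data_sizes,
				   .omp_data_kinds>]
       .omp_data_i = &.omp_data_arr;
       ... *(.omp_data_i->x) += 1 ...

   pass_expand_omp later replaces the construct with a call into libgomp
   (GOMP_target_ext for OpenMP target regions) plus an offloaded child
   function.  */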
8532 /* Expand code for an OpenMP teams directive. */
8534 static void
8535 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8537 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8538 push_gimplify_context ();
8540 tree block = make_node (BLOCK);
8541 gbind *bind = gimple_build_bind (NULL, NULL, block);
8542 gsi_replace (gsi_p, bind, true);
8543 gimple_seq bind_body = NULL;
8544 gimple_seq dlist = NULL;
8545 gimple_seq olist = NULL;
8547 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8548 OMP_CLAUSE_NUM_TEAMS);
8549 if (num_teams == NULL_TREE)
8550 num_teams = build_int_cst (unsigned_type_node, 0);
8551 else
8553 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8554 num_teams = fold_convert (unsigned_type_node, num_teams);
8555 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8557 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8558 OMP_CLAUSE_THREAD_LIMIT);
8559 if (thread_limit == NULL_TREE)
8560 thread_limit = build_int_cst (unsigned_type_node, 0);
8561 else
8563 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8564 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8565 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8566 fb_rvalue);
8569 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8570 &bind_body, &dlist, ctx, NULL);
8571 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8572 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8573 if (!gimple_omp_teams_grid_phony (teams_stmt))
8575 gimple_seq_add_stmt (&bind_body, teams_stmt);
8576 location_t loc = gimple_location (teams_stmt);
8577 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8578 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8579 gimple_set_location (call, loc);
8580 gimple_seq_add_stmt (&bind_body, call);
8583 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8584 gimple_omp_set_body (teams_stmt, NULL);
8585 gimple_seq_add_seq (&bind_body, olist);
8586 gimple_seq_add_seq (&bind_body, dlist);
8587 if (!gimple_omp_teams_grid_phony (teams_stmt))
8588 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8589 gimple_bind_set_body (bind, bind_body);
8591 pop_gimplify_context (bind);
8593 gimple_bind_append_vars (bind, ctx->block_vars);
8594 BLOCK_VARS (block) = ctx->block_vars;
8595 if (BLOCK_VARS (block))
8596 TREE_USED (block) = 1;
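/* Illustrative example: for

     #pragma omp teams num_teams(4) thread_limit(64)
       ...

   the code above emits

     __builtin_GOMP_teams (4, 64);

   ahead of the teams body; an omitted clause contributes 0, which asks
   the runtime to pick a value itself.  */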
8599 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8601 static void
8602 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8604 gimple *stmt = gsi_stmt (*gsi_p);
8605 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8606 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8607 gimple_build_omp_return (false));
8611 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8612 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8613 of OMP context, but with task_shared_vars set. */
8615 static tree
8616 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8617 void *data)
8619 tree t = *tp;
8621 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8622 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8623 return t;
8625 if (task_shared_vars
8626 && DECL_P (t)
8627 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8628 return t;
8630 /* If a global variable has been privatized, TREE_CONSTANT on
8631 ADDR_EXPR might be wrong. */
8632 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8633 recompute_tree_invariant_for_addr_expr (t);
8635 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8636 return NULL_TREE;
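/* For example (illustrative): a VLA whose DECL_VALUE_EXPR rewrites it
   as *a.ptr, or a privatized global whose ADDR_EXPR is no longer
   TREE_CONSTANT, can make a previously valid GIMPLE operand invalid;
   walk_tree with this predicate flags such statements so that
   lower_omp_regimplify_operands can regimplify them.  */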
8639 /* Data to be communicated between lower_omp_regimplify_operands and
8640 lower_omp_regimplify_operands_p. */
8642 struct lower_omp_regimplify_operands_data
8644 omp_context *ctx;
8645 vec<tree> *decls;
8648 /* Helper function for lower_omp_regimplify_operands. Find
8649 omp_member_access_dummy_var vars and adjust temporarily their
8650 DECL_VALUE_EXPRs if needed. */
8652 static tree
8653 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8654 void *data)
8656 tree t = omp_member_access_dummy_var (*tp);
8657 if (t)
8659 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8660 lower_omp_regimplify_operands_data *ldata
8661 = (lower_omp_regimplify_operands_data *) wi->info;
8662 tree o = maybe_lookup_decl (t, ldata->ctx);
8663 if (o != t)
8665 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8666 ldata->decls->safe_push (*tp);
8667 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8668 SET_DECL_VALUE_EXPR (*tp, v);
8671 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8672 return NULL_TREE;
8675 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8676 of omp_member_access_dummy_var vars during regimplification. */
8678 static void
8679 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8680 gimple_stmt_iterator *gsi_p)
8682 auto_vec<tree, 10> decls;
8683 if (ctx)
8685 struct walk_stmt_info wi;
8686 memset (&wi, '\0', sizeof (wi));
8687 struct lower_omp_regimplify_operands_data data;
8688 data.ctx = ctx;
8689 data.decls = &decls;
8690 wi.info = &data;
8691 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8693 gimple_regimplify_operands (stmt, gsi_p);
8694 while (!decls.is_empty ())
8696 tree t = decls.pop ();
8697 tree v = decls.pop ();
8698 SET_DECL_VALUE_EXPR (t, v);
8702 static void
8703 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8705 gimple *stmt = gsi_stmt (*gsi_p);
8706 struct walk_stmt_info wi;
8707 gcall *call_stmt;
8709 if (gimple_has_location (stmt))
8710 input_location = gimple_location (stmt);
8712 if (task_shared_vars)
8713 memset (&wi, '\0', sizeof (wi));
8715 /* If we have issued syntax errors, avoid doing any heavy lifting.
8716 Just replace the OMP directives with a NOP to avoid
8717 confusing RTL expansion. */
8718 if (seen_error () && is_gimple_omp (stmt))
8720 gsi_replace (gsi_p, gimple_build_nop (), true);
8721 return;
8724 switch (gimple_code (stmt))
8726 case GIMPLE_COND:
8728 gcond *cond_stmt = as_a <gcond *> (stmt);
8729 if ((ctx || task_shared_vars)
8730 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8731 lower_omp_regimplify_p,
8732 ctx ? NULL : &wi, NULL)
8733 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8734 lower_omp_regimplify_p,
8735 ctx ? NULL : &wi, NULL)))
8736 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8738 break;
8739 case GIMPLE_CATCH:
8740 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8741 break;
8742 case GIMPLE_EH_FILTER:
8743 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8744 break;
8745 case GIMPLE_TRY:
8746 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8747 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8748 break;
8749 case GIMPLE_TRANSACTION:
8750 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8751 ctx);
8752 break;
8753 case GIMPLE_BIND:
8754 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8755 break;
8756 case GIMPLE_OMP_PARALLEL:
8757 case GIMPLE_OMP_TASK:
8758 ctx = maybe_lookup_ctx (stmt);
8759 gcc_assert (ctx);
8760 if (ctx->cancellable)
8761 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8762 lower_omp_taskreg (gsi_p, ctx);
8763 break;
8764 case GIMPLE_OMP_FOR:
8765 ctx = maybe_lookup_ctx (stmt);
8766 gcc_assert (ctx);
8767 if (ctx->cancellable)
8768 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8769 lower_omp_for (gsi_p, ctx);
8770 break;
8771 case GIMPLE_OMP_SECTIONS:
8772 ctx = maybe_lookup_ctx (stmt);
8773 gcc_assert (ctx);
8774 if (ctx->cancellable)
8775 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8776 lower_omp_sections (gsi_p, ctx);
8777 break;
8778 case GIMPLE_OMP_SINGLE:
8779 ctx = maybe_lookup_ctx (stmt);
8780 gcc_assert (ctx);
8781 lower_omp_single (gsi_p, ctx);
8782 break;
8783 case GIMPLE_OMP_MASTER:
8784 ctx = maybe_lookup_ctx (stmt);
8785 gcc_assert (ctx);
8786 lower_omp_master (gsi_p, ctx);
8787 break;
8788 case GIMPLE_OMP_TASKGROUP:
8789 ctx = maybe_lookup_ctx (stmt);
8790 gcc_assert (ctx);
8791 lower_omp_taskgroup (gsi_p, ctx);
8792 break;
8793 case GIMPLE_OMP_ORDERED:
8794 ctx = maybe_lookup_ctx (stmt);
8795 gcc_assert (ctx);
8796 lower_omp_ordered (gsi_p, ctx);
8797 break;
8798 case GIMPLE_OMP_CRITICAL:
8799 ctx = maybe_lookup_ctx (stmt);
8800 gcc_assert (ctx);
8801 lower_omp_critical (gsi_p, ctx);
8802 break;
8803 case GIMPLE_OMP_ATOMIC_LOAD:
8804 if ((ctx || task_shared_vars)
8805 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8806 as_a <gomp_atomic_load *> (stmt)),
8807 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8808 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8809 break;
8810 case GIMPLE_OMP_TARGET:
8811 ctx = maybe_lookup_ctx (stmt);
8812 gcc_assert (ctx);
8813 lower_omp_target (gsi_p, ctx);
8814 break;
8815 case GIMPLE_OMP_TEAMS:
8816 ctx = maybe_lookup_ctx (stmt);
8817 gcc_assert (ctx);
8818 lower_omp_teams (gsi_p, ctx);
8819 break;
8820 case GIMPLE_OMP_GRID_BODY:
8821 ctx = maybe_lookup_ctx (stmt);
8822 gcc_assert (ctx);
8823 lower_omp_grid_body (gsi_p, ctx);
8824 break;
8825 case GIMPLE_CALL:
8826 tree fndecl;
8827 call_stmt = as_a <gcall *> (stmt);
8828 fndecl = gimple_call_fndecl (call_stmt);
8829 if (fndecl
8830 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8831 switch (DECL_FUNCTION_CODE (fndecl))
8833 case BUILT_IN_GOMP_BARRIER:
8834 if (ctx == NULL)
8835 break;
8836 /* FALLTHRU */
8837 case BUILT_IN_GOMP_CANCEL:
8838 case BUILT_IN_GOMP_CANCELLATION_POINT:
8839 omp_context *cctx;
8840 cctx = ctx;
8841 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8842 cctx = cctx->outer;
8843 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8844 if (!cctx->cancellable)
8846 if (DECL_FUNCTION_CODE (fndecl)
8847 == BUILT_IN_GOMP_CANCELLATION_POINT)
8849 stmt = gimple_build_nop ();
8850 gsi_replace (gsi_p, stmt, false);
8852 break;
8854 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8856 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8857 gimple_call_set_fndecl (call_stmt, fndecl);
8858 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8860 tree lhs;
8861 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8862 gimple_call_set_lhs (call_stmt, lhs);
8863 tree fallthru_label;
8864 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8865 gimple *g;
8866 g = gimple_build_label (fallthru_label);
8867 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8868 g = gimple_build_cond (NE_EXPR, lhs,
8869 fold_convert (TREE_TYPE (lhs),
8870 boolean_false_node),
8871 cctx->cancel_label, fallthru_label);
8872 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8873 break;
8874 default:
8875 break;
8877 /* FALLTHRU */
8878 default:
8879 if ((ctx || task_shared_vars)
8880 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8881 ctx ? NULL : &wi))
8883 /* Just remove clobbers; this should happen only if we have
8884 "privatized" local addressable variables in SIMD regions.
8885 The clobber isn't needed in that case, and gimplifying the address
8886 of the ARRAY_REF into a pointer and creating a MEM_REF-based
8887 clobber would create worse code than we get with the clobber
8888 dropped. */
8889 if (gimple_clobber_p (stmt))
8891 gsi_replace (gsi_p, gimple_build_nop (), true);
8892 break;
8894 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8896 break;
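/* An illustrative sketch of the cancellation rewrite above: inside a
   cancellable region a call

     __builtin_GOMP_barrier ();

   becomes roughly

     D.1 = __builtin_GOMP_barrier_cancel ();
     if (D.1 != 0) goto <cancel_label>; else goto <fallthru>;
     <fallthru>:

   so a barrier that observes a pending cancellation branches straight
   to the region's cancel label.  */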
8900 static void
8901 lower_omp (gimple_seq *body, omp_context *ctx)
8903 location_t saved_location = input_location;
8904 gimple_stmt_iterator gsi;
8905 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8906 lower_omp_1 (&gsi, ctx);
8907 /* During gimplification, we haven't folded statements inside offloading
8908 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8909 if (target_nesting_level || taskreg_nesting_level)
8910 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8911 fold_stmt (&gsi);
8912 input_location = saved_location;
8915 /* Main entry point. */
8917 static unsigned int
8918 execute_lower_omp (void)
8920 gimple_seq body;
8921 int i;
8922 omp_context *ctx;
8924 /* This pass always runs, to provide PROP_gimple_lomp.
8925 But often, there is nothing to do. */
8926 if (flag_openacc == 0 && flag_openmp == 0
8927 && flag_openmp_simd == 0)
8928 return 0;
8930 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8931 delete_omp_context);
8933 body = gimple_body (current_function_decl);
8935 if (hsa_gen_requested_p ())
8936 omp_grid_gridify_all_targets (&body);
8938 scan_omp (&body, NULL);
8939 gcc_assert (taskreg_nesting_level == 0);
8940 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8941 finish_taskreg_scan (ctx);
8942 taskreg_contexts.release ();
8944 if (all_contexts->root)
8946 if (task_shared_vars)
8947 push_gimplify_context ();
8948 lower_omp (&body, NULL);
8949 if (task_shared_vars)
8950 pop_gimplify_context (NULL);
8953 if (all_contexts)
8955 splay_tree_delete (all_contexts);
8956 all_contexts = NULL;
8958 BITMAP_FREE (task_shared_vars);
8959 return 0;
8962 namespace {
8964 const pass_data pass_data_lower_omp =
8966 GIMPLE_PASS, /* type */
8967 "omplower", /* name */
8968 OPTGROUP_OMP, /* optinfo_flags */
8969 TV_NONE, /* tv_id */
8970 PROP_gimple_any, /* properties_required */
8971 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8972 0, /* properties_destroyed */
8973 0, /* todo_flags_start */
8974 0, /* todo_flags_finish */
8977 class pass_lower_omp : public gimple_opt_pass
8979 public:
8980 pass_lower_omp (gcc::context *ctxt)
8981 : gimple_opt_pass (pass_data_lower_omp, ctxt)
8984 /* opt_pass methods: */
8985 virtual unsigned int execute (function *) { return execute_lower_omp (); }
8987 }; // class pass_lower_omp
8989 } // anon namespace
8991 gimple_opt_pass *
8992 make_pass_lower_omp (gcc::context *ctxt)
8994 return new pass_lower_omp (ctxt);
8997 /* The following is a utility to diagnose structured block violations.
8998 It is not part of the "omplower" pass, as that's invoked too late. It
8999 should be invoked by the respective front ends after gimplification. */
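/* Illustrative example of what this utility rejects:

     #pragma omp parallel
     {
       if (cond)
	 goto out;
     }
    out:;

   diagnose_sb_1 records that label "out" lies outside the parallel
   construct's context, and diagnose_sb_2 then flags the goto with
   "invalid branch to/from OpenMP structured block".  */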
9001 static splay_tree all_labels;
9003 /* Check for mismatched contexts and generate an error if needed. Return
9004 true if an error is detected. */
9006 static bool
9007 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9008 gimple *branch_ctx, gimple *label_ctx)
9010 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9011 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9013 if (label_ctx == branch_ctx)
9014 return false;
9016 const char* kind = NULL;
9018 if (flag_openacc)
9020 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9021 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9023 gcc_checking_assert (kind == NULL);
9024 kind = "OpenACC";
9027 if (kind == NULL)
9029 gcc_checking_assert (flag_openmp || flag_openmp_simd);
9030 kind = "OpenMP";
9033 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9034 so we could traverse it and issue a correct "exit" or "enter" error
9035 message upon a structured block violation.
9037 We built the context by building a list with tree_cons'ing, but there is
9038 no easy counterpart in gimple tuples. It seems like far too much work
9039 for issuing exit/enter error messages. If someone really misses the
9040 distinct error message... patches welcome. */
9042 #if 0
9043 /* Try to avoid confusing the user by producing an error message
9044 with correct "exit" or "enter" verbiage. We prefer "exit"
9045 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9046 if (branch_ctx == NULL)
9047 exit_p = false;
9048 else
9050 while (label_ctx)
9052 if (TREE_VALUE (label_ctx) == branch_ctx)
9054 exit_p = false;
9055 break;
9057 label_ctx = TREE_CHAIN (label_ctx);
9061 if (exit_p)
9062 error ("invalid exit from %s structured block", kind);
9063 else
9064 error ("invalid entry to %s structured block", kind);
9065 #endif
9067 /* If it's obvious we have an invalid entry, be specific about the error. */
9068 if (branch_ctx == NULL)
9069 error ("invalid entry to %s structured block", kind);
9070 else
9072 /* Otherwise, be vague and lazy, but efficient. */
9073 error ("invalid branch to/from %s structured block", kind);
9076 gsi_replace (gsi_p, gimple_build_nop (), false);
9077 return true;
9080 /* Pass 1: Create a minimal tree of structured blocks, and record
9081 where each label is found. */
9083 static tree
9084 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9085 struct walk_stmt_info *wi)
9087 gimple *context = (gimple *) wi->info;
9088 gimple *inner_context;
9089 gimple *stmt = gsi_stmt (*gsi_p);
9091 *handled_ops_p = true;
9093 switch (gimple_code (stmt))
9095 WALK_SUBSTMTS;
9097 case GIMPLE_OMP_PARALLEL:
9098 case GIMPLE_OMP_TASK:
9099 case GIMPLE_OMP_SECTIONS:
9100 case GIMPLE_OMP_SINGLE:
9101 case GIMPLE_OMP_SECTION:
9102 case GIMPLE_OMP_MASTER:
9103 case GIMPLE_OMP_ORDERED:
9104 case GIMPLE_OMP_CRITICAL:
9105 case GIMPLE_OMP_TARGET:
9106 case GIMPLE_OMP_TEAMS:
9107 case GIMPLE_OMP_TASKGROUP:
9108 /* The minimal context here is just the current OMP construct. */
9109 inner_context = stmt;
9110 wi->info = inner_context;
9111 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9112 wi->info = context;
9113 break;
9115 case GIMPLE_OMP_FOR:
9116 inner_context = stmt;
9117 wi->info = inner_context;
9118 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9119 walk them. */
9120 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9121 diagnose_sb_1, NULL, wi);
9122 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9123 wi->info = context;
9124 break;
9126 case GIMPLE_LABEL:
9127 splay_tree_insert (all_labels,
9128 (splay_tree_key) gimple_label_label (
9129 as_a <glabel *> (stmt)),
9130 (splay_tree_value) context);
9131 break;
9133 default:
9134 break;
9137 return NULL_TREE;
9140 /* Pass 2: Check each branch and see if its context differs from that of
9141 the destination label's context. */
9143 static tree
9144 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9145 struct walk_stmt_info *wi)
9147 gimple *context = (gimple *) wi->info;
9148 splay_tree_node n;
9149 gimple *stmt = gsi_stmt (*gsi_p);
9151 *handled_ops_p = true;
9153 switch (gimple_code (stmt))
9155 WALK_SUBSTMTS;
9157 case GIMPLE_OMP_PARALLEL:
9158 case GIMPLE_OMP_TASK:
9159 case GIMPLE_OMP_SECTIONS:
9160 case GIMPLE_OMP_SINGLE:
9161 case GIMPLE_OMP_SECTION:
9162 case GIMPLE_OMP_MASTER:
9163 case GIMPLE_OMP_ORDERED:
9164 case GIMPLE_OMP_CRITICAL:
9165 case GIMPLE_OMP_TARGET:
9166 case GIMPLE_OMP_TEAMS:
9167 case GIMPLE_OMP_TASKGROUP:
9168 wi->info = stmt;
9169 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9170 wi->info = context;
9171 break;
9173 case GIMPLE_OMP_FOR:
9174 wi->info = stmt;
9175 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9176 walk them. */
9177 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9178 diagnose_sb_2, NULL, wi);
9179 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9180 wi->info = context;
9181 break;
9183 case GIMPLE_COND:
9185 gcond *cond_stmt = as_a <gcond *> (stmt);
9186 tree lab = gimple_cond_true_label (cond_stmt);
9187 if (lab)
9189 n = splay_tree_lookup (all_labels,
9190 (splay_tree_key) lab);
9191 diagnose_sb_0 (gsi_p, context,
9192 n ? (gimple *) n->value : NULL);
9194 lab = gimple_cond_false_label (cond_stmt);
9195 if (lab)
9197 n = splay_tree_lookup (all_labels,
9198 (splay_tree_key) lab);
9199 diagnose_sb_0 (gsi_p, context,
9200 n ? (gimple *) n->value : NULL);
9203 break;
9205 case GIMPLE_GOTO:
9207 tree lab = gimple_goto_dest (stmt);
9208 if (TREE_CODE (lab) != LABEL_DECL)
9209 break;
9211 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9212 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9214 break;
9216 case GIMPLE_SWITCH:
9218 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9219 unsigned int i;
9220 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9222 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9223 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9224 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9225 break;
9228 break;
9230 case GIMPLE_RETURN:
9231 diagnose_sb_0 (gsi_p, context, NULL);
9232 break;
9234 default:
9235 break;
9238 return NULL_TREE;
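/* A note on the GIMPLE_RETURN case above (illustrative):

     #pragma omp parallel
       return;

   reaches diagnose_sb_0 with a non-NULL branch context and a NULL
   label context, so the return is likewise diagnosed as an invalid
   branch out of the structured block.  */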
9241 static unsigned int
9242 diagnose_omp_structured_block_errors (void)
9244 struct walk_stmt_info wi;
9245 gimple_seq body = gimple_body (current_function_decl);
9247 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9249 memset (&wi, 0, sizeof (wi));
9250 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9252 memset (&wi, 0, sizeof (wi));
9253 wi.want_locations = true;
9254 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9256 gimple_set_body (current_function_decl, body);
9258 splay_tree_delete (all_labels);
9259 all_labels = NULL;
9261 return 0;
9264 namespace {
9266 const pass_data pass_data_diagnose_omp_blocks =
9268 GIMPLE_PASS, /* type */
9269 "*diagnose_omp_blocks", /* name */
9270 OPTGROUP_OMP, /* optinfo_flags */
9271 TV_NONE, /* tv_id */
9272 PROP_gimple_any, /* properties_required */
9273 0, /* properties_provided */
9274 0, /* properties_destroyed */
9275 0, /* todo_flags_start */
9276 0, /* todo_flags_finish */
9279 class pass_diagnose_omp_blocks : public gimple_opt_pass
9281 public:
9282 pass_diagnose_omp_blocks (gcc::context *ctxt)
9283 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9286 /* opt_pass methods: */
9287 virtual bool gate (function *)
9289 return flag_openacc || flag_openmp || flag_openmp_simd;
9291 virtual unsigned int execute (function *)
9293 return diagnose_omp_structured_block_errors ();
9296 }; // class pass_diagnose_omp_blocks
9298 } // anon namespace
9300 gimple_opt_pass *
9301 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9303 return new pass_diagnose_omp_blocks (ctxt);
9307 #include "gt-omp-low.h"