1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2018 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* Nesting depth of this context. Used to beautify error messages re
118 invalid gotos. The outermost ctx is depth 1, with depth 0 being
119 reserved for the main body of the function. */
122 /* True if this parallel directive is nested within another. */
125 /* True if this construct can be cancelled. */
129 static splay_tree all_contexts
;
130 static int taskreg_nesting_level
;
131 static int target_nesting_level
;
132 static bitmap task_shared_vars
;
133 static vec
<omp_context
*> taskreg_contexts
;
135 static void scan_omp (gimple_seq
*, omp_context
*);
136 static tree
scan_omp_1_op (tree
*, int *, void *);
138 #define WALK_SUBSTMTS \
142 case GIMPLE_EH_FILTER: \
143 case GIMPLE_TRANSACTION: \
144 /* The sub-statements for these should be walked. */ \
145 *handled_ops_p = false; \
148 /* Return true if CTX corresponds to an oacc parallel region. */
151 is_oacc_parallel (omp_context
*ctx
)
153 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
154 return ((outer_type
== GIMPLE_OMP_TARGET
)
155 && (gimple_omp_target_kind (ctx
->stmt
)
156 == GF_OMP_TARGET_KIND_OACC_PARALLEL
));
159 /* Return true if CTX corresponds to an oacc kernels region. */
162 is_oacc_kernels (omp_context
*ctx
)
164 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
165 return ((outer_type
== GIMPLE_OMP_TARGET
)
166 && (gimple_omp_target_kind (ctx
->stmt
)
167 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
170 /* If DECL is the artificial dummy VAR_DECL created for non-static
171 data member privatization, return the underlying "this" parameter,
172 otherwise return NULL. */
175 omp_member_access_dummy_var (tree decl
)
178 || !DECL_ARTIFICIAL (decl
)
179 || !DECL_IGNORED_P (decl
)
180 || !DECL_HAS_VALUE_EXPR_P (decl
)
181 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
184 tree v
= DECL_VALUE_EXPR (decl
);
185 if (TREE_CODE (v
) != COMPONENT_REF
)
189 switch (TREE_CODE (v
))
195 case POINTER_PLUS_EXPR
:
196 v
= TREE_OPERAND (v
, 0);
199 if (DECL_CONTEXT (v
) == current_function_decl
200 && DECL_ARTIFICIAL (v
)
201 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
209 /* Helper for unshare_and_remap, called through walk_tree. */
212 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
214 tree
*pair
= (tree
*) data
;
217 *tp
= unshare_expr (pair
[1]);
220 else if (IS_TYPE_OR_DECL_P (*tp
))
225 /* Return unshare_expr (X) with all occurrences of FROM
229 unshare_and_remap (tree x
, tree from
, tree to
)
231 tree pair
[2] = { from
, to
};
232 x
= unshare_expr (x
);
233 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
237 /* Convenience function for calling scan_omp_1_op on tree operands. */
240 scan_omp_op (tree
*tp
, omp_context
*ctx
)
242 struct walk_stmt_info wi
;
244 memset (&wi
, 0, sizeof (wi
));
246 wi
.want_locations
= true;
248 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
251 static void lower_omp (gimple_seq
*, omp_context
*);
252 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
253 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
255 /* Return true if CTX is for an omp parallel. */
258 is_parallel_ctx (omp_context
*ctx
)
260 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
264 /* Return true if CTX is for an omp task. */
267 is_task_ctx (omp_context
*ctx
)
269 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
273 /* Return true if CTX is for an omp taskloop. */
276 is_taskloop_ctx (omp_context
*ctx
)
278 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
279 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
283 /* Return true if CTX is for an omp parallel or omp task. */
286 is_taskreg_ctx (omp_context
*ctx
)
288 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
);
291 /* Return true if EXPR is variable sized. */
294 is_variable_sized (const_tree expr
)
296 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
299 /* Lookup variables. The "maybe" form
300 allows for the variable form to not have been entered, otherwise we
301 assert that the variable must have been entered. */
304 lookup_decl (tree var
, omp_context
*ctx
)
306 tree
*n
= ctx
->cb
.decl_map
->get (var
);
311 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
313 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
314 return n
? *n
: NULL_TREE
;
318 lookup_field (tree var
, omp_context
*ctx
)
321 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
322 return (tree
) n
->value
;
326 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
329 n
= splay_tree_lookup (ctx
->sfield_map
330 ? ctx
->sfield_map
: ctx
->field_map
, key
);
331 return (tree
) n
->value
;
335 lookup_sfield (tree var
, omp_context
*ctx
)
337 return lookup_sfield ((splay_tree_key
) var
, ctx
);
341 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
344 n
= splay_tree_lookup (ctx
->field_map
, key
);
345 return n
? (tree
) n
->value
: NULL_TREE
;
349 maybe_lookup_field (tree var
, omp_context
*ctx
)
351 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
354 /* Return true if DECL should be copied by pointer. SHARED_CTX is
355 the parallel context if DECL is to be shared. */
358 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
360 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
361 || TYPE_ATOMIC (TREE_TYPE (decl
)))
364 /* We can only use copy-in/copy-out semantics for shared variables
365 when we know the value is not accessible from an outer scope. */
368 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
370 /* ??? Trivially accessible from anywhere. But why would we even
371 be passing an address in this case? Should we simply assert
372 this to be false, or should we have a cleanup pass that removes
373 these from the list of mappings? */
374 if (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
377 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
378 without analyzing the expression whether or not its location
379 is accessible to anyone else. In the case of nested parallel
380 regions it certainly may be. */
381 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
384 /* Do not use copy-in/copy-out for variables that have their
386 if (TREE_ADDRESSABLE (decl
))
389 /* lower_send_shared_vars only uses copy-in, but not copy-out
391 if (TREE_READONLY (decl
)
392 || ((TREE_CODE (decl
) == RESULT_DECL
393 || TREE_CODE (decl
) == PARM_DECL
)
394 && DECL_BY_REFERENCE (decl
)))
397 /* Disallow copy-in/out in nested parallel if
398 decl is shared in outer parallel, otherwise
399 each thread could store the shared variable
400 in its own copy-in location, making the
401 variable no longer really shared. */
402 if (shared_ctx
->is_nested
)
406 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
407 if (is_taskreg_ctx (up
) && maybe_lookup_decl (decl
, up
))
414 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
415 c
; c
= OMP_CLAUSE_CHAIN (c
))
416 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
417 && OMP_CLAUSE_DECL (c
) == decl
)
421 goto maybe_mark_addressable_and_ret
;
425 /* For tasks avoid using copy-in/out. As tasks can be
426 deferred or executed in different thread, when GOMP_task
427 returns, the task hasn't necessarily terminated. */
428 if (is_task_ctx (shared_ctx
))
431 maybe_mark_addressable_and_ret
:
432 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
433 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
435 /* Taking address of OUTER in lower_send_shared_vars
436 might need regimplification of everything that uses the
438 if (!task_shared_vars
)
439 task_shared_vars
= BITMAP_ALLOC (NULL
);
440 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
441 TREE_ADDRESSABLE (outer
) = 1;
450 /* Construct a new automatic decl similar to VAR. */
453 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
455 tree copy
= copy_var_decl (var
, name
, type
);
457 DECL_CONTEXT (copy
) = current_function_decl
;
458 DECL_CHAIN (copy
) = ctx
->block_vars
;
459 /* If VAR is listed in task_shared_vars, it means it wasn't
460 originally addressable and is just because task needs to take
461 it's address. But we don't need to take address of privatizations
463 if (TREE_ADDRESSABLE (var
)
465 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
466 TREE_ADDRESSABLE (copy
) = 0;
467 ctx
->block_vars
= copy
;
473 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
475 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
478 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
481 omp_build_component_ref (tree obj
, tree field
)
483 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
484 if (TREE_THIS_VOLATILE (field
))
485 TREE_THIS_VOLATILE (ret
) |= 1;
486 if (TREE_READONLY (field
))
487 TREE_READONLY (ret
) |= 1;
491 /* Build tree nodes to access the field for VAR on the receiver side. */
494 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
496 tree x
, field
= lookup_field (var
, ctx
);
498 /* If the receiver record type was remapped in the child function,
499 remap the field into the new record type. */
500 x
= maybe_lookup_field (field
, ctx
);
504 x
= build_simple_mem_ref (ctx
->receiver_decl
);
505 TREE_THIS_NOTRAP (x
) = 1;
506 x
= omp_build_component_ref (x
, field
);
509 x
= build_simple_mem_ref (x
);
510 TREE_THIS_NOTRAP (x
) = 1;
516 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
517 of a parallel, this is a component reference; for workshare constructs
518 this is some variable. */
521 build_outer_var_ref (tree var
, omp_context
*ctx
,
522 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
526 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
528 else if (is_variable_sized (var
))
530 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
531 x
= build_outer_var_ref (x
, ctx
, code
);
532 x
= build_simple_mem_ref (x
);
534 else if (is_taskreg_ctx (ctx
))
536 bool by_ref
= use_pointer_for_field (var
, NULL
);
537 x
= build_receiver_ref (var
, by_ref
, ctx
);
539 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
540 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
541 || (code
== OMP_CLAUSE_PRIVATE
542 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
543 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
544 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
546 /* #pragma omp simd isn't a worksharing construct, and can reference
547 even private vars in its linear etc. clauses.
548 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
549 to private vars in all worksharing constructs. */
551 if (ctx
->outer
&& is_taskreg_ctx (ctx
))
552 x
= lookup_decl (var
, ctx
->outer
);
554 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
558 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
560 gcc_assert (ctx
->outer
);
562 = splay_tree_lookup (ctx
->outer
->field_map
,
563 (splay_tree_key
) &DECL_UID (var
));
566 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
->outer
)))
569 x
= lookup_decl (var
, ctx
->outer
);
573 tree field
= (tree
) n
->value
;
574 /* If the receiver record type was remapped in the child function,
575 remap the field into the new record type. */
576 x
= maybe_lookup_field (field
, ctx
->outer
);
580 x
= build_simple_mem_ref (ctx
->outer
->receiver_decl
);
581 x
= omp_build_component_ref (x
, field
);
582 if (use_pointer_for_field (var
, ctx
->outer
))
583 x
= build_simple_mem_ref (x
);
588 omp_context
*outer
= ctx
->outer
;
589 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_GRID_BODY
)
591 outer
= outer
->outer
;
593 && gimple_code (outer
->stmt
) != GIMPLE_OMP_GRID_BODY
);
595 x
= lookup_decl (var
, outer
);
597 else if (omp_is_reference (var
))
598 /* This can happen with orphaned constructs. If var is reference, it is
599 possible it is shared and as such valid. */
601 else if (omp_member_access_dummy_var (var
))
608 tree t
= omp_member_access_dummy_var (var
);
611 x
= DECL_VALUE_EXPR (var
);
612 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
614 x
= unshare_and_remap (x
, t
, o
);
616 x
= unshare_expr (x
);
620 if (omp_is_reference (var
))
621 x
= build_simple_mem_ref (x
);
626 /* Build tree nodes to access the field for VAR on the sender side. */
629 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
631 tree field
= lookup_sfield (key
, ctx
);
632 return omp_build_component_ref (ctx
->sender_decl
, field
);
636 build_sender_ref (tree var
, omp_context
*ctx
)
638 return build_sender_ref ((splay_tree_key
) var
, ctx
);
641 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
642 BASE_POINTERS_RESTRICT, declare the field with restrict. */
645 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
647 tree field
, type
, sfield
= NULL_TREE
;
648 splay_tree_key key
= (splay_tree_key
) var
;
652 key
= (splay_tree_key
) &DECL_UID (var
);
653 gcc_checking_assert (key
!= (splay_tree_key
) var
);
655 gcc_assert ((mask
& 1) == 0
656 || !splay_tree_lookup (ctx
->field_map
, key
));
657 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
658 || !splay_tree_lookup (ctx
->sfield_map
, key
));
659 gcc_assert ((mask
& 3) == 3
660 || !is_gimple_omp_oacc (ctx
->stmt
));
662 type
= TREE_TYPE (var
);
663 /* Prevent redeclaring the var in the split-off function with a restrict
664 pointer type. Note that we only clear type itself, restrict qualifiers in
665 the pointed-to type will be ignored by points-to analysis. */
666 if (POINTER_TYPE_P (type
)
667 && TYPE_RESTRICT (type
))
668 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
672 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
673 type
= build_pointer_type (build_pointer_type (type
));
676 type
= build_pointer_type (type
);
677 else if ((mask
& 3) == 1 && omp_is_reference (var
))
678 type
= TREE_TYPE (type
);
680 field
= build_decl (DECL_SOURCE_LOCATION (var
),
681 FIELD_DECL
, DECL_NAME (var
), type
);
683 /* Remember what variable this field was created for. This does have a
684 side effect of making dwarf2out ignore this member, so for helpful
685 debugging we clear it later in delete_omp_context. */
686 DECL_ABSTRACT_ORIGIN (field
) = var
;
687 if (type
== TREE_TYPE (var
))
689 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
690 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
691 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
694 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
698 insert_field_into_struct (ctx
->record_type
, field
);
699 if (ctx
->srecord_type
)
701 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
702 FIELD_DECL
, DECL_NAME (var
), type
);
703 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
704 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
705 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
706 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
707 insert_field_into_struct (ctx
->srecord_type
, sfield
);
712 if (ctx
->srecord_type
== NULL_TREE
)
716 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
717 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
718 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
720 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
721 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
722 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
723 insert_field_into_struct (ctx
->srecord_type
, sfield
);
724 splay_tree_insert (ctx
->sfield_map
,
725 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
726 (splay_tree_value
) sfield
);
730 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
731 : ctx
->srecord_type
, field
);
735 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
736 if ((mask
& 2) && ctx
->sfield_map
)
737 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
741 install_var_local (tree var
, omp_context
*ctx
)
743 tree new_var
= omp_copy_decl_1 (var
, ctx
);
744 insert_decl_map (&ctx
->cb
, var
, new_var
);
748 /* Adjust the replacement for DECL in CTX for the new context. This means
749 copying the DECL_VALUE_EXPR, and fixing up the type. */
752 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
756 new_decl
= lookup_decl (decl
, ctx
);
758 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
760 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
761 && DECL_HAS_VALUE_EXPR_P (decl
))
763 tree ve
= DECL_VALUE_EXPR (decl
);
764 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
765 SET_DECL_VALUE_EXPR (new_decl
, ve
);
766 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
769 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
771 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
772 if (size
== error_mark_node
)
773 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
774 DECL_SIZE (new_decl
) = size
;
776 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
777 if (size
== error_mark_node
)
778 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
779 DECL_SIZE_UNIT (new_decl
) = size
;
783 /* The callback for remap_decl. Search all containing contexts for a
784 mapping of the variable; this avoids having to duplicate the splay
785 tree ahead of time. We know a mapping doesn't already exist in the
786 given context. Create new mappings to implement default semantics. */
789 omp_copy_decl (tree var
, copy_body_data
*cb
)
791 omp_context
*ctx
= (omp_context
*) cb
;
794 if (TREE_CODE (var
) == LABEL_DECL
)
796 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
798 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
799 DECL_CONTEXT (new_var
) = current_function_decl
;
800 insert_decl_map (&ctx
->cb
, var
, new_var
);
804 while (!is_taskreg_ctx (ctx
))
809 new_var
= maybe_lookup_decl (var
, ctx
);
814 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
817 return error_mark_node
;
820 /* Create a new context, with OUTER_CTX being the surrounding context. */
823 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
825 omp_context
*ctx
= XCNEW (omp_context
);
827 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
828 (splay_tree_value
) ctx
);
833 ctx
->outer
= outer_ctx
;
834 ctx
->cb
= outer_ctx
->cb
;
835 ctx
->cb
.block
= NULL
;
836 ctx
->depth
= outer_ctx
->depth
+ 1;
840 ctx
->cb
.src_fn
= current_function_decl
;
841 ctx
->cb
.dst_fn
= current_function_decl
;
842 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
843 gcc_checking_assert (ctx
->cb
.src_node
);
844 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
845 ctx
->cb
.src_cfun
= cfun
;
846 ctx
->cb
.copy_decl
= omp_copy_decl
;
847 ctx
->cb
.eh_lp_nr
= 0;
848 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
852 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
857 static gimple_seq
maybe_catch_exception (gimple_seq
);
859 /* Finalize task copyfn. */
862 finalize_task_copyfn (gomp_task
*task_stmt
)
864 struct function
*child_cfun
;
866 gimple_seq seq
= NULL
, new_seq
;
869 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
870 if (child_fn
== NULL_TREE
)
873 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
874 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
876 push_cfun (child_cfun
);
877 bind
= gimplify_body (child_fn
, false);
878 gimple_seq_add_stmt (&seq
, bind
);
879 new_seq
= maybe_catch_exception (seq
);
882 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
884 gimple_seq_add_stmt (&seq
, bind
);
886 gimple_set_body (child_fn
, seq
);
889 /* Inform the callgraph about the new function. */
890 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
891 node
->parallelized_function
= 1;
892 cgraph_node::add_new_function (child_fn
, false);
895 /* Destroy a omp_context data structures. Called through the splay tree
896 value delete callback. */
899 delete_omp_context (splay_tree_value value
)
901 omp_context
*ctx
= (omp_context
*) value
;
903 delete ctx
->cb
.decl_map
;
906 splay_tree_delete (ctx
->field_map
);
908 splay_tree_delete (ctx
->sfield_map
);
910 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
911 it produces corrupt debug information. */
912 if (ctx
->record_type
)
915 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
916 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
918 if (ctx
->srecord_type
)
921 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
922 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
925 if (is_task_ctx (ctx
))
926 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
931 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
935 fixup_child_record_type (omp_context
*ctx
)
937 tree f
, type
= ctx
->record_type
;
939 if (!ctx
->receiver_decl
)
941 /* ??? It isn't sufficient to just call remap_type here, because
942 variably_modified_type_p doesn't work the way we expect for
943 record types. Testing each field for whether it needs remapping
944 and creating a new record by hand works, however. */
945 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
946 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
950 tree name
, new_fields
= NULL
;
952 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
953 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
954 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
955 TYPE_DECL
, name
, type
);
956 TYPE_NAME (type
) = name
;
958 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
960 tree new_f
= copy_node (f
);
961 DECL_CONTEXT (new_f
) = type
;
962 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
963 DECL_CHAIN (new_f
) = new_fields
;
964 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
965 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
967 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
971 /* Arrange to be able to look up the receiver field
972 given the sender field. */
973 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
974 (splay_tree_value
) new_f
);
976 TYPE_FIELDS (type
) = nreverse (new_fields
);
980 /* In a target region we never modify any of the pointers in *.omp_data_i,
981 so attempt to help the optimizers. */
982 if (is_gimple_omp_offloaded (ctx
->stmt
))
983 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
985 TREE_TYPE (ctx
->receiver_decl
)
986 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
989 /* Instantiate decls as necessary in CTX to satisfy the data sharing
990 specified by CLAUSES. */
993 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
996 bool scan_array_reductions
= false;
998 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1002 switch (OMP_CLAUSE_CODE (c
))
1004 case OMP_CLAUSE_PRIVATE
:
1005 decl
= OMP_CLAUSE_DECL (c
);
1006 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1008 else if (!is_variable_sized (decl
))
1009 install_var_local (decl
, ctx
);
1012 case OMP_CLAUSE_SHARED
:
1013 decl
= OMP_CLAUSE_DECL (c
);
1014 /* Ignore shared directives in teams construct. */
1015 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
1017 /* Global variables don't need to be copied,
1018 the receiver side will use them directly. */
1019 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1020 if (is_global_var (odecl
))
1022 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1025 gcc_assert (is_taskreg_ctx (ctx
));
1026 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1027 || !is_variable_sized (decl
));
1028 /* Global variables don't need to be copied,
1029 the receiver side will use them directly. */
1030 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1032 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1034 use_pointer_for_field (decl
, ctx
);
1037 by_ref
= use_pointer_for_field (decl
, NULL
);
1038 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1039 || TREE_ADDRESSABLE (decl
)
1041 || omp_is_reference (decl
))
1043 by_ref
= use_pointer_for_field (decl
, ctx
);
1044 install_var_field (decl
, by_ref
, 3, ctx
);
1045 install_var_local (decl
, ctx
);
1048 /* We don't need to copy const scalar vars back. */
1049 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1052 case OMP_CLAUSE_REDUCTION
:
1053 decl
= OMP_CLAUSE_DECL (c
);
1054 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1055 && TREE_CODE (decl
) == MEM_REF
)
1057 tree t
= TREE_OPERAND (decl
, 0);
1058 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1059 t
= TREE_OPERAND (t
, 0);
1060 if (TREE_CODE (t
) == INDIRECT_REF
1061 || TREE_CODE (t
) == ADDR_EXPR
)
1062 t
= TREE_OPERAND (t
, 0);
1063 install_var_local (t
, ctx
);
1064 if (is_taskreg_ctx (ctx
)
1065 && !is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1066 && !is_variable_sized (t
))
1068 by_ref
= use_pointer_for_field (t
, ctx
);
1069 install_var_field (t
, by_ref
, 3, ctx
);
1075 case OMP_CLAUSE_LASTPRIVATE
:
1076 /* Let the corresponding firstprivate clause create
1078 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1082 case OMP_CLAUSE_FIRSTPRIVATE
:
1083 case OMP_CLAUSE_LINEAR
:
1084 decl
= OMP_CLAUSE_DECL (c
);
1086 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1087 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1088 && is_gimple_omp_offloaded (ctx
->stmt
))
1090 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1091 install_var_field (decl
, !omp_is_reference (decl
), 3, ctx
);
1092 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1093 install_var_field (decl
, true, 3, ctx
);
1095 install_var_field (decl
, false, 3, ctx
);
1097 if (is_variable_sized (decl
))
1099 if (is_task_ctx (ctx
))
1100 install_var_field (decl
, false, 1, ctx
);
1103 else if (is_taskreg_ctx (ctx
))
1106 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1107 by_ref
= use_pointer_for_field (decl
, NULL
);
1109 if (is_task_ctx (ctx
)
1110 && (global
|| by_ref
|| omp_is_reference (decl
)))
1112 install_var_field (decl
, false, 1, ctx
);
1114 install_var_field (decl
, by_ref
, 2, ctx
);
1117 install_var_field (decl
, by_ref
, 3, ctx
);
1119 install_var_local (decl
, ctx
);
1122 case OMP_CLAUSE_USE_DEVICE_PTR
:
1123 decl
= OMP_CLAUSE_DECL (c
);
1124 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1125 install_var_field (decl
, true, 3, ctx
);
1127 install_var_field (decl
, false, 3, ctx
);
1128 if (DECL_SIZE (decl
)
1129 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1131 tree decl2
= DECL_VALUE_EXPR (decl
);
1132 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1133 decl2
= TREE_OPERAND (decl2
, 0);
1134 gcc_assert (DECL_P (decl2
));
1135 install_var_local (decl2
, ctx
);
1137 install_var_local (decl
, ctx
);
1140 case OMP_CLAUSE_IS_DEVICE_PTR
:
1141 decl
= OMP_CLAUSE_DECL (c
);
1144 case OMP_CLAUSE__LOOPTEMP_
:
1145 gcc_assert (is_taskreg_ctx (ctx
));
1146 decl
= OMP_CLAUSE_DECL (c
);
1147 install_var_field (decl
, false, 3, ctx
);
1148 install_var_local (decl
, ctx
);
1151 case OMP_CLAUSE_COPYPRIVATE
:
1152 case OMP_CLAUSE_COPYIN
:
1153 decl
= OMP_CLAUSE_DECL (c
);
1154 by_ref
= use_pointer_for_field (decl
, NULL
);
1155 install_var_field (decl
, by_ref
, 3, ctx
);
1158 case OMP_CLAUSE_FINAL
:
1160 case OMP_CLAUSE_NUM_THREADS
:
1161 case OMP_CLAUSE_NUM_TEAMS
:
1162 case OMP_CLAUSE_THREAD_LIMIT
:
1163 case OMP_CLAUSE_DEVICE
:
1164 case OMP_CLAUSE_SCHEDULE
:
1165 case OMP_CLAUSE_DIST_SCHEDULE
:
1166 case OMP_CLAUSE_DEPEND
:
1167 case OMP_CLAUSE_PRIORITY
:
1168 case OMP_CLAUSE_GRAINSIZE
:
1169 case OMP_CLAUSE_NUM_TASKS
:
1170 case OMP_CLAUSE_NUM_GANGS
:
1171 case OMP_CLAUSE_NUM_WORKERS
:
1172 case OMP_CLAUSE_VECTOR_LENGTH
:
1174 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1178 case OMP_CLAUSE_FROM
:
1179 case OMP_CLAUSE_MAP
:
1181 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1182 decl
= OMP_CLAUSE_DECL (c
);
1183 /* Global variables with "omp declare target" attribute
1184 don't need to be copied, the receiver side will use them
1185 directly. However, global variables with "omp declare target link"
1186 attribute need to be copied. */
1187 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1189 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1190 && (OMP_CLAUSE_MAP_KIND (c
)
1191 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1192 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1193 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1194 && varpool_node::get_create (decl
)->offloadable
1195 && !lookup_attribute ("omp declare target link",
1196 DECL_ATTRIBUTES (decl
)))
1198 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1199 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1201 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1202 not offloaded; there is nothing to map for those. */
1203 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1204 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1205 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1208 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1209 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1210 || (OMP_CLAUSE_MAP_KIND (c
)
1211 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1213 if (TREE_CODE (decl
) == COMPONENT_REF
1214 || (TREE_CODE (decl
) == INDIRECT_REF
1215 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1216 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1217 == REFERENCE_TYPE
)))
1219 if (DECL_SIZE (decl
)
1220 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1222 tree decl2
= DECL_VALUE_EXPR (decl
);
1223 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1224 decl2
= TREE_OPERAND (decl2
, 0);
1225 gcc_assert (DECL_P (decl2
));
1226 install_var_local (decl2
, ctx
);
1228 install_var_local (decl
, ctx
);
1233 if (DECL_SIZE (decl
)
1234 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1236 tree decl2
= DECL_VALUE_EXPR (decl
);
1237 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1238 decl2
= TREE_OPERAND (decl2
, 0);
1239 gcc_assert (DECL_P (decl2
));
1240 install_var_field (decl2
, true, 3, ctx
);
1241 install_var_local (decl2
, ctx
);
1242 install_var_local (decl
, ctx
);
1246 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1247 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1248 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1249 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1250 install_var_field (decl
, true, 7, ctx
);
1252 install_var_field (decl
, true, 3, ctx
);
1253 if (is_gimple_omp_offloaded (ctx
->stmt
)
1254 && !OMP_CLAUSE_MAP_IN_REDUCTION (c
))
1255 install_var_local (decl
, ctx
);
1260 tree base
= get_base_address (decl
);
1261 tree nc
= OMP_CLAUSE_CHAIN (c
);
1264 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1265 && OMP_CLAUSE_DECL (nc
) == base
1266 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1267 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1269 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1270 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1276 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1277 decl
= OMP_CLAUSE_DECL (c
);
1279 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1280 (splay_tree_key
) decl
));
1282 = build_decl (OMP_CLAUSE_LOCATION (c
),
1283 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1284 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1285 insert_field_into_struct (ctx
->record_type
, field
);
1286 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1287 (splay_tree_value
) field
);
1292 case OMP_CLAUSE__GRIDDIM_
:
1295 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c
), ctx
->outer
);
1296 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c
), ctx
->outer
);
1300 case OMP_CLAUSE_NOWAIT
:
1301 case OMP_CLAUSE_ORDERED
:
1302 case OMP_CLAUSE_COLLAPSE
:
1303 case OMP_CLAUSE_UNTIED
:
1304 case OMP_CLAUSE_MERGEABLE
:
1305 case OMP_CLAUSE_PROC_BIND
:
1306 case OMP_CLAUSE_SAFELEN
:
1307 case OMP_CLAUSE_SIMDLEN
:
1308 case OMP_CLAUSE_THREADS
:
1309 case OMP_CLAUSE_SIMD
:
1310 case OMP_CLAUSE_NOGROUP
:
1311 case OMP_CLAUSE_DEFAULTMAP
:
1312 case OMP_CLAUSE_ASYNC
:
1313 case OMP_CLAUSE_WAIT
:
1314 case OMP_CLAUSE_GANG
:
1315 case OMP_CLAUSE_WORKER
:
1316 case OMP_CLAUSE_VECTOR
:
1317 case OMP_CLAUSE_INDEPENDENT
:
1318 case OMP_CLAUSE_AUTO
:
1319 case OMP_CLAUSE_SEQ
:
1320 case OMP_CLAUSE_TILE
:
1321 case OMP_CLAUSE__SIMT_
:
1322 case OMP_CLAUSE_DEFAULT
:
1323 case OMP_CLAUSE_IF_PRESENT
:
1324 case OMP_CLAUSE_FINALIZE
:
1327 case OMP_CLAUSE_ALIGNED
:
1328 decl
= OMP_CLAUSE_DECL (c
);
1329 if (is_global_var (decl
)
1330 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1331 install_var_local (decl
, ctx
);
1334 case OMP_CLAUSE__CACHE_
:
1340 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1342 switch (OMP_CLAUSE_CODE (c
))
1344 case OMP_CLAUSE_LASTPRIVATE
:
1345 /* Let the corresponding firstprivate clause create
1347 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1348 scan_array_reductions
= true;
1349 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1353 case OMP_CLAUSE_FIRSTPRIVATE
:
1354 case OMP_CLAUSE_PRIVATE
:
1355 case OMP_CLAUSE_LINEAR
:
1356 case OMP_CLAUSE_IS_DEVICE_PTR
:
1357 decl
= OMP_CLAUSE_DECL (c
);
1358 if (is_variable_sized (decl
))
1360 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1361 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1362 && is_gimple_omp_offloaded (ctx
->stmt
))
1364 tree decl2
= DECL_VALUE_EXPR (decl
);
1365 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1366 decl2
= TREE_OPERAND (decl2
, 0);
1367 gcc_assert (DECL_P (decl2
));
1368 install_var_local (decl2
, ctx
);
1369 fixup_remapped_decl (decl2
, ctx
, false);
1371 install_var_local (decl
, ctx
);
1373 fixup_remapped_decl (decl
, ctx
,
1374 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1375 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1376 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1377 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1378 scan_array_reductions
= true;
1381 case OMP_CLAUSE_REDUCTION
:
1382 decl
= OMP_CLAUSE_DECL (c
);
1383 if (TREE_CODE (decl
) != MEM_REF
)
1385 if (is_variable_sized (decl
))
1386 install_var_local (decl
, ctx
);
1387 fixup_remapped_decl (decl
, ctx
, false);
1389 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1390 scan_array_reductions
= true;
1393 case OMP_CLAUSE_SHARED
:
1394 /* Ignore shared directives in teams construct. */
1395 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
1397 decl
= OMP_CLAUSE_DECL (c
);
1398 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1400 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1402 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1405 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1406 install_var_field (decl
, by_ref
, 11, ctx
);
1409 fixup_remapped_decl (decl
, ctx
, false);
1412 case OMP_CLAUSE_MAP
:
1413 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1415 decl
= OMP_CLAUSE_DECL (c
);
1417 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1418 && (OMP_CLAUSE_MAP_KIND (c
)
1419 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1420 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1421 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1422 && varpool_node::get_create (decl
)->offloadable
)
1426 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1427 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1428 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1429 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1431 tree new_decl
= lookup_decl (decl
, ctx
);
1432 TREE_TYPE (new_decl
)
1433 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1435 else if (DECL_SIZE (decl
)
1436 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1438 tree decl2
= DECL_VALUE_EXPR (decl
);
1439 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1440 decl2
= TREE_OPERAND (decl2
, 0);
1441 gcc_assert (DECL_P (decl2
));
1442 fixup_remapped_decl (decl2
, ctx
, false);
1443 fixup_remapped_decl (decl
, ctx
, true);
1446 fixup_remapped_decl (decl
, ctx
, false);
1450 case OMP_CLAUSE_COPYPRIVATE
:
1451 case OMP_CLAUSE_COPYIN
:
1452 case OMP_CLAUSE_DEFAULT
:
1454 case OMP_CLAUSE_NUM_THREADS
:
1455 case OMP_CLAUSE_NUM_TEAMS
:
1456 case OMP_CLAUSE_THREAD_LIMIT
:
1457 case OMP_CLAUSE_DEVICE
:
1458 case OMP_CLAUSE_SCHEDULE
:
1459 case OMP_CLAUSE_DIST_SCHEDULE
:
1460 case OMP_CLAUSE_NOWAIT
:
1461 case OMP_CLAUSE_ORDERED
:
1462 case OMP_CLAUSE_COLLAPSE
:
1463 case OMP_CLAUSE_UNTIED
:
1464 case OMP_CLAUSE_FINAL
:
1465 case OMP_CLAUSE_MERGEABLE
:
1466 case OMP_CLAUSE_PROC_BIND
:
1467 case OMP_CLAUSE_SAFELEN
:
1468 case OMP_CLAUSE_SIMDLEN
:
1469 case OMP_CLAUSE_ALIGNED
:
1470 case OMP_CLAUSE_DEPEND
:
1471 case OMP_CLAUSE__LOOPTEMP_
:
1473 case OMP_CLAUSE_FROM
:
1474 case OMP_CLAUSE_PRIORITY
:
1475 case OMP_CLAUSE_GRAINSIZE
:
1476 case OMP_CLAUSE_NUM_TASKS
:
1477 case OMP_CLAUSE_THREADS
:
1478 case OMP_CLAUSE_SIMD
:
1479 case OMP_CLAUSE_NOGROUP
:
1480 case OMP_CLAUSE_DEFAULTMAP
:
1481 case OMP_CLAUSE_USE_DEVICE_PTR
:
1482 case OMP_CLAUSE_ASYNC
:
1483 case OMP_CLAUSE_WAIT
:
1484 case OMP_CLAUSE_NUM_GANGS
:
1485 case OMP_CLAUSE_NUM_WORKERS
:
1486 case OMP_CLAUSE_VECTOR_LENGTH
:
1487 case OMP_CLAUSE_GANG
:
1488 case OMP_CLAUSE_WORKER
:
1489 case OMP_CLAUSE_VECTOR
:
1490 case OMP_CLAUSE_INDEPENDENT
:
1491 case OMP_CLAUSE_AUTO
:
1492 case OMP_CLAUSE_SEQ
:
1493 case OMP_CLAUSE_TILE
:
1494 case OMP_CLAUSE__GRIDDIM_
:
1495 case OMP_CLAUSE__SIMT_
:
1496 case OMP_CLAUSE_IF_PRESENT
:
1497 case OMP_CLAUSE_FINALIZE
:
1500 case OMP_CLAUSE__CACHE_
:
1506 gcc_checking_assert (!scan_array_reductions
1507 || !is_gimple_omp_oacc (ctx
->stmt
));
1508 if (scan_array_reductions
)
1510 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1511 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1512 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1514 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), ctx
);
1515 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
1517 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1518 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1519 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1520 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1521 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1522 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1526 /* Create a new name for omp child function. Returns an identifier. */
1529 create_omp_child_function_name (bool task_copy
)
1531 return clone_function_name (current_function_decl
,
1532 task_copy
? "_omp_cpyfn" : "_omp_fn");
1535 /* Return true if CTX may belong to offloaded code: either if current function
1536 is offloaded, or any enclosing context corresponds to a target region. */
1539 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1541 if (cgraph_node::get (current_function_decl
)->offloadable
)
1543 for (; ctx
; ctx
= ctx
->outer
)
1544 if (is_gimple_omp_offloaded (ctx
->stmt
))
1549 /* Build a decl for the omp child function. It'll not contain a body
1550 yet, just the bare decl. */
1553 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1555 tree decl
, type
, name
, t
;
1557 name
= create_omp_child_function_name (task_copy
);
1559 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1560 ptr_type_node
, NULL_TREE
);
1562 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1564 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1566 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1569 ctx
->cb
.dst_fn
= decl
;
1571 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1573 TREE_STATIC (decl
) = 1;
1574 TREE_USED (decl
) = 1;
1575 DECL_ARTIFICIAL (decl
) = 1;
1576 DECL_IGNORED_P (decl
) = 0;
1577 TREE_PUBLIC (decl
) = 0;
1578 DECL_UNINLINABLE (decl
) = 1;
1579 DECL_EXTERNAL (decl
) = 0;
1580 DECL_CONTEXT (decl
) = NULL_TREE
;
1581 DECL_INITIAL (decl
) = make_node (BLOCK
);
1582 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1583 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1584 /* Remove omp declare simd attribute from the new attributes. */
1585 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1587 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1590 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1591 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1592 *p
= TREE_CHAIN (*p
);
1595 tree chain
= TREE_CHAIN (*p
);
1596 *p
= copy_node (*p
);
1597 p
= &TREE_CHAIN (*p
);
1601 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
1602 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
1603 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
1604 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
1605 DECL_FUNCTION_VERSIONED (decl
)
1606 = DECL_FUNCTION_VERSIONED (current_function_decl
);
1608 if (omp_maybe_offloaded_ctx (ctx
))
1610 cgraph_node::get_create (decl
)->offloadable
= 1;
1611 if (ENABLE_OFFLOADING
)
1612 g
->have_offload
= true;
1615 if (cgraph_node::get_create (decl
)->offloadable
1616 && !lookup_attribute ("omp declare target",
1617 DECL_ATTRIBUTES (current_function_decl
)))
1619 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
1620 ? "omp target entrypoint"
1621 : "omp declare target");
1622 DECL_ATTRIBUTES (decl
)
1623 = tree_cons (get_identifier (target_attr
),
1624 NULL_TREE
, DECL_ATTRIBUTES (decl
));
1627 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1628 RESULT_DECL
, NULL_TREE
, void_type_node
);
1629 DECL_ARTIFICIAL (t
) = 1;
1630 DECL_IGNORED_P (t
) = 1;
1631 DECL_CONTEXT (t
) = decl
;
1632 DECL_RESULT (decl
) = t
;
1634 tree data_name
= get_identifier (".omp_data_i");
1635 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
1637 DECL_ARTIFICIAL (t
) = 1;
1638 DECL_NAMELESS (t
) = 1;
1639 DECL_ARG_TYPE (t
) = ptr_type_node
;
1640 DECL_CONTEXT (t
) = current_function_decl
;
1642 TREE_READONLY (t
) = 1;
1643 DECL_ARGUMENTS (decl
) = t
;
1645 ctx
->receiver_decl
= t
;
1648 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1649 PARM_DECL
, get_identifier (".omp_data_o"),
1651 DECL_ARTIFICIAL (t
) = 1;
1652 DECL_NAMELESS (t
) = 1;
1653 DECL_ARG_TYPE (t
) = ptr_type_node
;
1654 DECL_CONTEXT (t
) = current_function_decl
;
1656 TREE_ADDRESSABLE (t
) = 1;
1657 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1658 DECL_ARGUMENTS (decl
) = t
;
1661 /* Allocate memory for the function structure. The call to
1662 allocate_struct_function clobbers CFUN, so we need to restore
1664 push_struct_function (decl
);
1665 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
1666 init_tree_ssa (cfun
);
1670 /* Callback for walk_gimple_seq. Check if combined parallel
1671 contains gimple_omp_for_combined_into_p OMP_FOR. */
1674 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
1675 bool *handled_ops_p
,
1676 struct walk_stmt_info
*wi
)
1678 gimple
*stmt
= gsi_stmt (*gsi_p
);
1680 *handled_ops_p
= true;
1681 switch (gimple_code (stmt
))
1685 case GIMPLE_OMP_FOR
:
1686 if (gimple_omp_for_combined_into_p (stmt
)
1687 && gimple_omp_for_kind (stmt
)
1688 == *(const enum gf_mask
*) (wi
->info
))
1691 return integer_zero_node
;
1700 /* Add _LOOPTEMP_ clauses on OpenMP parallel or task. */
1703 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
1704 omp_context
*outer_ctx
)
1706 struct walk_stmt_info wi
;
1708 memset (&wi
, 0, sizeof (wi
));
1710 wi
.info
= (void *) &msk
;
1711 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
1712 if (wi
.info
!= (void *) &msk
)
1714 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
1715 struct omp_for_data fd
;
1716 omp_extract_for_data (for_stmt
, &fd
, NULL
);
1717 /* We need two temporaries with fd.loop.v type (istart/iend)
1718 and then (fd.collapse - 1) temporaries with the same
1719 type for count2 ... countN-1 vars if not constant. */
1720 size_t count
= 2, i
;
1721 tree type
= fd
.iter_type
;
1723 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
1725 count
+= fd
.collapse
- 1;
1726 /* If there are lastprivate clauses on the inner
1727 GIMPLE_OMP_FOR, add one more temporaries for the total number
1728 of iterations (product of count1 ... countN-1). */
1729 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
1730 OMP_CLAUSE_LASTPRIVATE
))
1732 else if (msk
== GF_OMP_FOR_KIND_FOR
1733 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1734 OMP_CLAUSE_LASTPRIVATE
))
1737 for (i
= 0; i
< count
; i
++)
1739 tree temp
= create_tmp_var (type
);
1740 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
1741 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1742 OMP_CLAUSE_DECL (c
) = temp
;
1743 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1744 gimple_omp_taskreg_set_clauses (stmt
, c
);
1749 /* Scan an OpenMP parallel directive. */
1752 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1756 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
1758 /* Ignore parallel directives with empty bodies, unless there
1759 are copyin clauses. */
1761 && empty_body_p (gimple_omp_body (stmt
))
1762 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1763 OMP_CLAUSE_COPYIN
) == NULL
)
1765 gsi_replace (gsi
, gimple_build_nop (), false);
1769 if (gimple_omp_parallel_combined_p (stmt
))
1770 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
1772 ctx
= new_omp_context (stmt
, outer_ctx
);
1773 taskreg_contexts
.safe_push (ctx
);
1774 if (taskreg_nesting_level
> 1)
1775 ctx
->is_nested
= true;
1776 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1777 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1778 name
= create_tmp_var_name (".omp_data_s");
1779 name
= build_decl (gimple_location (stmt
),
1780 TYPE_DECL
, name
, ctx
->record_type
);
1781 DECL_ARTIFICIAL (name
) = 1;
1782 DECL_NAMELESS (name
) = 1;
1783 TYPE_NAME (ctx
->record_type
) = name
;
1784 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1785 if (!gimple_omp_parallel_grid_phony (stmt
))
1787 create_omp_child_function (ctx
, false);
1788 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1791 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
1792 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
1794 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
1795 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
1798 /* Scan an OpenMP task directive. */
1801 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1805 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
1807 /* Ignore task directives with empty bodies, unless they have depend
1810 && empty_body_p (gimple_omp_body (stmt
))
1811 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
1813 gsi_replace (gsi
, gimple_build_nop (), false);
1817 if (gimple_omp_task_taskloop_p (stmt
))
1818 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
1820 ctx
= new_omp_context (stmt
, outer_ctx
);
1821 taskreg_contexts
.safe_push (ctx
);
1822 if (taskreg_nesting_level
> 1)
1823 ctx
->is_nested
= true;
1824 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1825 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1826 name
= create_tmp_var_name (".omp_data_s");
1827 name
= build_decl (gimple_location (stmt
),
1828 TYPE_DECL
, name
, ctx
->record_type
);
1829 DECL_ARTIFICIAL (name
) = 1;
1830 DECL_NAMELESS (name
) = 1;
1831 TYPE_NAME (ctx
->record_type
) = name
;
1832 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1833 create_omp_child_function (ctx
, false);
1834 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1836 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
1838 if (ctx
->srecord_type
)
1840 name
= create_tmp_var_name (".omp_data_a");
1841 name
= build_decl (gimple_location (stmt
),
1842 TYPE_DECL
, name
, ctx
->srecord_type
);
1843 DECL_ARTIFICIAL (name
) = 1;
1844 DECL_NAMELESS (name
) = 1;
1845 TYPE_NAME (ctx
->srecord_type
) = name
;
1846 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
1847 create_omp_child_function (ctx
, true);
1850 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
1852 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
1854 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
1855 t
= build_int_cst (long_integer_type_node
, 0);
1856 gimple_omp_task_set_arg_size (stmt
, t
);
1857 t
= build_int_cst (long_integer_type_node
, 1);
1858 gimple_omp_task_set_arg_align (stmt
, t
);
1862 /* Helper function for finish_taskreg_scan, called through walk_tree.
1863 If maybe_lookup_decl_in_outer_context returns non-NULL for some
1864 tree, replace it in the expression. */
1867 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
1871 omp_context
*ctx
= (omp_context
*) data
;
1872 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
1875 if (DECL_HAS_VALUE_EXPR_P (t
))
1876 t
= unshare_expr (DECL_VALUE_EXPR (t
));
1881 else if (IS_TYPE_OR_DECL_P (*tp
))
1886 /* If any decls have been made addressable during scan_omp,
1887 adjust their fields if needed, and layout record types
1888 of parallel/task constructs. */
1891 finish_taskreg_scan (omp_context
*ctx
)
1893 if (ctx
->record_type
== NULL_TREE
)
1896 /* If any task_shared_vars were needed, verify all
1897 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1898 statements if use_pointer_for_field hasn't changed
1899 because of that. If it did, update field types now. */
1900 if (task_shared_vars
)
1904 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
1905 c
; c
= OMP_CLAUSE_CHAIN (c
))
1906 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1907 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1909 tree decl
= OMP_CLAUSE_DECL (c
);
1911 /* Global variables don't need to be copied,
1912 the receiver side will use them directly. */
1913 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1915 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
1916 || !use_pointer_for_field (decl
, ctx
))
1918 tree field
= lookup_field (decl
, ctx
);
1919 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
1920 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
1922 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
1923 TREE_THIS_VOLATILE (field
) = 0;
1924 DECL_USER_ALIGN (field
) = 0;
1925 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
1926 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
1927 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
1928 if (ctx
->srecord_type
)
1930 tree sfield
= lookup_sfield (decl
, ctx
);
1931 TREE_TYPE (sfield
) = TREE_TYPE (field
);
1932 TREE_THIS_VOLATILE (sfield
) = 0;
1933 DECL_USER_ALIGN (sfield
) = 0;
1934 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
1935 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
1936 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
1941 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
1943 layout_type (ctx
->record_type
);
1944 fixup_child_record_type (ctx
);
1948 location_t loc
= gimple_location (ctx
->stmt
);
1949 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
1950 /* Move VLA fields to the end. */
1951 p
= &TYPE_FIELDS (ctx
->record_type
);
1953 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
1954 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
1957 *p
= TREE_CHAIN (*p
);
1958 TREE_CHAIN (*q
) = NULL_TREE
;
1959 q
= &TREE_CHAIN (*q
);
1962 p
= &DECL_CHAIN (*p
);
1964 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
1966 /* Move fields corresponding to first and second _looptemp_
1967 clause first. There are filled by GOMP_taskloop
1968 and thus need to be in specific positions. */
1969 tree c1
= gimple_omp_task_clauses (ctx
->stmt
);
1970 c1
= omp_find_clause (c1
, OMP_CLAUSE__LOOPTEMP_
);
1971 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
1972 OMP_CLAUSE__LOOPTEMP_
);
1973 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
1974 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
1975 p
= &TYPE_FIELDS (ctx
->record_type
);
1977 if (*p
== f1
|| *p
== f2
)
1978 *p
= DECL_CHAIN (*p
);
1980 p
= &DECL_CHAIN (*p
);
1981 DECL_CHAIN (f1
) = f2
;
1982 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
1983 TYPE_FIELDS (ctx
->record_type
) = f1
;
1984 if (ctx
->srecord_type
)
1986 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
1987 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
1988 p
= &TYPE_FIELDS (ctx
->srecord_type
);
1990 if (*p
== f1
|| *p
== f2
)
1991 *p
= DECL_CHAIN (*p
);
1993 p
= &DECL_CHAIN (*p
);
1994 DECL_CHAIN (f1
) = f2
;
1995 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
1996 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
1999 layout_type (ctx
->record_type
);
2000 fixup_child_record_type (ctx
);
2001 if (ctx
->srecord_type
)
2002 layout_type (ctx
->srecord_type
);
2003 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2004 TYPE_SIZE_UNIT (ctx
->record_type
));
2005 if (TREE_CODE (t
) != INTEGER_CST
)
2007 t
= unshare_expr (t
);
2008 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2010 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2011 t
= build_int_cst (long_integer_type_node
,
2012 TYPE_ALIGN_UNIT (ctx
->record_type
));
2013 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2017 /* Find the enclosing offload context. */
2019 static omp_context
*
2020 enclosing_target_ctx (omp_context
*ctx
)
2022 for (; ctx
; ctx
= ctx
->outer
)
2023 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2029 /* Return true if ctx is part of an oacc kernels region. */
2032 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2034 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2036 gimple
*stmt
= ctx
->stmt
;
2037 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2038 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2045 /* Check the parallelism clauses inside a kernels regions.
2046 Until kernels handling moves to use the same loop indirection
2047 scheme as parallel, we need to do this checking early. */
2050 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2052 bool checking
= true;
2053 unsigned outer_mask
= 0;
2054 unsigned this_mask
= 0;
2055 bool has_seq
= false, has_auto
= false;
2058 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2062 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2064 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2067 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2069 switch (OMP_CLAUSE_CODE (c
))
2071 case OMP_CLAUSE_GANG
:
2072 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2074 case OMP_CLAUSE_WORKER
:
2075 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2077 case OMP_CLAUSE_VECTOR
:
2078 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2080 case OMP_CLAUSE_SEQ
:
2083 case OMP_CLAUSE_AUTO
:
2093 if (has_seq
&& (this_mask
|| has_auto
))
2094 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2095 " OpenACC loop specifiers");
2096 else if (has_auto
&& this_mask
)
2097 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2098 " OpenACC loop specifiers");
2100 if (this_mask
& outer_mask
)
2101 error_at (gimple_location (stmt
), "inner loop uses same"
2102 " OpenACC parallelism as containing loop");
2105 return outer_mask
| this_mask
;
2108 /* Scan a GIMPLE_OMP_FOR. */
2110 static omp_context
*
2111 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2115 tree clauses
= gimple_omp_for_clauses (stmt
);
2117 ctx
= new_omp_context (stmt
, outer_ctx
);
2119 if (is_gimple_omp_oacc (stmt
))
2121 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2123 if (!tgt
|| is_oacc_parallel (tgt
))
2124 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2126 char const *check
= NULL
;
2128 switch (OMP_CLAUSE_CODE (c
))
2130 case OMP_CLAUSE_GANG
:
2134 case OMP_CLAUSE_WORKER
:
2138 case OMP_CLAUSE_VECTOR
:
2146 if (check
&& OMP_CLAUSE_OPERAND (c
, 0))
2147 error_at (gimple_location (stmt
),
2148 "argument not permitted on %qs clause in"
2149 " OpenACC %<parallel%>", check
);
2152 if (tgt
&& is_oacc_kernels (tgt
))
2154 /* Strip out reductions, as they are not handled yet. */
2155 tree
*prev_ptr
= &clauses
;
2157 while (tree probe
= *prev_ptr
)
2159 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2161 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2162 *prev_ptr
= *next_ptr
;
2164 prev_ptr
= next_ptr
;
2167 gimple_omp_for_set_clauses (stmt
, clauses
);
2168 check_oacc_kernel_gwv (stmt
, ctx
);
2172 scan_sharing_clauses (clauses
, ctx
);
2174 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2175 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2177 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2178 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2179 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2180 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2182 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2186 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2189 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2190 omp_context
*outer_ctx
)
2192 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2193 gsi_replace (gsi
, bind
, false);
2194 gimple_seq seq
= NULL
;
2195 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2196 tree cond
= create_tmp_var_raw (integer_type_node
);
2197 DECL_CONTEXT (cond
) = current_function_decl
;
2198 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2199 gimple_bind_set_vars (bind
, cond
);
2200 gimple_call_set_lhs (g
, cond
);
2201 gimple_seq_add_stmt (&seq
, g
);
2202 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2203 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2204 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2205 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2206 gimple_seq_add_stmt (&seq
, g
);
2207 g
= gimple_build_label (lab1
);
2208 gimple_seq_add_stmt (&seq
, g
);
2209 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2210 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2211 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2212 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2213 gimple_omp_for_set_clauses (new_stmt
, clause
);
2214 gimple_seq_add_stmt (&seq
, new_stmt
);
2215 g
= gimple_build_goto (lab3
);
2216 gimple_seq_add_stmt (&seq
, g
);
2217 g
= gimple_build_label (lab2
);
2218 gimple_seq_add_stmt (&seq
, g
);
2219 gimple_seq_add_stmt (&seq
, stmt
);
2220 g
= gimple_build_label (lab3
);
2221 gimple_seq_add_stmt (&seq
, g
);
2222 gimple_bind_set_body (bind
, seq
);
2224 scan_omp_for (new_stmt
, outer_ctx
);
2225 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2228 /* Scan an OpenMP sections directive. */
2231 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2235 ctx
= new_omp_context (stmt
, outer_ctx
);
2236 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2237 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2240 /* Scan an OpenMP single directive. */
2243 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2248 ctx
= new_omp_context (stmt
, outer_ctx
);
2249 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2250 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2251 name
= create_tmp_var_name (".omp_copy_s");
2252 name
= build_decl (gimple_location (stmt
),
2253 TYPE_DECL
, name
, ctx
->record_type
);
2254 TYPE_NAME (ctx
->record_type
) = name
;
2256 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2257 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2259 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2260 ctx
->record_type
= NULL
;
2262 layout_type (ctx
->record_type
);
2265 /* Scan a GIMPLE_OMP_TARGET. */
2268 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
2272 bool offloaded
= is_gimple_omp_offloaded (stmt
);
2273 tree clauses
= gimple_omp_target_clauses (stmt
);
2275 ctx
= new_omp_context (stmt
, outer_ctx
);
2276 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2277 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2278 name
= create_tmp_var_name (".omp_data_t");
2279 name
= build_decl (gimple_location (stmt
),
2280 TYPE_DECL
, name
, ctx
->record_type
);
2281 DECL_ARTIFICIAL (name
) = 1;
2282 DECL_NAMELESS (name
) = 1;
2283 TYPE_NAME (ctx
->record_type
) = name
;
2284 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2288 create_omp_child_function (ctx
, false);
2289 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2292 scan_sharing_clauses (clauses
, ctx
);
2293 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2295 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2296 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2299 TYPE_FIELDS (ctx
->record_type
)
2300 = nreverse (TYPE_FIELDS (ctx
->record_type
));
2303 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
2304 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
2306 field
= DECL_CHAIN (field
))
2307 gcc_assert (DECL_ALIGN (field
) == align
);
2309 layout_type (ctx
->record_type
);
2311 fixup_child_record_type (ctx
);
2315 /* Scan an OpenMP teams directive. */
2318 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
2320 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
2321 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2322 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2325 /* Check nesting restrictions. */
2327 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
2331 if (ctx
&& gimple_code (ctx
->stmt
) == GIMPLE_OMP_GRID_BODY
)
2332 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2333 the original copy of its contents. */
2336 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2337 inside an OpenACC CTX. */
2338 if (!(is_gimple_omp (stmt
)
2339 && is_gimple_omp_oacc (stmt
))
2340 /* Except for atomic codes that we share with OpenMP. */
2341 && !(gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2342 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2344 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
2346 error_at (gimple_location (stmt
),
2347 "non-OpenACC construct inside of OpenACC routine");
2351 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
2352 if (is_gimple_omp (octx
->stmt
)
2353 && is_gimple_omp_oacc (octx
->stmt
))
2355 error_at (gimple_location (stmt
),
2356 "non-OpenACC construct inside of OpenACC region");
2363 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
2364 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
2367 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
2369 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2370 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2372 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
2373 && (ctx
->outer
== NULL
2374 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
2375 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
2376 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
2377 != GF_OMP_FOR_KIND_FOR
)
2378 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
2380 error_at (gimple_location (stmt
),
2381 "%<ordered simd threads%> must be closely "
2382 "nested inside of %<for simd%> region");
2388 error_at (gimple_location (stmt
),
2389 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2390 " may not be nested inside %<simd%> region");
2393 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2395 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
2396 || ((gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
)
2397 && (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
)))
2398 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
2400 error_at (gimple_location (stmt
),
2401 "only %<distribute%> or %<parallel%> regions are "
2402 "allowed to be strictly nested inside %<teams%> "
2408 switch (gimple_code (stmt
))
2410 case GIMPLE_OMP_FOR
:
2411 if (gimple_omp_for_kind (stmt
) & GF_OMP_FOR_SIMD
)
2413 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
2415 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
2417 error_at (gimple_location (stmt
),
2418 "%<distribute%> region must be strictly nested "
2419 "inside %<teams%> construct");
2424 /* We split taskloop into task and nested taskloop in it. */
2425 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
2427 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
2432 switch (gimple_code (ctx
->stmt
))
2434 case GIMPLE_OMP_FOR
:
2435 ok
= (gimple_omp_for_kind (ctx
->stmt
)
2436 == GF_OMP_FOR_KIND_OACC_LOOP
);
2439 case GIMPLE_OMP_TARGET
:
2440 switch (gimple_omp_target_kind (ctx
->stmt
))
2442 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
2443 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
2454 else if (oacc_get_fn_attrib (current_function_decl
))
2458 error_at (gimple_location (stmt
),
2459 "OpenACC loop directive must be associated with"
2460 " an OpenACC compute region");
2466 if (is_gimple_call (stmt
)
2467 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2468 == BUILT_IN_GOMP_CANCEL
2469 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2470 == BUILT_IN_GOMP_CANCELLATION_POINT
))
2472 const char *bad
= NULL
;
2473 const char *kind
= NULL
;
2474 const char *construct
2475 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2476 == BUILT_IN_GOMP_CANCEL
)
2477 ? "#pragma omp cancel"
2478 : "#pragma omp cancellation point";
2481 error_at (gimple_location (stmt
), "orphaned %qs construct",
2485 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
2486 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
2490 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
2491 bad
= "#pragma omp parallel";
2492 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2493 == BUILT_IN_GOMP_CANCEL
2494 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2495 ctx
->cancellable
= true;
2499 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2500 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
2501 bad
= "#pragma omp for";
2502 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2503 == BUILT_IN_GOMP_CANCEL
2504 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2506 ctx
->cancellable
= true;
2507 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2509 warning_at (gimple_location (stmt
), 0,
2510 "%<#pragma omp cancel for%> inside "
2511 "%<nowait%> for construct");
2512 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2513 OMP_CLAUSE_ORDERED
))
2514 warning_at (gimple_location (stmt
), 0,
2515 "%<#pragma omp cancel for%> inside "
2516 "%<ordered%> for construct");
2521 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
2522 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
2523 bad
= "#pragma omp sections";
2524 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2525 == BUILT_IN_GOMP_CANCEL
2526 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2528 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
2530 ctx
->cancellable
= true;
2531 if (omp_find_clause (gimple_omp_sections_clauses
2534 warning_at (gimple_location (stmt
), 0,
2535 "%<#pragma omp cancel sections%> inside "
2536 "%<nowait%> sections construct");
2540 gcc_assert (ctx
->outer
2541 && gimple_code (ctx
->outer
->stmt
)
2542 == GIMPLE_OMP_SECTIONS
);
2543 ctx
->outer
->cancellable
= true;
2544 if (omp_find_clause (gimple_omp_sections_clauses
2547 warning_at (gimple_location (stmt
), 0,
2548 "%<#pragma omp cancel sections%> inside "
2549 "%<nowait%> sections construct");
2555 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TASK
)
2556 bad
= "#pragma omp task";
2559 for (omp_context
*octx
= ctx
->outer
;
2560 octx
; octx
= octx
->outer
)
2562 switch (gimple_code (octx
->stmt
))
2564 case GIMPLE_OMP_TASKGROUP
:
2566 case GIMPLE_OMP_TARGET
:
2567 if (gimple_omp_target_kind (octx
->stmt
)
2568 != GF_OMP_TARGET_KIND_REGION
)
2571 case GIMPLE_OMP_PARALLEL
:
2572 case GIMPLE_OMP_TEAMS
:
2573 error_at (gimple_location (stmt
),
2574 "%<%s taskgroup%> construct not closely "
2575 "nested inside of %<taskgroup%> region",
2583 ctx
->cancellable
= true;
2588 error_at (gimple_location (stmt
), "invalid arguments");
2593 error_at (gimple_location (stmt
),
2594 "%<%s %s%> construct not closely nested inside of %qs",
2595 construct
, kind
, bad
);
2600 case GIMPLE_OMP_SECTIONS
:
2601 case GIMPLE_OMP_SINGLE
:
2602 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2603 switch (gimple_code (ctx
->stmt
))
2605 case GIMPLE_OMP_FOR
:
2606 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
2607 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
2610 case GIMPLE_OMP_SECTIONS
:
2611 case GIMPLE_OMP_SINGLE
:
2612 case GIMPLE_OMP_ORDERED
:
2613 case GIMPLE_OMP_MASTER
:
2614 case GIMPLE_OMP_TASK
:
2615 case GIMPLE_OMP_CRITICAL
:
2616 if (is_gimple_call (stmt
))
2618 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2619 != BUILT_IN_GOMP_BARRIER
)
2621 error_at (gimple_location (stmt
),
2622 "barrier region may not be closely nested inside "
2623 "of work-sharing, %<critical%>, %<ordered%>, "
2624 "%<master%>, explicit %<task%> or %<taskloop%> "
2628 error_at (gimple_location (stmt
),
2629 "work-sharing region may not be closely nested inside "
2630 "of work-sharing, %<critical%>, %<ordered%>, "
2631 "%<master%>, explicit %<task%> or %<taskloop%> region");
2633 case GIMPLE_OMP_PARALLEL
:
2634 case GIMPLE_OMP_TEAMS
:
2636 case GIMPLE_OMP_TARGET
:
2637 if (gimple_omp_target_kind (ctx
->stmt
)
2638 == GF_OMP_TARGET_KIND_REGION
)
2645 case GIMPLE_OMP_MASTER
:
2646 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2647 switch (gimple_code (ctx
->stmt
))
2649 case GIMPLE_OMP_FOR
:
2650 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
2651 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
2654 case GIMPLE_OMP_SECTIONS
:
2655 case GIMPLE_OMP_SINGLE
:
2656 case GIMPLE_OMP_TASK
:
2657 error_at (gimple_location (stmt
),
2658 "%<master%> region may not be closely nested inside "
2659 "of work-sharing, explicit %<task%> or %<taskloop%> "
2662 case GIMPLE_OMP_PARALLEL
:
2663 case GIMPLE_OMP_TEAMS
:
2665 case GIMPLE_OMP_TARGET
:
2666 if (gimple_omp_target_kind (ctx
->stmt
)
2667 == GF_OMP_TARGET_KIND_REGION
)
2674 case GIMPLE_OMP_TASK
:
2675 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2676 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
2677 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
2678 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
2680 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
2681 error_at (OMP_CLAUSE_LOCATION (c
),
2682 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2683 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
2687 case GIMPLE_OMP_ORDERED
:
2688 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2689 c
; c
= OMP_CLAUSE_CHAIN (c
))
2691 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
2693 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
2694 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
2697 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
2698 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
2699 || kind
== OMP_CLAUSE_DEPEND_SINK
)
2702 /* Look for containing ordered(N) loop. */
2704 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2706 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2707 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
2709 error_at (OMP_CLAUSE_LOCATION (c
),
2710 "%<ordered%> construct with %<depend%> clause "
2711 "must be closely nested inside an %<ordered%> "
2715 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
2717 error_at (OMP_CLAUSE_LOCATION (c
),
2718 "%<ordered%> construct with %<depend%> clause "
2719 "must be closely nested inside a loop with "
2720 "%<ordered%> clause with a parameter");
2726 error_at (OMP_CLAUSE_LOCATION (c
),
2727 "invalid depend kind in omp %<ordered%> %<depend%>");
2731 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2732 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2734 /* ordered simd must be closely nested inside of simd region,
2735 and simd region must not encounter constructs other than
2736 ordered simd, therefore ordered simd may be either orphaned,
2737 or ctx->stmt must be simd. The latter case is handled already
2741 error_at (gimple_location (stmt
),
2742 "%<ordered%> %<simd%> must be closely nested inside "
2747 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2748 switch (gimple_code (ctx
->stmt
))
2750 case GIMPLE_OMP_CRITICAL
:
2751 case GIMPLE_OMP_TASK
:
2752 case GIMPLE_OMP_ORDERED
:
2753 ordered_in_taskloop
:
2754 error_at (gimple_location (stmt
),
2755 "%<ordered%> region may not be closely nested inside "
2756 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2757 "%<taskloop%> region");
2759 case GIMPLE_OMP_FOR
:
2760 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
2761 goto ordered_in_taskloop
;
2762 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2763 OMP_CLAUSE_ORDERED
) == NULL
)
2765 error_at (gimple_location (stmt
),
2766 "%<ordered%> region must be closely nested inside "
2767 "a loop region with an %<ordered%> clause");
2771 case GIMPLE_OMP_TARGET
:
2772 if (gimple_omp_target_kind (ctx
->stmt
)
2773 != GF_OMP_TARGET_KIND_REGION
)
2776 case GIMPLE_OMP_PARALLEL
:
2777 case GIMPLE_OMP_TEAMS
:
2778 error_at (gimple_location (stmt
),
2779 "%<ordered%> region must be closely nested inside "
2780 "a loop region with an %<ordered%> clause");
2786 case GIMPLE_OMP_CRITICAL
:
2789 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
2790 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2791 if (gomp_critical
*other_crit
2792 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
2793 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
2795 error_at (gimple_location (stmt
),
2796 "%<critical%> region may not be nested inside "
2797 "a %<critical%> region with the same name");
2802 case GIMPLE_OMP_TEAMS
:
2804 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
2805 || gimple_omp_target_kind (ctx
->stmt
) != GF_OMP_TARGET_KIND_REGION
)
2807 error_at (gimple_location (stmt
),
2808 "%<teams%> construct not closely nested inside of "
2809 "%<target%> construct");
2813 case GIMPLE_OMP_TARGET
:
2814 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2815 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
2816 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
2817 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
2819 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
2820 error_at (OMP_CLAUSE_LOCATION (c
),
2821 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2822 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
2825 if (is_gimple_omp_offloaded (stmt
)
2826 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
2828 error_at (gimple_location (stmt
),
2829 "OpenACC region inside of OpenACC routine, nested "
2830 "parallelism not supported yet");
2833 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2835 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
2837 if (is_gimple_omp (stmt
)
2838 && is_gimple_omp_oacc (stmt
)
2839 && is_gimple_omp (ctx
->stmt
))
2841 error_at (gimple_location (stmt
),
2842 "OpenACC construct inside of non-OpenACC region");
2848 const char *stmt_name
, *ctx_stmt_name
;
2849 switch (gimple_omp_target_kind (stmt
))
2851 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
2852 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
2853 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
2854 case GF_OMP_TARGET_KIND_ENTER_DATA
:
2855 stmt_name
= "target enter data"; break;
2856 case GF_OMP_TARGET_KIND_EXIT_DATA
:
2857 stmt_name
= "target exit data"; break;
2858 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
2859 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
2860 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
2861 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
2862 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
2863 stmt_name
= "enter/exit data"; break;
2864 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
2866 default: gcc_unreachable ();
2868 switch (gimple_omp_target_kind (ctx
->stmt
))
2870 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
2871 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
2872 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
2873 ctx_stmt_name
= "parallel"; break;
2874 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
2875 ctx_stmt_name
= "kernels"; break;
2876 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
2877 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
2878 ctx_stmt_name
= "host_data"; break;
2879 default: gcc_unreachable ();
2882 /* OpenACC/OpenMP mismatch? */
2883 if (is_gimple_omp_oacc (stmt
)
2884 != is_gimple_omp_oacc (ctx
->stmt
))
2886 error_at (gimple_location (stmt
),
2887 "%s %qs construct inside of %s %qs region",
2888 (is_gimple_omp_oacc (stmt
)
2889 ? "OpenACC" : "OpenMP"), stmt_name
,
2890 (is_gimple_omp_oacc (ctx
->stmt
)
2891 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
2894 if (is_gimple_omp_offloaded (ctx
->stmt
))
2896 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2897 if (is_gimple_omp_oacc (ctx
->stmt
))
2899 error_at (gimple_location (stmt
),
2900 "%qs construct inside of %qs region",
2901 stmt_name
, ctx_stmt_name
);
2906 warning_at (gimple_location (stmt
), 0,
2907 "%qs construct inside of %qs region",
2908 stmt_name
, ctx_stmt_name
);
2920 /* Helper function scan_omp.
2922 Callback for walk_tree or operators in walk_gimple_stmt used to
2923 scan for OMP directives in TP. */
2926 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
2928 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
2929 omp_context
*ctx
= (omp_context
*) wi
->info
;
2932 switch (TREE_CODE (t
))
2940 tree repl
= remap_decl (t
, &ctx
->cb
);
2941 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
2947 if (ctx
&& TYPE_P (t
))
2948 *tp
= remap_type (t
, &ctx
->cb
);
2949 else if (!DECL_P (t
))
2954 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
2955 if (tem
!= TREE_TYPE (t
))
2957 if (TREE_CODE (t
) == INTEGER_CST
)
2958 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
2960 TREE_TYPE (t
) = tem
;
2970 /* Return true if FNDECL is a setjmp or a longjmp. */
2973 setjmp_or_longjmp_p (const_tree fndecl
)
2975 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2976 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_SETJMP
2977 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_LONGJMP
))
2980 tree declname
= DECL_NAME (fndecl
);
2983 const char *name
= IDENTIFIER_POINTER (declname
);
2984 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
2988 /* Helper function for scan_omp.
2990 Callback for walk_gimple_stmt used to scan for OMP directives in
2991 the current statement in GSI. */
2994 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2995 struct walk_stmt_info
*wi
)
2997 gimple
*stmt
= gsi_stmt (*gsi
);
2998 omp_context
*ctx
= (omp_context
*) wi
->info
;
3000 if (gimple_has_location (stmt
))
3001 input_location
= gimple_location (stmt
);
3003 /* Check the nesting restrictions. */
3004 bool remove
= false;
3005 if (is_gimple_omp (stmt
))
3006 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3007 else if (is_gimple_call (stmt
))
3009 tree fndecl
= gimple_call_fndecl (stmt
);
3012 if (setjmp_or_longjmp_p (fndecl
)
3014 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3015 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
3018 error_at (gimple_location (stmt
),
3019 "setjmp/longjmp inside simd construct");
3021 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3022 switch (DECL_FUNCTION_CODE (fndecl
))
3024 case BUILT_IN_GOMP_BARRIER
:
3025 case BUILT_IN_GOMP_CANCEL
:
3026 case BUILT_IN_GOMP_CANCELLATION_POINT
:
3027 case BUILT_IN_GOMP_TASKYIELD
:
3028 case BUILT_IN_GOMP_TASKWAIT
:
3029 case BUILT_IN_GOMP_TASKGROUP_START
:
3030 case BUILT_IN_GOMP_TASKGROUP_END
:
3031 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3040 stmt
= gimple_build_nop ();
3041 gsi_replace (gsi
, stmt
, false);
3044 *handled_ops_p
= true;
3046 switch (gimple_code (stmt
))
3048 case GIMPLE_OMP_PARALLEL
:
3049 taskreg_nesting_level
++;
3050 scan_omp_parallel (gsi
, ctx
);
3051 taskreg_nesting_level
--;
3054 case GIMPLE_OMP_TASK
:
3055 taskreg_nesting_level
++;
3056 scan_omp_task (gsi
, ctx
);
3057 taskreg_nesting_level
--;
3060 case GIMPLE_OMP_FOR
:
3061 if (((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3062 & GF_OMP_FOR_KIND_MASK
) == GF_OMP_FOR_KIND_SIMD
)
3063 && omp_maybe_offloaded_ctx (ctx
)
3064 && omp_max_simt_vf ())
3065 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3067 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
3070 case GIMPLE_OMP_SECTIONS
:
3071 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
3074 case GIMPLE_OMP_SINGLE
:
3075 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
3078 case GIMPLE_OMP_SECTION
:
3079 case GIMPLE_OMP_MASTER
:
3080 case GIMPLE_OMP_TASKGROUP
:
3081 case GIMPLE_OMP_ORDERED
:
3082 case GIMPLE_OMP_CRITICAL
:
3083 case GIMPLE_OMP_GRID_BODY
:
3084 ctx
= new_omp_context (stmt
, ctx
);
3085 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3088 case GIMPLE_OMP_TARGET
:
3089 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3092 case GIMPLE_OMP_TEAMS
:
3093 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3100 *handled_ops_p
= false;
3102 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
3104 var
= DECL_CHAIN (var
))
3105 insert_decl_map (&ctx
->cb
, var
, var
);
3109 *handled_ops_p
= false;
3117 /* Scan all the statements starting at the current statement. CTX
3118 contains context information about the OMP directives and
3119 clauses found during the scan. */
3122 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
3124 location_t saved_location
;
3125 struct walk_stmt_info wi
;
3127 memset (&wi
, 0, sizeof (wi
));
3129 wi
.want_locations
= true;
3131 saved_location
= input_location
;
3132 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
3133 input_location
= saved_location
;
3136 /* Re-gimplification and code generation routines. */
3138 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3139 of BIND if in a method. */
3142 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
3144 if (DECL_ARGUMENTS (current_function_decl
)
3145 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
3146 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
3149 tree vars
= gimple_bind_vars (bind
);
3150 for (tree
*pvar
= &vars
; *pvar
; )
3151 if (omp_member_access_dummy_var (*pvar
))
3152 *pvar
= DECL_CHAIN (*pvar
);
3154 pvar
= &DECL_CHAIN (*pvar
);
3155 gimple_bind_set_vars (bind
, vars
);
3159 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3160 block and its subblocks. */
3163 remove_member_access_dummy_vars (tree block
)
3165 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
3166 if (omp_member_access_dummy_var (*pvar
))
3167 *pvar
= DECL_CHAIN (*pvar
);
3169 pvar
= &DECL_CHAIN (*pvar
);
3171 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
3172 remove_member_access_dummy_vars (block
);
3175 /* If a context was created for STMT when it was scanned, return it. */
3177 static omp_context
*
3178 maybe_lookup_ctx (gimple
*stmt
)
3181 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
3182 return n
? (omp_context
*) n
->value
: NULL
;
3186 /* Find the mapping for DECL in CTX or the immediately enclosing
3187 context that has a mapping for DECL.
3189 If CTX is a nested parallel directive, we may have to use the decl
3190 mappings created in CTX's parent context. Suppose that we have the
3191 following parallel nesting (variable UIDs showed for clarity):
3194 #omp parallel shared(iD.1562) -> outer parallel
3195 iD.1562 = iD.1562 + 1;
3197 #omp parallel shared (iD.1562) -> inner parallel
3198 iD.1562 = iD.1562 - 1;
3200 Each parallel structure will create a distinct .omp_data_s structure
3201 for copying iD.1562 in/out of the directive:
3203 outer parallel .omp_data_s.1.i -> iD.1562
3204 inner parallel .omp_data_s.2.i -> iD.1562
3206 A shared variable mapping will produce a copy-out operation before
3207 the parallel directive and a copy-in operation after it. So, in
3208 this case we would have:
3211 .omp_data_o.1.i = iD.1562;
3212 #omp parallel shared(iD.1562) -> outer parallel
3213 .omp_data_i.1 = &.omp_data_o.1
3214 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3216 .omp_data_o.2.i = iD.1562; -> **
3217 #omp parallel shared(iD.1562) -> inner parallel
3218 .omp_data_i.2 = &.omp_data_o.2
3219 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3222 ** This is a problem. The symbol iD.1562 cannot be referenced
3223 inside the body of the outer parallel region. But since we are
3224 emitting this copy operation while expanding the inner parallel
3225 directive, we need to access the CTX structure of the outer
3226 parallel directive to get the correct mapping:
3228 .omp_data_o.2.i = .omp_data_i.1->i
3230 Since there may be other workshare or parallel directives enclosing
3231 the parallel directive, it may be necessary to walk up the context
3232 parent chain. This is not a problem in general because nested
3233 parallelism happens only rarely. */
3236 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3241 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3242 t
= maybe_lookup_decl (decl
, up
);
3244 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
3246 return t
? t
: decl
;
3250 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3251 in outer contexts. */
3254 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3259 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3260 t
= maybe_lookup_decl (decl
, up
);
3262 return t
? t
: decl
;
3266 /* Construct the initialization value for reduction operation OP. */
3269 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
3278 case TRUTH_ORIF_EXPR
:
3279 case TRUTH_XOR_EXPR
:
3281 return build_zero_cst (type
);
3284 case TRUTH_AND_EXPR
:
3285 case TRUTH_ANDIF_EXPR
:
3287 return fold_convert_loc (loc
, type
, integer_one_node
);
3290 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
3293 if (SCALAR_FLOAT_TYPE_P (type
))
3295 REAL_VALUE_TYPE max
, min
;
3296 if (HONOR_INFINITIES (type
))
3299 real_arithmetic (&min
, NEGATE_EXPR
, &max
, NULL
);
3302 real_maxval (&min
, 1, TYPE_MODE (type
));
3303 return build_real (type
, min
);
3305 else if (POINTER_TYPE_P (type
))
3308 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3309 return wide_int_to_tree (type
, min
);
3313 gcc_assert (INTEGRAL_TYPE_P (type
));
3314 return TYPE_MIN_VALUE (type
);
3318 if (SCALAR_FLOAT_TYPE_P (type
))
3320 REAL_VALUE_TYPE max
;
3321 if (HONOR_INFINITIES (type
))
3324 real_maxval (&max
, 0, TYPE_MODE (type
));
3325 return build_real (type
, max
);
3327 else if (POINTER_TYPE_P (type
))
3330 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3331 return wide_int_to_tree (type
, max
);
3335 gcc_assert (INTEGRAL_TYPE_P (type
));
3336 return TYPE_MAX_VALUE (type
);
3344 /* Construct the initialization value for reduction CLAUSE. */
3347 omp_reduction_init (tree clause
, tree type
)
3349 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
3350 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
3353 /* Return alignment to be assumed for var in CLAUSE, which should be
3354 OMP_CLAUSE_ALIGNED. */
3357 omp_clause_aligned_alignment (tree clause
)
3359 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
3360 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
3362 /* Otherwise return implementation defined alignment. */
3363 unsigned int al
= 1;
3364 opt_scalar_mode mode_iter
;
3365 auto_vector_sizes sizes
;
3366 targetm
.vectorize
.autovectorize_vector_sizes (&sizes
);
3368 for (unsigned int i
= 0; i
< sizes
.length (); ++i
)
3369 vs
= ordered_max (vs
, sizes
[i
]);
3370 static enum mode_class classes
[]
3371 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
3372 for (int i
= 0; i
< 4; i
+= 2)
3373 /* The for loop above dictates that we only walk through scalar classes. */
3374 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
3376 scalar_mode mode
= mode_iter
.require ();
3377 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
3378 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
3380 while (maybe_ne (vs
, 0U)
3381 && known_lt (GET_MODE_SIZE (vmode
), vs
)
3382 && GET_MODE_2XWIDER_MODE (vmode
).exists ())
3383 vmode
= GET_MODE_2XWIDER_MODE (vmode
).require ();
3385 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
3386 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
3388 poly_uint64 nelts
= exact_div (GET_MODE_SIZE (vmode
),
3389 GET_MODE_SIZE (mode
));
3390 type
= build_vector_type (type
, nelts
);
3391 if (TYPE_MODE (type
) != vmode
)
3393 if (TYPE_ALIGN_UNIT (type
) > al
)
3394 al
= TYPE_ALIGN_UNIT (type
);
3396 return build_int_cst (integer_type_node
, al
);
3400 /* This structure is part of the interface between lower_rec_simd_input_clauses
3401 and lower_rec_input_clauses. */
3403 struct omplow_simd_context
{
3404 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3407 vec
<tree
, va_heap
> simt_eargs
;
3408 gimple_seq simt_dlist
;
3409 poly_uint64_pod max_vf
;
3413 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3417 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
3418 omplow_simd_context
*sctx
, tree
&ivar
, tree
&lvar
)
3420 if (known_eq (sctx
->max_vf
, 0U))
3422 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
3423 if (maybe_gt (sctx
->max_vf
, 1U))
3425 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3426 OMP_CLAUSE_SAFELEN
);
3429 poly_uint64 safe_len
;
3430 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
3431 || maybe_lt (safe_len
, 1U))
3434 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
3437 if (maybe_gt (sctx
->max_vf
, 1U))
3439 sctx
->idx
= create_tmp_var (unsigned_type_node
);
3440 sctx
->lane
= create_tmp_var (unsigned_type_node
);
3443 if (known_eq (sctx
->max_vf
, 1U))
3448 if (is_gimple_reg (new_var
))
3450 ivar
= lvar
= new_var
;
3453 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
3454 ivar
= lvar
= create_tmp_var (type
);
3455 TREE_ADDRESSABLE (ivar
) = 1;
3456 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
3457 NULL
, DECL_ATTRIBUTES (ivar
));
3458 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
3459 tree clobber
= build_constructor (type
, NULL
);
3460 TREE_THIS_VOLATILE (clobber
) = 1;
3461 gimple
*g
= gimple_build_assign (ivar
, clobber
);
3462 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
3466 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
3467 tree avar
= create_tmp_var_raw (atype
);
3468 if (TREE_ADDRESSABLE (new_var
))
3469 TREE_ADDRESSABLE (avar
) = 1;
3470 DECL_ATTRIBUTES (avar
)
3471 = tree_cons (get_identifier ("omp simd array"), NULL
,
3472 DECL_ATTRIBUTES (avar
));
3473 gimple_add_tmp_var (avar
);
3474 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->idx
,
3475 NULL_TREE
, NULL_TREE
);
3476 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
3477 NULL_TREE
, NULL_TREE
);
3479 if (DECL_P (new_var
))
3481 SET_DECL_VALUE_EXPR (new_var
, lvar
);
3482 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
3487 /* Helper function of lower_rec_input_clauses. For a reference
3488 in simd reduction, add an underlying variable it will reference. */
3491 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
3493 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
3494 if (TREE_CONSTANT (z
))
3496 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
3497 get_name (new_vard
));
3498 gimple_add_tmp_var (z
);
3499 TREE_ADDRESSABLE (z
) = 1;
3500 z
= build_fold_addr_expr_loc (loc
, z
);
3501 gimplify_assign (new_vard
, z
, ilist
);
3505 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3506 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3507 private variables. Initialization statements go in ILIST, while calls
3508 to destructors go in DLIST. */
3511 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
3512 omp_context
*ctx
, struct omp_for_data
*fd
)
3514 tree c
, dtor
, copyin_seq
, x
, ptr
;
3515 bool copyin_by_ref
= false;
3516 bool lastprivate_firstprivate
= false;
3517 bool reduction_omp_orig_ref
= false;
3519 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3520 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
);
3521 omplow_simd_context sctx
= omplow_simd_context ();
3522 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
3523 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
3524 gimple_seq llist
[3] = { };
3527 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
3529 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3530 with data sharing clauses referencing variable sized vars. That
3531 is unnecessarily hard to support and very unlikely to result in
3532 vectorized code anyway. */
3534 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3535 switch (OMP_CLAUSE_CODE (c
))
3537 case OMP_CLAUSE_LINEAR
:
3538 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
3541 case OMP_CLAUSE_PRIVATE
:
3542 case OMP_CLAUSE_FIRSTPRIVATE
:
3543 case OMP_CLAUSE_LASTPRIVATE
:
3544 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
3547 case OMP_CLAUSE_REDUCTION
:
3548 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
3549 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
3556 /* Add a placeholder for simduid. */
3557 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
3558 sctx
.simt_eargs
.safe_push (NULL_TREE
);
3560 /* Do all the fixed sized types in the first pass, and the variable sized
3561 types in the second pass. This makes sure that the scalar arguments to
3562 the variable sized types are processed before we use them in the
3563 variable sized operations. */
3564 for (pass
= 0; pass
< 2; ++pass
)
3566 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3568 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
3571 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
3575 case OMP_CLAUSE_PRIVATE
:
3576 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
3579 case OMP_CLAUSE_SHARED
:
3580 /* Ignore shared directives in teams construct. */
3581 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
3583 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
3585 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
3586 || is_global_var (OMP_CLAUSE_DECL (c
)));
3589 case OMP_CLAUSE_FIRSTPRIVATE
:
3590 case OMP_CLAUSE_COPYIN
:
3592 case OMP_CLAUSE_LINEAR
:
3593 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
3594 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
3595 lastprivate_firstprivate
= true;
3597 case OMP_CLAUSE_REDUCTION
:
3598 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
3599 reduction_omp_orig_ref
= true;
3601 case OMP_CLAUSE__LOOPTEMP_
:
3602 /* Handle _looptemp_ clauses only on parallel/task. */
3606 case OMP_CLAUSE_LASTPRIVATE
:
3607 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
3609 lastprivate_firstprivate
= true;
3610 if (pass
!= 0 || is_taskloop_ctx (ctx
))
3613 /* Even without corresponding firstprivate, if
3614 decl is Fortran allocatable, it needs outer var
3617 && lang_hooks
.decls
.omp_private_outer_ref
3618 (OMP_CLAUSE_DECL (c
)))
3619 lastprivate_firstprivate
= true;
3621 case OMP_CLAUSE_ALIGNED
:
3624 var
= OMP_CLAUSE_DECL (c
);
3625 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
3626 && !is_global_var (var
))
3628 new_var
= maybe_lookup_decl (var
, ctx
);
3629 if (new_var
== NULL_TREE
)
3630 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
3631 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
3632 tree alarg
= omp_clause_aligned_alignment (c
);
3633 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
3634 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
3635 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
3636 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
3637 gimplify_and_add (x
, ilist
);
3639 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
3640 && is_global_var (var
))
3642 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
3643 new_var
= lookup_decl (var
, ctx
);
3644 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
3645 t
= build_fold_addr_expr_loc (clause_loc
, t
);
3646 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
3647 tree alarg
= omp_clause_aligned_alignment (c
);
3648 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
3649 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
3650 t
= fold_convert_loc (clause_loc
, ptype
, t
);
3651 x
= create_tmp_var (ptype
);
3652 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
3653 gimplify_and_add (t
, ilist
);
3654 t
= build_simple_mem_ref_loc (clause_loc
, x
);
3655 SET_DECL_VALUE_EXPR (new_var
, t
);
3656 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
3663 new_var
= var
= OMP_CLAUSE_DECL (c
);
3664 if (c_kind
== OMP_CLAUSE_REDUCTION
&& TREE_CODE (var
) == MEM_REF
)
3666 var
= TREE_OPERAND (var
, 0);
3667 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
3668 var
= TREE_OPERAND (var
, 0);
3669 if (TREE_CODE (var
) == INDIRECT_REF
3670 || TREE_CODE (var
) == ADDR_EXPR
)
3671 var
= TREE_OPERAND (var
, 0);
3672 if (is_variable_sized (var
))
3674 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
3675 var
= DECL_VALUE_EXPR (var
);
3676 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
3677 var
= TREE_OPERAND (var
, 0);
3678 gcc_assert (DECL_P (var
));
3682 if (c_kind
!= OMP_CLAUSE_COPYIN
)
3683 new_var
= lookup_decl (var
, ctx
);
3685 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
3690 /* C/C++ array section reductions. */
3691 else if (c_kind
== OMP_CLAUSE_REDUCTION
3692 && var
!= OMP_CLAUSE_DECL (c
))
3697 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
3698 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
3699 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
3701 tree b
= TREE_OPERAND (orig_var
, 1);
3702 b
= maybe_lookup_decl (b
, ctx
);
3705 b
= TREE_OPERAND (orig_var
, 1);
3706 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
3708 if (integer_zerop (bias
))
3712 bias
= fold_convert_loc (clause_loc
,
3713 TREE_TYPE (b
), bias
);
3714 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
3715 TREE_TYPE (b
), b
, bias
);
3717 orig_var
= TREE_OPERAND (orig_var
, 0);
3719 if (TREE_CODE (orig_var
) == INDIRECT_REF
3720 || TREE_CODE (orig_var
) == ADDR_EXPR
)
3721 orig_var
= TREE_OPERAND (orig_var
, 0);
3722 tree d
= OMP_CLAUSE_DECL (c
);
3723 tree type
= TREE_TYPE (d
);
3724 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
3725 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
3726 const char *name
= get_name (orig_var
);
3727 if (TREE_CONSTANT (v
))
3729 x
= create_tmp_var_raw (type
, name
);
3730 gimple_add_tmp_var (x
);
3731 TREE_ADDRESSABLE (x
) = 1;
3732 x
= build_fold_addr_expr_loc (clause_loc
, x
);
3737 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
3738 tree t
= maybe_lookup_decl (v
, ctx
);
3742 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
3743 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
3744 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
3746 build_int_cst (TREE_TYPE (v
), 1));
3747 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
3749 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
3750 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
3751 x
= build_call_expr_loc (clause_loc
, atmp
, 2, t
, al
);
3754 tree ptype
= build_pointer_type (TREE_TYPE (type
));
3755 x
= fold_convert_loc (clause_loc
, ptype
, x
);
3756 tree y
= create_tmp_var (ptype
, name
);
3757 gimplify_assign (y
, x
, ilist
);
3761 if (!integer_zerop (bias
))
3763 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
3765 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
3767 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
3768 pointer_sized_int_node
, yb
, bias
);
3769 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
3770 yb
= create_tmp_var (ptype
, name
);
3771 gimplify_assign (yb
, x
, ilist
);
3775 d
= TREE_OPERAND (d
, 0);
3776 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
3777 d
= TREE_OPERAND (d
, 0);
3778 if (TREE_CODE (d
) == ADDR_EXPR
)
3780 if (orig_var
!= var
)
3782 gcc_assert (is_variable_sized (orig_var
));
3783 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
3785 gimplify_assign (new_var
, x
, ilist
);
3786 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
3787 tree t
= build_fold_indirect_ref (new_var
);
3788 DECL_IGNORED_P (new_var
) = 0;
3789 TREE_THIS_NOTRAP (t
);
3790 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
3791 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
3795 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
3796 build_int_cst (ptype
, 0));
3797 SET_DECL_VALUE_EXPR (new_var
, x
);
3798 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
3803 gcc_assert (orig_var
== var
);
3804 if (TREE_CODE (d
) == INDIRECT_REF
)
3806 x
= create_tmp_var (ptype
, name
);
3807 TREE_ADDRESSABLE (x
) = 1;
3808 gimplify_assign (x
, yb
, ilist
);
3809 x
= build_fold_addr_expr_loc (clause_loc
, x
);
3811 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
3812 gimplify_assign (new_var
, x
, ilist
);
3814 tree y1
= create_tmp_var (ptype
, NULL
);
3815 gimplify_assign (y1
, y
, ilist
);
3816 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
3817 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
3818 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
3819 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
3821 y2
= create_tmp_var (ptype
, NULL
);
3822 gimplify_assign (y2
, y
, ilist
);
3823 tree ref
= build_outer_var_ref (var
, ctx
);
3824 /* For ref build_outer_var_ref already performs this. */
3825 if (TREE_CODE (d
) == INDIRECT_REF
)
3826 gcc_assert (omp_is_reference (var
));
3827 else if (TREE_CODE (d
) == ADDR_EXPR
)
3828 ref
= build_fold_addr_expr (ref
);
3829 else if (omp_is_reference (var
))
3830 ref
= build_fold_addr_expr (ref
);
3831 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
3832 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
3833 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
3835 y3
= create_tmp_var (ptype
, NULL
);
3836 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
3840 y4
= create_tmp_var (ptype
, NULL
);
3841 gimplify_assign (y4
, ref
, dlist
);
3844 tree i
= create_tmp_var (TREE_TYPE (v
), NULL
);
3845 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
3846 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
3847 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
3848 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
3851 i2
= create_tmp_var (TREE_TYPE (v
), NULL
);
3852 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
3853 body2
= create_artificial_label (UNKNOWN_LOCATION
);
3854 end2
= create_artificial_label (UNKNOWN_LOCATION
);
3855 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
3857 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
3859 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
3860 tree decl_placeholder
3861 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
3862 SET_DECL_VALUE_EXPR (decl_placeholder
,
3863 build_simple_mem_ref (y1
));
3864 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
3865 SET_DECL_VALUE_EXPR (placeholder
,
3866 y3
? build_simple_mem_ref (y3
)
3868 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
3869 x
= lang_hooks
.decls
.omp_clause_default_ctor
3870 (c
, build_simple_mem_ref (y1
),
3871 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
3873 gimplify_and_add (x
, ilist
);
3874 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
3876 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
3877 lower_omp (&tseq
, ctx
);
3878 gimple_seq_add_seq (ilist
, tseq
);
3880 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
3883 SET_DECL_VALUE_EXPR (decl_placeholder
,
3884 build_simple_mem_ref (y2
));
3885 SET_DECL_VALUE_EXPR (placeholder
,
3886 build_simple_mem_ref (y4
));
3887 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
3888 lower_omp (&tseq
, ctx
);
3889 gimple_seq_add_seq (dlist
, tseq
);
3890 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
3892 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
3893 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
3894 x
= lang_hooks
.decls
.omp_clause_dtor
3895 (c
, build_simple_mem_ref (y2
));
3898 gimple_seq tseq
= NULL
;
3900 gimplify_stmt (&dtor
, &tseq
);
3901 gimple_seq_add_seq (dlist
, tseq
);
3906 x
= omp_reduction_init (c
, TREE_TYPE (type
));
3907 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
3909 /* reduction(-:var) sums up the partial results, so it
3910 acts identically to reduction(+:var). */
3911 if (code
== MINUS_EXPR
)
3914 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
3917 x
= build2 (code
, TREE_TYPE (type
),
3918 build_simple_mem_ref (y4
),
3919 build_simple_mem_ref (y2
));
3920 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
3924 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
3925 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
3926 gimple_seq_add_stmt (ilist
, g
);
3929 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
3930 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
3931 gimple_seq_add_stmt (ilist
, g
);
3933 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
3934 build_int_cst (TREE_TYPE (i
), 1));
3935 gimple_seq_add_stmt (ilist
, g
);
3936 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
3937 gimple_seq_add_stmt (ilist
, g
);
3938 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
3941 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
3942 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
3943 gimple_seq_add_stmt (dlist
, g
);
3946 g
= gimple_build_assign
3947 (y4
, POINTER_PLUS_EXPR
, y4
,
3948 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
3949 gimple_seq_add_stmt (dlist
, g
);
3951 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
3952 build_int_cst (TREE_TYPE (i2
), 1));
3953 gimple_seq_add_stmt (dlist
, g
);
3954 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
3955 gimple_seq_add_stmt (dlist
, g
);
3956 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
3960 else if (is_variable_sized (var
))
3962 /* For variable sized types, we need to allocate the
3963 actual storage here. Call alloca and store the
3964 result in the pointer decl that we created elsewhere. */
3968 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
3973 ptr
= DECL_VALUE_EXPR (new_var
);
3974 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
3975 ptr
= TREE_OPERAND (ptr
, 0);
3976 gcc_assert (DECL_P (ptr
));
3977 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
3979 /* void *tmp = __builtin_alloca */
3980 atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
3981 stmt
= gimple_build_call (atmp
, 2, x
,
3982 size_int (DECL_ALIGN (var
)));
3983 tmp
= create_tmp_var_raw (ptr_type_node
);
3984 gimple_add_tmp_var (tmp
);
3985 gimple_call_set_lhs (stmt
, tmp
);
3987 gimple_seq_add_stmt (ilist
, stmt
);
3989 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
3990 gimplify_assign (ptr
, x
, ilist
);
3993 else if (omp_is_reference (var
))
3995 /* For references that are being privatized for Fortran,
3996 allocate new backing storage for the new pointer
3997 variable. This allows us to avoid changing all the
3998 code that expects a pointer to something that expects
3999 a direct variable. */
4003 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
4004 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
4006 x
= build_receiver_ref (var
, false, ctx
);
4007 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4009 else if (TREE_CONSTANT (x
))
4011 /* For reduction in SIMD loop, defer adding the
4012 initialization of the reference, because if we decide
4013 to use SIMD array for it, the initilization could cause
4015 if (c_kind
== OMP_CLAUSE_REDUCTION
&& is_simd
)
4019 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
4021 gimple_add_tmp_var (x
);
4022 TREE_ADDRESSABLE (x
) = 1;
4023 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4029 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4030 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
4031 tree al
= size_int (TYPE_ALIGN (rtype
));
4032 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
4037 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4038 gimplify_assign (new_var
, x
, ilist
);
4041 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
4043 else if (c_kind
== OMP_CLAUSE_REDUCTION
4044 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4052 switch (OMP_CLAUSE_CODE (c
))
4054 case OMP_CLAUSE_SHARED
:
4055 /* Ignore shared directives in teams construct. */
4056 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
4058 /* Shared global vars are just accessed directly. */
4059 if (is_global_var (new_var
))
4061 /* For taskloop firstprivate/lastprivate, represented
4062 as firstprivate and shared clause on the task, new_var
4063 is the firstprivate var. */
4064 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
4066 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4067 needs to be delayed until after fixup_child_record_type so
4068 that we get the correct type during the dereference. */
4069 by_ref
= use_pointer_for_field (var
, ctx
);
4070 x
= build_receiver_ref (var
, by_ref
, ctx
);
4071 SET_DECL_VALUE_EXPR (new_var
, x
);
4072 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4074 /* ??? If VAR is not passed by reference, and the variable
4075 hasn't been initialized yet, then we'll get a warning for
4076 the store into the omp_data_s structure. Ideally, we'd be
4077 able to notice this and not store anything at all, but
4078 we're generating code too early. Suppress the warning. */
4080 TREE_NO_WARNING (var
) = 1;
4083 case OMP_CLAUSE_LASTPRIVATE
:
4084 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4088 case OMP_CLAUSE_PRIVATE
:
4089 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
4090 x
= build_outer_var_ref (var
, ctx
);
4091 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
4093 if (is_task_ctx (ctx
))
4094 x
= build_receiver_ref (var
, false, ctx
);
4096 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
4102 nx
= lang_hooks
.decls
.omp_clause_default_ctor
4103 (c
, unshare_expr (new_var
), x
);
4106 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
4107 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
4108 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
4109 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
4113 x
= lang_hooks
.decls
.omp_clause_default_ctor
4114 (c
, unshare_expr (ivar
), x
);
4116 gimplify_and_add (x
, &llist
[0]);
4119 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
4122 gimple_seq tseq
= NULL
;
4125 gimplify_stmt (&dtor
, &tseq
);
4126 gimple_seq_add_seq (&llist
[1], tseq
);
4133 gimplify_and_add (nx
, ilist
);
4137 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
4140 gimple_seq tseq
= NULL
;
4143 gimplify_stmt (&dtor
, &tseq
);
4144 gimple_seq_add_seq (dlist
, tseq
);
4148 case OMP_CLAUSE_LINEAR
:
4149 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
4150 goto do_firstprivate
;
4151 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4154 x
= build_outer_var_ref (var
, ctx
);
4157 case OMP_CLAUSE_FIRSTPRIVATE
:
4158 if (is_task_ctx (ctx
))
4160 if (omp_is_reference (var
) || is_variable_sized (var
))
4162 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
4164 || use_pointer_for_field (var
, NULL
))
4166 x
= build_receiver_ref (var
, false, ctx
);
4167 SET_DECL_VALUE_EXPR (new_var
, x
);
4168 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4173 x
= build_outer_var_ref (var
, ctx
);
4176 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
4177 && gimple_omp_for_combined_into_p (ctx
->stmt
))
4179 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
4180 tree stept
= TREE_TYPE (t
);
4181 tree ct
= omp_find_clause (clauses
,
4182 OMP_CLAUSE__LOOPTEMP_
);
4184 tree l
= OMP_CLAUSE_DECL (ct
);
4185 tree n1
= fd
->loop
.n1
;
4186 tree step
= fd
->loop
.step
;
4187 tree itype
= TREE_TYPE (l
);
4188 if (POINTER_TYPE_P (itype
))
4189 itype
= signed_type_for (itype
);
4190 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
4191 if (TYPE_UNSIGNED (itype
)
4192 && fd
->loop
.cond_code
== GT_EXPR
)
4193 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
4194 fold_build1 (NEGATE_EXPR
, itype
, l
),
4195 fold_build1 (NEGATE_EXPR
,
4198 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
4199 t
= fold_build2 (MULT_EXPR
, stept
,
4200 fold_convert (stept
, l
), t
);
4202 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4204 x
= lang_hooks
.decls
.omp_clause_linear_ctor
4206 gimplify_and_add (x
, ilist
);
4210 if (POINTER_TYPE_P (TREE_TYPE (x
)))
4211 x
= fold_build2 (POINTER_PLUS_EXPR
,
4212 TREE_TYPE (x
), x
, t
);
4214 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
4217 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
4218 || TREE_ADDRESSABLE (new_var
))
4219 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
4222 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
4224 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
4225 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
4226 gimplify_and_add (x
, ilist
);
4227 gimple_stmt_iterator gsi
4228 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
4230 = gimple_build_assign (unshare_expr (lvar
), iv
);
4231 gsi_insert_before_without_update (&gsi
, g
,
4233 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
4234 enum tree_code code
= PLUS_EXPR
;
4235 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
4236 code
= POINTER_PLUS_EXPR
;
4237 g
= gimple_build_assign (iv
, code
, iv
, t
);
4238 gsi_insert_before_without_update (&gsi
, g
,
4242 x
= lang_hooks
.decls
.omp_clause_copy_ctor
4243 (c
, unshare_expr (ivar
), x
);
4244 gimplify_and_add (x
, &llist
[0]);
4245 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
4248 gimple_seq tseq
= NULL
;
4251 gimplify_stmt (&dtor
, &tseq
);
4252 gimple_seq_add_seq (&llist
[1], tseq
);
4257 x
= lang_hooks
.decls
.omp_clause_copy_ctor
4258 (c
, unshare_expr (new_var
), x
);
4259 gimplify_and_add (x
, ilist
);
4262 case OMP_CLAUSE__LOOPTEMP_
:
4263 gcc_assert (is_taskreg_ctx (ctx
));
4264 x
= build_outer_var_ref (var
, ctx
);
4265 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4266 gimplify_and_add (x
, ilist
);
4269 case OMP_CLAUSE_COPYIN
:
4270 by_ref
= use_pointer_for_field (var
, NULL
);
4271 x
= build_receiver_ref (var
, by_ref
, ctx
);
4272 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
4273 append_to_statement_list (x
, ©in_seq
);
4274 copyin_by_ref
|= by_ref
;
4277 case OMP_CLAUSE_REDUCTION
:
4278 /* OpenACC reductions are initialized using the
4279 GOACC_REDUCTION internal function. */
4280 if (is_gimple_omp_oacc (ctx
->stmt
))
4282 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4284 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
4286 x
= build_outer_var_ref (var
, ctx
);
4288 if (omp_is_reference (var
)
4289 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
4291 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4292 SET_DECL_VALUE_EXPR (placeholder
, x
);
4293 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
4294 tree new_vard
= new_var
;
4295 if (omp_is_reference (var
))
4297 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
4298 new_vard
= TREE_OPERAND (new_var
, 0);
4299 gcc_assert (DECL_P (new_vard
));
4302 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
4305 if (new_vard
== new_var
)
4307 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
4308 SET_DECL_VALUE_EXPR (new_var
, ivar
);
4312 SET_DECL_VALUE_EXPR (new_vard
,
4313 build_fold_addr_expr (ivar
));
4314 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
4316 x
= lang_hooks
.decls
.omp_clause_default_ctor
4317 (c
, unshare_expr (ivar
),
4318 build_outer_var_ref (var
, ctx
));
4320 gimplify_and_add (x
, &llist
[0]);
4321 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4323 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4324 lower_omp (&tseq
, ctx
);
4325 gimple_seq_add_seq (&llist
[0], tseq
);
4327 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4328 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4329 lower_omp (&tseq
, ctx
);
4330 gimple_seq_add_seq (&llist
[1], tseq
);
4331 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4332 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4333 if (new_vard
== new_var
)
4334 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4336 SET_DECL_VALUE_EXPR (new_vard
,
4337 build_fold_addr_expr (lvar
));
4338 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
4343 gimplify_stmt (&dtor
, &tseq
);
4344 gimple_seq_add_seq (&llist
[1], tseq
);
4348 /* If this is a reference to constant size reduction var
4349 with placeholder, we haven't emitted the initializer
4350 for it because it is undesirable if SIMD arrays are used.
4351 But if they aren't used, we need to emit the deferred
4352 initialization now. */
4353 else if (omp_is_reference (var
) && is_simd
)
4354 handle_simd_reference (clause_loc
, new_vard
, ilist
);
4355 x
= lang_hooks
.decls
.omp_clause_default_ctor
4356 (c
, unshare_expr (new_var
),
4357 build_outer_var_ref (var
, ctx
));
4359 gimplify_and_add (x
, ilist
);
4360 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4362 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4363 lower_omp (&tseq
, ctx
);
4364 gimple_seq_add_seq (ilist
, tseq
);
4366 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4369 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4370 lower_omp (&tseq
, ctx
);
4371 gimple_seq_add_seq (dlist
, tseq
);
4372 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4374 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4379 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
4380 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
4381 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
4383 /* reduction(-:var) sums up the partial results, so it
4384 acts identically to reduction(+:var). */
4385 if (code
== MINUS_EXPR
)
4388 tree new_vard
= new_var
;
4389 if (is_simd
&& omp_is_reference (var
))
4391 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
4392 new_vard
= TREE_OPERAND (new_var
, 0);
4393 gcc_assert (DECL_P (new_vard
));
4396 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
4399 tree ref
= build_outer_var_ref (var
, ctx
);
4401 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
4406 simt_lane
= create_tmp_var (unsigned_type_node
);
4407 x
= build_call_expr_internal_loc
4408 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
4409 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
4410 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
4411 gimplify_assign (ivar
, x
, &llist
[2]);
4413 x
= build2 (code
, TREE_TYPE (ref
), ref
, ivar
);
4414 ref
= build_outer_var_ref (var
, ctx
);
4415 gimplify_assign (ref
, x
, &llist
[1]);
4417 if (new_vard
!= new_var
)
4419 SET_DECL_VALUE_EXPR (new_vard
,
4420 build_fold_addr_expr (lvar
));
4421 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
4426 if (omp_is_reference (var
) && is_simd
)
4427 handle_simd_reference (clause_loc
, new_vard
, ilist
);
4428 gimplify_assign (new_var
, x
, ilist
);
4431 tree ref
= build_outer_var_ref (var
, ctx
);
4433 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
4434 ref
= build_outer_var_ref (var
, ctx
);
4435 gimplify_assign (ref
, x
, dlist
);
4447 if (known_eq (sctx
.max_vf
, 1U))
4448 sctx
.is_simt
= false;
4450 if (sctx
.lane
|| sctx
.is_simt
)
4452 uid
= create_tmp_var (ptr_type_node
, "simduid");
4453 /* Don't want uninit warnings on simduid, it is always uninitialized,
4454 but we use it not for the value, but for the DECL_UID only. */
4455 TREE_NO_WARNING (uid
) = 1;
4456 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
4457 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
4458 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
4459 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
4461 /* Emit calls denoting privatized variables and initializing a pointer to
4462 structure that holds private variables as fields after ompdevlow pass. */
4465 sctx
.simt_eargs
[0] = uid
;
4467 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
4468 gimple_call_set_lhs (g
, uid
);
4469 gimple_seq_add_stmt (ilist
, g
);
4470 sctx
.simt_eargs
.release ();
4472 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
4473 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
4474 gimple_call_set_lhs (g
, simtrec
);
4475 gimple_seq_add_stmt (ilist
, g
);
4480 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 1, uid
);
4481 gimple_call_set_lhs (g
, sctx
.lane
);
4482 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
4483 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
4484 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
4485 build_int_cst (unsigned_type_node
, 0));
4486 gimple_seq_add_stmt (ilist
, g
);
4487 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4490 tree simt_vf
= create_tmp_var (unsigned_type_node
);
4491 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
4492 gimple_call_set_lhs (g
, simt_vf
);
4493 gimple_seq_add_stmt (dlist
, g
);
4495 tree t
= build_int_cst (unsigned_type_node
, 1);
4496 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
4497 gimple_seq_add_stmt (dlist
, g
);
4499 t
= build_int_cst (unsigned_type_node
, 0);
4500 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
4501 gimple_seq_add_stmt (dlist
, g
);
4503 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
4504 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
4505 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4506 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
4507 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
4509 gimple_seq_add_seq (dlist
, llist
[2]);
4511 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
4512 gimple_seq_add_stmt (dlist
, g
);
4514 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
4515 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
4516 gimple_seq_add_stmt (dlist
, g
);
4518 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
4520 for (int i
= 0; i
< 2; i
++)
4523 tree vf
= create_tmp_var (unsigned_type_node
);
4524 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
4525 gimple_call_set_lhs (g
, vf
);
4526 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
4527 gimple_seq_add_stmt (seq
, g
);
4528 tree t
= build_int_cst (unsigned_type_node
, 0);
4529 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
4530 gimple_seq_add_stmt (seq
, g
);
4531 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
4532 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
4533 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4534 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
4535 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
4536 gimple_seq_add_seq (seq
, llist
[i
]);
4537 t
= build_int_cst (unsigned_type_node
, 1);
4538 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
4539 gimple_seq_add_stmt (seq
, g
);
4540 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
4541 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
4542 gimple_seq_add_stmt (seq
, g
);
4543 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
4548 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
4550 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
4551 gimple_seq_add_stmt (dlist
, g
);
4554 /* The copyin sequence is not to be executed by the main thread, since
4555 that would result in self-copies. Perhaps not visible to scalars,
4556 but it certainly is to C++ operator=. */
4559 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
4561 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
4562 build_int_cst (TREE_TYPE (x
), 0));
4563 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
4564 gimplify_and_add (x
, ilist
);
4567 /* If any copyin variable is passed by reference, we must ensure the
4568 master thread doesn't modify it before it is copied over in all
4569 threads. Similarly for variables in both firstprivate and
4570 lastprivate clauses we need to ensure the lastprivate copying
4571 happens after firstprivate copying in all threads. And similarly
4572 for UDRs if initializer expression refers to omp_orig. */
4573 if (copyin_by_ref
|| lastprivate_firstprivate
|| reduction_omp_orig_ref
)
4575 /* Don't add any barrier for #pragma omp simd or
4576 #pragma omp distribute. */
4577 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
4578 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
)
4579 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
4582 /* If max_vf is non-zero, then we can use only a vectorization factor
4583 up to the max_vf we chose. So stick it into the safelen clause. */
4584 if (maybe_ne (sctx
.max_vf
, 0U))
4586 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4587 OMP_CLAUSE_SAFELEN
);
4588 poly_uint64 safe_len
;
4590 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4591 && maybe_gt (safe_len
, sctx
.max_vf
)))
4593 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
4594 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
4596 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
4597 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
4603 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4604 both parallel and workshare constructs. PREDICATE may be NULL if it's
4608 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*stmt_list
,
4611 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
4612 bool par_clauses
= false;
4613 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
4615 /* Early exit if there are no lastprivate or linear clauses. */
4616 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
4617 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
4618 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
4619 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
4621 if (clauses
== NULL
)
4623 /* If this was a workshare clause, see if it had been combined
4624 with its parallel. In that case, look for the clauses on the
4625 parallel statement itself. */
4626 if (is_parallel_ctx (ctx
))
4630 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
4633 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
4634 OMP_CLAUSE_LASTPRIVATE
);
4635 if (clauses
== NULL
)
4640 bool maybe_simt
= false;
4641 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4642 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
4644 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
4645 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
4647 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
4653 tree label_true
, arm1
, arm2
;
4654 enum tree_code pred_code
= TREE_CODE (predicate
);
4656 label
= create_artificial_label (UNKNOWN_LOCATION
);
4657 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
4658 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
4660 arm1
= TREE_OPERAND (predicate
, 0);
4661 arm2
= TREE_OPERAND (predicate
, 1);
4662 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
4663 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
4668 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
4669 arm2
= boolean_false_node
;
4670 pred_code
= NE_EXPR
;
4674 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
4675 c
= fold_convert (integer_type_node
, c
);
4676 simtcond
= create_tmp_var (integer_type_node
);
4677 gimplify_assign (simtcond
, c
, stmt_list
);
4678 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
4680 c
= create_tmp_var (integer_type_node
);
4681 gimple_call_set_lhs (g
, c
);
4682 gimple_seq_add_stmt (stmt_list
, g
);
4683 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
4687 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
4688 gimple_seq_add_stmt (stmt_list
, stmt
);
4689 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
4692 for (c
= clauses
; c
;)
4695 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4697 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4698 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
4699 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
4701 var
= OMP_CLAUSE_DECL (c
);
4702 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4703 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
4704 && is_taskloop_ctx (ctx
))
4706 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
4707 new_var
= lookup_decl (var
, ctx
->outer
);
4711 new_var
= lookup_decl (var
, ctx
);
4712 /* Avoid uninitialized warnings for lastprivate and
4713 for linear iterators. */
4715 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4716 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
4717 TREE_NO_WARNING (new_var
) = 1;
4720 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
4722 tree val
= DECL_VALUE_EXPR (new_var
);
4723 if (TREE_CODE (val
) == ARRAY_REF
4724 && VAR_P (TREE_OPERAND (val
, 0))
4725 && lookup_attribute ("omp simd array",
4726 DECL_ATTRIBUTES (TREE_OPERAND (val
,
4729 if (lastlane
== NULL
)
4731 lastlane
= create_tmp_var (unsigned_type_node
);
4733 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
4735 TREE_OPERAND (val
, 1));
4736 gimple_call_set_lhs (g
, lastlane
);
4737 gimple_seq_add_stmt (stmt_list
, g
);
4739 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
4740 TREE_OPERAND (val
, 0), lastlane
,
4741 NULL_TREE
, NULL_TREE
);
4744 else if (maybe_simt
)
4746 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
4747 ? DECL_VALUE_EXPR (new_var
)
4749 if (simtlast
== NULL
)
4751 simtlast
= create_tmp_var (unsigned_type_node
);
4752 gcall
*g
= gimple_build_call_internal
4753 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
4754 gimple_call_set_lhs (g
, simtlast
);
4755 gimple_seq_add_stmt (stmt_list
, g
);
4757 x
= build_call_expr_internal_loc
4758 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
4759 TREE_TYPE (val
), 2, val
, simtlast
);
4760 new_var
= unshare_expr (new_var
);
4761 gimplify_assign (new_var
, x
, stmt_list
);
4762 new_var
= unshare_expr (new_var
);
4765 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4766 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
4768 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
4769 gimple_seq_add_seq (stmt_list
,
4770 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
4771 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
4773 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
4774 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
4776 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
4777 gimple_seq_add_seq (stmt_list
,
4778 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
4779 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
4783 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4784 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c
))
4786 gcc_checking_assert (is_taskloop_ctx (ctx
));
4787 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
4789 if (is_global_var (ovar
))
4793 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
4794 if (omp_is_reference (var
))
4795 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
4796 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
4797 gimplify_and_add (x
, stmt_list
);
4799 c
= OMP_CLAUSE_CHAIN (c
);
4800 if (c
== NULL
&& !par_clauses
)
4802 /* If this was a workshare clause, see if it had been combined
4803 with its parallel. In that case, continue looking for the
4804 clauses also on the parallel statement itself. */
4805 if (is_parallel_ctx (ctx
))
4809 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
4812 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
4813 OMP_CLAUSE_LASTPRIVATE
);
4819 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
4822 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4823 (which might be a placeholder). INNER is true if this is an inner
4824 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4825 join markers. Generate the before-loop forking sequence in
4826 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4827 general form of these sequences is
4829 GOACC_REDUCTION_SETUP
4831 GOACC_REDUCTION_INIT
4833 GOACC_REDUCTION_FINI
4835 GOACC_REDUCTION_TEARDOWN. */
4838 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
4839 gcall
*fork
, gcall
*join
, gimple_seq
*fork_seq
,
4840 gimple_seq
*join_seq
, omp_context
*ctx
)
4842 gimple_seq before_fork
= NULL
;
4843 gimple_seq after_fork
= NULL
;
4844 gimple_seq before_join
= NULL
;
4845 gimple_seq after_join
= NULL
;
4846 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
4847 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
4848 unsigned offset
= 0;
4850 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4851 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
4853 tree orig
= OMP_CLAUSE_DECL (c
);
4854 tree var
= maybe_lookup_decl (orig
, ctx
);
4855 tree ref_to_res
= NULL_TREE
;
4856 tree incoming
, outgoing
, v1
, v2
, v3
;
4857 bool is_private
= false;
4859 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
4860 if (rcode
== MINUS_EXPR
)
4862 else if (rcode
== TRUTH_ANDIF_EXPR
)
4863 rcode
= BIT_AND_EXPR
;
4864 else if (rcode
== TRUTH_ORIF_EXPR
)
4865 rcode
= BIT_IOR_EXPR
;
4866 tree op
= build_int_cst (unsigned_type_node
, rcode
);
4871 incoming
= outgoing
= var
;
4875 /* See if an outer construct also reduces this variable. */
4876 omp_context
*outer
= ctx
;
4878 while (omp_context
*probe
= outer
->outer
)
4880 enum gimple_code type
= gimple_code (probe
->stmt
);
4885 case GIMPLE_OMP_FOR
:
4886 cls
= gimple_omp_for_clauses (probe
->stmt
);
4889 case GIMPLE_OMP_TARGET
:
4890 if (gimple_omp_target_kind (probe
->stmt
)
4891 != GF_OMP_TARGET_KIND_OACC_PARALLEL
)
4894 cls
= gimple_omp_target_clauses (probe
->stmt
);
4902 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
4903 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
4904 && orig
== OMP_CLAUSE_DECL (cls
))
4906 incoming
= outgoing
= lookup_decl (orig
, probe
);
4907 goto has_outer_reduction
;
4909 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
4910 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
4911 && orig
== OMP_CLAUSE_DECL (cls
))
4919 /* This is the outermost construct with this reduction,
4920 see if there's a mapping for it. */
4921 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
4922 && maybe_lookup_field (orig
, outer
) && !is_private
)
4924 ref_to_res
= build_receiver_ref (orig
, false, outer
);
4925 if (omp_is_reference (orig
))
4926 ref_to_res
= build_simple_mem_ref (ref_to_res
);
4928 tree type
= TREE_TYPE (var
);
4929 if (POINTER_TYPE_P (type
))
4930 type
= TREE_TYPE (type
);
4933 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
4937 /* Try to look at enclosing contexts for reduction var,
4938 use original if no mapping found. */
4940 omp_context
*c
= ctx
->outer
;
4943 t
= maybe_lookup_decl (orig
, c
);
4946 incoming
= outgoing
= (t
? t
: orig
);
4949 has_outer_reduction
:;
4953 ref_to_res
= integer_zero_node
;
4955 if (omp_is_reference (orig
))
4957 tree type
= TREE_TYPE (var
);
4958 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
4962 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
4963 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
4966 v1
= create_tmp_var (type
, id
);
4967 v2
= create_tmp_var (type
, id
);
4968 v3
= create_tmp_var (type
, id
);
4970 gimplify_assign (v1
, var
, fork_seq
);
4971 gimplify_assign (v2
, var
, fork_seq
);
4972 gimplify_assign (v3
, var
, fork_seq
);
4974 var
= build_simple_mem_ref (var
);
4975 v1
= build_simple_mem_ref (v1
);
4976 v2
= build_simple_mem_ref (v2
);
4977 v3
= build_simple_mem_ref (v3
);
4978 outgoing
= build_simple_mem_ref (outgoing
);
4980 if (!TREE_CONSTANT (incoming
))
4981 incoming
= build_simple_mem_ref (incoming
);
4986 /* Determine position in reduction buffer, which may be used
4987 by target. The parser has ensured that this is not a
4988 variable-sized type. */
4989 fixed_size_mode mode
4990 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
4991 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
4992 offset
= (offset
+ align
- 1) & ~(align
- 1);
4993 tree off
= build_int_cst (sizetype
, offset
);
4994 offset
+= GET_MODE_SIZE (mode
);
4998 init_code
= build_int_cst (integer_type_node
,
4999 IFN_GOACC_REDUCTION_INIT
);
5000 fini_code
= build_int_cst (integer_type_node
,
5001 IFN_GOACC_REDUCTION_FINI
);
5002 setup_code
= build_int_cst (integer_type_node
,
5003 IFN_GOACC_REDUCTION_SETUP
);
5004 teardown_code
= build_int_cst (integer_type_node
,
5005 IFN_GOACC_REDUCTION_TEARDOWN
);
5009 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
5010 TREE_TYPE (var
), 6, setup_code
,
5011 unshare_expr (ref_to_res
),
5012 incoming
, level
, op
, off
);
5014 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
5015 TREE_TYPE (var
), 6, init_code
,
5016 unshare_expr (ref_to_res
),
5017 v1
, level
, op
, off
);
5019 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
5020 TREE_TYPE (var
), 6, fini_code
,
5021 unshare_expr (ref_to_res
),
5022 v2
, level
, op
, off
);
5024 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
5025 TREE_TYPE (var
), 6, teardown_code
,
5026 ref_to_res
, v3
, level
, op
, off
);
5028 gimplify_assign (v1
, setup_call
, &before_fork
);
5029 gimplify_assign (v2
, init_call
, &after_fork
);
5030 gimplify_assign (v3
, fini_call
, &before_join
);
5031 gimplify_assign (outgoing
, teardown_call
, &after_join
);
5034 /* Now stitch things together. */
5035 gimple_seq_add_seq (fork_seq
, before_fork
);
5037 gimple_seq_add_stmt (fork_seq
, fork
);
5038 gimple_seq_add_seq (fork_seq
, after_fork
);
5040 gimple_seq_add_seq (join_seq
, before_join
);
5042 gimple_seq_add_stmt (join_seq
, join
);
5043 gimple_seq_add_seq (join_seq
, after_join
);
5046 /* Generate code to implement the REDUCTION clauses. */
5049 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
, omp_context
*ctx
)
5051 gimple_seq sub_seq
= NULL
;
5056 /* OpenACC loop reductions are handled elsewhere. */
5057 if (is_gimple_omp_oacc (ctx
->stmt
))
5060 /* SIMD reductions are handled in lower_rec_input_clauses. */
5061 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
5062 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
5065 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5066 update in that case, otherwise use a lock. */
5067 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
5068 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
5070 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
5071 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
5073 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5083 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5085 tree var
, ref
, new_var
, orig_var
;
5086 enum tree_code code
;
5087 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5089 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5092 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
5093 orig_var
= var
= OMP_CLAUSE_DECL (c
);
5094 if (TREE_CODE (var
) == MEM_REF
)
5096 var
= TREE_OPERAND (var
, 0);
5097 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
5098 var
= TREE_OPERAND (var
, 0);
5099 if (TREE_CODE (var
) == ADDR_EXPR
)
5100 var
= TREE_OPERAND (var
, 0);
5103 /* If this is a pointer or referenced based array
5104 section, the var could be private in the outer
5105 context e.g. on orphaned loop construct. Pretend this
5106 is private variable's outer reference. */
5107 ccode
= OMP_CLAUSE_PRIVATE
;
5108 if (TREE_CODE (var
) == INDIRECT_REF
)
5109 var
= TREE_OPERAND (var
, 0);
5112 if (is_variable_sized (var
))
5114 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
5115 var
= DECL_VALUE_EXPR (var
);
5116 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
5117 var
= TREE_OPERAND (var
, 0);
5118 gcc_assert (DECL_P (var
));
5121 new_var
= lookup_decl (var
, ctx
);
5122 if (var
== OMP_CLAUSE_DECL (c
) && omp_is_reference (var
))
5123 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5124 ref
= build_outer_var_ref (var
, ctx
, ccode
);
5125 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5127 /* reduction(-:var) sums up the partial results, so it acts
5128 identically to reduction(+:var). */
5129 if (code
== MINUS_EXPR
)
5134 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
5136 addr
= save_expr (addr
);
5137 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
5138 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (ref
), ref
, new_var
);
5139 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
5140 gimplify_and_add (x
, stmt_seqp
);
5143 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
5145 tree d
= OMP_CLAUSE_DECL (c
);
5146 tree type
= TREE_TYPE (d
);
5147 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
5148 tree i
= create_tmp_var (TREE_TYPE (v
), NULL
);
5149 tree ptype
= build_pointer_type (TREE_TYPE (type
));
5150 tree bias
= TREE_OPERAND (d
, 1);
5151 d
= TREE_OPERAND (d
, 0);
5152 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
5154 tree b
= TREE_OPERAND (d
, 1);
5155 b
= maybe_lookup_decl (b
, ctx
);
5158 b
= TREE_OPERAND (d
, 1);
5159 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
5161 if (integer_zerop (bias
))
5165 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
5166 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5167 TREE_TYPE (b
), b
, bias
);
5169 d
= TREE_OPERAND (d
, 0);
5171 /* For ref build_outer_var_ref already performs this, so
5172 only new_var needs a dereference. */
5173 if (TREE_CODE (d
) == INDIRECT_REF
)
5175 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5176 gcc_assert (omp_is_reference (var
) && var
== orig_var
);
5178 else if (TREE_CODE (d
) == ADDR_EXPR
)
5180 if (orig_var
== var
)
5182 new_var
= build_fold_addr_expr (new_var
);
5183 ref
= build_fold_addr_expr (ref
);
5188 gcc_assert (orig_var
== var
);
5189 if (omp_is_reference (var
))
5190 ref
= build_fold_addr_expr (ref
);
5194 tree t
= maybe_lookup_decl (v
, ctx
);
5198 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5199 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
5201 if (!integer_zerop (bias
))
5203 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
5204 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
5205 TREE_TYPE (new_var
), new_var
,
5206 unshare_expr (bias
));
5207 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
5208 TREE_TYPE (ref
), ref
, bias
);
5210 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
5211 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
5212 tree m
= create_tmp_var (ptype
, NULL
);
5213 gimplify_assign (m
, new_var
, stmt_seqp
);
5215 m
= create_tmp_var (ptype
, NULL
);
5216 gimplify_assign (m
, ref
, stmt_seqp
);
5218 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
5219 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5220 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5221 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
5222 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5223 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
5224 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5226 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5227 tree decl_placeholder
5228 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
5229 SET_DECL_VALUE_EXPR (placeholder
, out
);
5230 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5231 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
5232 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
5233 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
5234 gimple_seq_add_seq (&sub_seq
,
5235 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
5236 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5237 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
5238 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
5242 x
= build2 (code
, TREE_TYPE (out
), out
, priv
);
5243 out
= unshare_expr (out
);
5244 gimplify_assign (out
, x
, &sub_seq
);
5246 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
5247 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5248 gimple_seq_add_stmt (&sub_seq
, g
);
5249 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
5250 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5251 gimple_seq_add_stmt (&sub_seq
, g
);
5252 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
5253 build_int_cst (TREE_TYPE (i
), 1));
5254 gimple_seq_add_stmt (&sub_seq
, g
);
5255 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
5256 gimple_seq_add_stmt (&sub_seq
, g
);
5257 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
5259 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5261 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5263 if (omp_is_reference (var
)
5264 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
5266 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
5267 SET_DECL_VALUE_EXPR (placeholder
, ref
);
5268 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5269 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
5270 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
5271 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5272 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
5276 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
5277 ref
= build_outer_var_ref (var
, ctx
);
5278 gimplify_assign (ref
, x
, &sub_seq
);
5282 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
5284 gimple_seq_add_stmt (stmt_seqp
, stmt
);
5286 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
5288 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
5290 gimple_seq_add_stmt (stmt_seqp
, stmt
);
5294 /* Generate code to implement the COPYPRIVATE clauses. */
5297 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
5302 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5304 tree var
, new_var
, ref
, x
;
5306 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5308 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
5311 var
= OMP_CLAUSE_DECL (c
);
5312 by_ref
= use_pointer_for_field (var
, NULL
);
5314 ref
= build_sender_ref (var
, ctx
);
5315 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
5318 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
5319 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
5321 gimplify_assign (ref
, x
, slist
);
5323 ref
= build_receiver_ref (var
, false, ctx
);
5326 ref
= fold_convert_loc (clause_loc
,
5327 build_pointer_type (TREE_TYPE (new_var
)),
5329 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
5331 if (omp_is_reference (var
))
5333 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
5334 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
5335 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5337 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
5338 gimplify_and_add (x
, rlist
);
5343 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5344 and REDUCTION from the sender (aka parent) side. */
5347 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
5351 int ignored_looptemp
= 0;
5352 bool is_taskloop
= false;
5354 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5355 by GOMP_taskloop. */
5356 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
5358 ignored_looptemp
= 2;
5362 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5364 tree val
, ref
, x
, var
;
5365 bool by_ref
, do_in
= false, do_out
= false;
5366 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5368 switch (OMP_CLAUSE_CODE (c
))
5370 case OMP_CLAUSE_PRIVATE
:
5371 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5374 case OMP_CLAUSE_FIRSTPRIVATE
:
5375 case OMP_CLAUSE_COPYIN
:
5376 case OMP_CLAUSE_LASTPRIVATE
:
5377 case OMP_CLAUSE_REDUCTION
:
5379 case OMP_CLAUSE_SHARED
:
5380 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5383 case OMP_CLAUSE__LOOPTEMP_
:
5384 if (ignored_looptemp
)
5394 val
= OMP_CLAUSE_DECL (c
);
5395 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5396 && TREE_CODE (val
) == MEM_REF
)
5398 val
= TREE_OPERAND (val
, 0);
5399 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
5400 val
= TREE_OPERAND (val
, 0);
5401 if (TREE_CODE (val
) == INDIRECT_REF
5402 || TREE_CODE (val
) == ADDR_EXPR
)
5403 val
= TREE_OPERAND (val
, 0);
5404 if (is_variable_sized (val
))
5408 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5409 outer taskloop region. */
5410 omp_context
*ctx_for_o
= ctx
;
5412 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
5413 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5414 ctx_for_o
= ctx
->outer
;
5416 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
5418 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
5419 && is_global_var (var
))
5422 t
= omp_member_access_dummy_var (var
);
5425 var
= DECL_VALUE_EXPR (var
);
5426 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
5428 var
= unshare_and_remap (var
, t
, o
);
5430 var
= unshare_expr (var
);
5433 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
5435 /* Handle taskloop firstprivate/lastprivate, where the
5436 lastprivate on GIMPLE_OMP_TASK is represented as
5437 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5438 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
5439 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
5440 if (use_pointer_for_field (val
, ctx
))
5441 var
= build_fold_addr_expr (var
);
5442 gimplify_assign (x
, var
, ilist
);
5443 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
5447 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
5448 || val
== OMP_CLAUSE_DECL (c
))
5449 && is_variable_sized (val
))
5451 by_ref
= use_pointer_for_field (val
, NULL
);
5453 switch (OMP_CLAUSE_CODE (c
))
5455 case OMP_CLAUSE_FIRSTPRIVATE
:
5456 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
5458 && is_task_ctx (ctx
))
5459 TREE_NO_WARNING (var
) = 1;
5463 case OMP_CLAUSE_PRIVATE
:
5464 case OMP_CLAUSE_COPYIN
:
5465 case OMP_CLAUSE__LOOPTEMP_
:
5469 case OMP_CLAUSE_LASTPRIVATE
:
5470 if (by_ref
|| omp_is_reference (val
))
5472 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5479 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
5484 case OMP_CLAUSE_REDUCTION
:
5486 if (val
== OMP_CLAUSE_DECL (c
))
5487 do_out
= !(by_ref
|| omp_is_reference (val
));
5489 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
5498 ref
= build_sender_ref (val
, ctx
);
5499 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
5500 gimplify_assign (ref
, x
, ilist
);
5501 if (is_task_ctx (ctx
))
5502 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
5507 ref
= build_sender_ref (val
, ctx
);
5508 gimplify_assign (var
, ref
, olist
);
5513 /* Generate code to implement SHARED from the sender (aka parent)
5514 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5515 list things that got automatically shared. */
5518 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
5520 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
5522 if (ctx
->record_type
== NULL
)
5525 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
5526 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
5528 ovar
= DECL_ABSTRACT_ORIGIN (f
);
5529 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
5532 nvar
= maybe_lookup_decl (ovar
, ctx
);
5533 if (!nvar
|| !DECL_HAS_VALUE_EXPR_P (nvar
))
5536 /* If CTX is a nested parallel directive. Find the immediately
5537 enclosing parallel or workshare construct that contains a
5538 mapping for OVAR. */
5539 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
5541 t
= omp_member_access_dummy_var (var
);
5544 var
= DECL_VALUE_EXPR (var
);
5545 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
5547 var
= unshare_and_remap (var
, t
, o
);
5549 var
= unshare_expr (var
);
5552 if (use_pointer_for_field (ovar
, ctx
))
5554 x
= build_sender_ref (ovar
, ctx
);
5555 var
= build_fold_addr_expr (var
);
5556 gimplify_assign (x
, var
, ilist
);
5560 x
= build_sender_ref (ovar
, ctx
);
5561 gimplify_assign (x
, var
, ilist
);
5563 if (!TREE_READONLY (var
)
5564 /* We don't need to receive a new reference to a result
5565 or parm decl. In fact we may not store to it as we will
5566 invalidate any pending RSO and generate wrong gimple
5568 && !((TREE_CODE (var
) == RESULT_DECL
5569 || TREE_CODE (var
) == PARM_DECL
)
5570 && DECL_BY_REFERENCE (var
)))
5572 x
= build_sender_ref (ovar
, ctx
);
5573 gimplify_assign (var
, x
, olist
);
5579 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5580 other information that must be processed by the target compiler.
5581 Return the maximum number of dimensions the associated loop might
5582 be partitioned over. */
5585 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
5586 gimple_seq
*seq
, omp_context
*ctx
)
5588 unsigned levels
= 0;
5590 tree gang_static
= NULL_TREE
;
5591 auto_vec
<tree
, 5> args
;
5593 args
.quick_push (build_int_cst
5594 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
5595 args
.quick_push (ddvar
);
5596 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5598 switch (OMP_CLAUSE_CODE (c
))
5600 case OMP_CLAUSE_GANG
:
5601 tag
|= OLF_DIM_GANG
;
5602 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
5603 /* static:* is represented by -1, and we can ignore it, as
5604 scheduling is always static. */
5605 if (gang_static
&& integer_minus_onep (gang_static
))
5606 gang_static
= NULL_TREE
;
5610 case OMP_CLAUSE_WORKER
:
5611 tag
|= OLF_DIM_WORKER
;
5615 case OMP_CLAUSE_VECTOR
:
5616 tag
|= OLF_DIM_VECTOR
;
5620 case OMP_CLAUSE_SEQ
:
5624 case OMP_CLAUSE_AUTO
:
5628 case OMP_CLAUSE_INDEPENDENT
:
5629 tag
|= OLF_INDEPENDENT
;
5632 case OMP_CLAUSE_TILE
:
5643 if (DECL_P (gang_static
))
5644 gang_static
= build_outer_var_ref (gang_static
, ctx
);
5645 tag
|= OLF_GANG_STATIC
;
5648 /* In a parallel region, loops are implicitly INDEPENDENT. */
5649 omp_context
*tgt
= enclosing_target_ctx (ctx
);
5650 if (!tgt
|| is_oacc_parallel (tgt
))
5651 tag
|= OLF_INDEPENDENT
;
5654 /* Tiling could use all 3 levels. */
5658 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5659 Ensure at least one level, or 2 for possible auto
5661 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
5662 << OLF_DIM_BASE
) | OLF_SEQ
));
5664 if (levels
< 1u + maybe_auto
)
5665 levels
= 1u + maybe_auto
;
5668 args
.quick_push (build_int_cst (integer_type_node
, levels
));
5669 args
.quick_push (build_int_cst (integer_type_node
, tag
));
5671 args
.quick_push (gang_static
);
5673 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
5674 gimple_set_location (call
, loc
);
5675 gimple_set_lhs (call
, ddvar
);
5676 gimple_seq_add_stmt (seq
, call
);
5681 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
5682 partitioning level of the enclosed region. */
5685 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
5686 tree tofollow
, gimple_seq
*seq
)
5688 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
5689 : IFN_UNIQUE_OACC_TAIL_MARK
);
5690 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
5691 int nargs
= 2 + (tofollow
!= NULL_TREE
);
5692 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
5693 marker
, ddvar
, tofollow
);
5694 gimple_set_location (call
, loc
);
5695 gimple_set_lhs (call
, ddvar
);
5696 gimple_seq_add_stmt (seq
, call
);
5699 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5700 the loop clauses, from which we extract reductions. Initialize
5704 lower_oacc_head_tail (location_t loc
, tree clauses
,
5705 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
5708 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
5709 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
5711 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
5712 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
5713 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
5716 for (unsigned done
= 1; count
; count
--, done
++)
5718 gimple_seq fork_seq
= NULL
;
5719 gimple_seq join_seq
= NULL
;
5721 tree place
= build_int_cst (integer_type_node
, -1);
5722 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
5723 fork_kind
, ddvar
, place
);
5724 gimple_set_location (fork
, loc
);
5725 gimple_set_lhs (fork
, ddvar
);
5727 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
5728 join_kind
, ddvar
, place
);
5729 gimple_set_location (join
, loc
);
5730 gimple_set_lhs (join
, ddvar
);
5732 /* Mark the beginning of this level sequence. */
5734 lower_oacc_loop_marker (loc
, ddvar
, true,
5735 build_int_cst (integer_type_node
, count
),
5737 lower_oacc_loop_marker (loc
, ddvar
, false,
5738 build_int_cst (integer_type_node
, done
),
5741 lower_oacc_reductions (loc
, clauses
, place
, inner
,
5742 fork
, join
, &fork_seq
, &join_seq
, ctx
);
5744 /* Append this level to head. */
5745 gimple_seq_add_seq (head
, fork_seq
);
5746 /* Prepend it to tail. */
5747 gimple_seq_add_seq (&join_seq
, *tail
);
5753 /* Mark the end of the sequence. */
5754 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
5755 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
5758 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5759 catch handler and return it. This prevents programs from violating the
5760 structured block semantics with throws. */
5763 maybe_catch_exception (gimple_seq body
)
5768 if (!flag_exceptions
)
5771 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
5772 decl
= lang_hooks
.eh_protect_cleanup_actions ();
5774 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
5776 g
= gimple_build_eh_must_not_throw (decl
);
5777 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
5780 return gimple_seq_alloc_with_stmt (g
);
5784 /* Routines to lower OMP directives into OMP-GIMPLE. */
5786 /* If ctx is a worksharing context inside of a cancellable parallel
5787 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5788 and conditional branch to parallel's cancel_label to handle
5789 cancellation in the implicit barrier. */
5792 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple_seq
*body
)
5794 gimple
*omp_return
= gimple_seq_last_stmt (*body
);
5795 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
5796 if (gimple_omp_return_nowait_p (omp_return
))
5799 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_PARALLEL
5800 && ctx
->outer
->cancellable
)
5802 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
5803 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
5804 tree lhs
= create_tmp_var (c_bool_type
);
5805 gimple_omp_return_set_lhs (omp_return
, lhs
);
5806 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
5807 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
5808 fold_convert (c_bool_type
,
5809 boolean_false_node
),
5810 ctx
->outer
->cancel_label
, fallthru_label
);
5811 gimple_seq_add_stmt (body
, g
);
5812 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
5816 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5817 CTX is the enclosing OMP context for the current statement. */
5820 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
5822 tree block
, control
;
5823 gimple_stmt_iterator tgsi
;
5824 gomp_sections
*stmt
;
5826 gbind
*new_stmt
, *bind
;
5827 gimple_seq ilist
, dlist
, olist
, new_body
;
5829 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
5831 push_gimplify_context ();
5835 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
5836 &ilist
, &dlist
, ctx
, NULL
);
5838 new_body
= gimple_omp_body (stmt
);
5839 gimple_omp_set_body (stmt
, NULL
);
5840 tgsi
= gsi_start (new_body
);
5841 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
5846 sec_start
= gsi_stmt (tgsi
);
5847 sctx
= maybe_lookup_ctx (sec_start
);
5850 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
5851 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
5852 GSI_CONTINUE_LINKING
);
5853 gimple_omp_set_body (sec_start
, NULL
);
5855 if (gsi_one_before_end_p (tgsi
))
5857 gimple_seq l
= NULL
;
5858 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
5860 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
5861 gimple_omp_section_set_last (sec_start
);
5864 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
5865 GSI_CONTINUE_LINKING
);
5868 block
= make_node (BLOCK
);
5869 bind
= gimple_build_bind (NULL
, new_body
, block
);
5872 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
, ctx
);
5874 block
= make_node (BLOCK
);
5875 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
5876 gsi_replace (gsi_p
, new_stmt
, true);
5878 pop_gimplify_context (new_stmt
);
5879 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
5880 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
5881 if (BLOCK_VARS (block
))
5882 TREE_USED (block
) = 1;
5885 gimple_seq_add_seq (&new_body
, ilist
);
5886 gimple_seq_add_stmt (&new_body
, stmt
);
5887 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
5888 gimple_seq_add_stmt (&new_body
, bind
);
5890 control
= create_tmp_var (unsigned_type_node
, ".section");
5891 t
= gimple_build_omp_continue (control
, control
);
5892 gimple_omp_sections_set_control (stmt
, control
);
5893 gimple_seq_add_stmt (&new_body
, t
);
5895 gimple_seq_add_seq (&new_body
, olist
);
5896 if (ctx
->cancellable
)
5897 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
5898 gimple_seq_add_seq (&new_body
, dlist
);
5900 new_body
= maybe_catch_exception (new_body
);
5902 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
5903 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
5904 t
= gimple_build_omp_return (nowait
);
5905 gimple_seq_add_stmt (&new_body
, t
);
5906 maybe_add_implicit_barrier_cancel (ctx
, &new_body
);
5908 gimple_bind_set_body (new_stmt
, new_body
);
5912 /* A subroutine of lower_omp_single. Expand the simple form of
5913 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5915 if (GOMP_single_start ())
5917 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5919 FIXME. It may be better to delay expanding the logic of this until
5920 pass_expand_omp. The expanded logic may make the job more difficult
5921 to a synchronization analysis pass. */
5924 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
5926 location_t loc
= gimple_location (single_stmt
);
5927 tree tlabel
= create_artificial_label (loc
);
5928 tree flabel
= create_artificial_label (loc
);
5929 gimple
*call
, *cond
;
5932 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
5933 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
5934 call
= gimple_build_call (decl
, 0);
5935 gimple_call_set_lhs (call
, lhs
);
5936 gimple_seq_add_stmt (pre_p
, call
);
5938 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
5939 fold_convert_loc (loc
, TREE_TYPE (lhs
),
5942 gimple_seq_add_stmt (pre_p
, cond
);
5943 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
5944 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
5945 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
5949 /* A subroutine of lower_omp_single. Expand the simple form of
5950 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
5952 #pragma omp single copyprivate (a, b, c)
5954 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5957 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5963 GOMP_single_copy_end (©out);
5974 FIXME. It may be better to delay expanding the logic of this until
5975 pass_expand_omp. The expanded logic may make the job more difficult
5976 to a synchronization analysis pass. */
5979 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
5982 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
5983 gimple_seq copyin_seq
;
5984 location_t loc
= gimple_location (single_stmt
);
5986 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
5988 ptr_type
= build_pointer_type (ctx
->record_type
);
5989 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
5991 l0
= create_artificial_label (loc
);
5992 l1
= create_artificial_label (loc
);
5993 l2
= create_artificial_label (loc
);
5995 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
5996 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
5997 t
= fold_convert_loc (loc
, ptr_type
, t
);
5998 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
6000 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
6001 build_int_cst (ptr_type
, 0));
6002 t
= build3 (COND_EXPR
, void_type_node
, t
,
6003 build_and_jump (&l0
), build_and_jump (&l1
));
6004 gimplify_and_add (t
, pre_p
);
6006 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
6008 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
6011 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
6014 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
6015 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
6016 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
6017 gimplify_and_add (t
, pre_p
);
6019 t
= build_and_jump (&l2
);
6020 gimplify_and_add (t
, pre_p
);
6022 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
6024 gimple_seq_add_seq (pre_p
, copyin_seq
);
6026 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
6030 /* Expand code for an OpenMP single directive. */
6033 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6036 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
6038 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
6040 push_gimplify_context ();
6042 block
= make_node (BLOCK
);
6043 bind
= gimple_build_bind (NULL
, NULL
, block
);
6044 gsi_replace (gsi_p
, bind
, true);
6047 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
6048 &bind_body
, &dlist
, ctx
, NULL
);
6049 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
6051 gimple_seq_add_stmt (&bind_body
, single_stmt
);
6053 if (ctx
->record_type
)
6054 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
6056 lower_omp_single_simple (single_stmt
, &bind_body
);
6058 gimple_omp_set_body (single_stmt
, NULL
);
6060 gimple_seq_add_seq (&bind_body
, dlist
);
6062 bind_body
= maybe_catch_exception (bind_body
);
6064 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
6065 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
6066 gimple
*g
= gimple_build_omp_return (nowait
);
6067 gimple_seq_add_stmt (&bind_body_tail
, g
);
6068 maybe_add_implicit_barrier_cancel (ctx
, &bind_body_tail
);
6069 if (ctx
->record_type
)
6071 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
6072 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
6073 TREE_THIS_VOLATILE (clobber
) = 1;
6074 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
6075 clobber
), GSI_SAME_STMT
);
6077 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
6078 gimple_bind_set_body (bind
, bind_body
);
6080 pop_gimplify_context (bind
);
6082 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6083 BLOCK_VARS (block
) = ctx
->block_vars
;
6084 if (BLOCK_VARS (block
))
6085 TREE_USED (block
) = 1;
6089 /* Expand code for an OpenMP master directive. */
6092 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6094 tree block
, lab
= NULL
, x
, bfn_decl
;
6095 gimple
*stmt
= gsi_stmt (*gsi_p
);
6097 location_t loc
= gimple_location (stmt
);
6100 push_gimplify_context ();
6102 block
= make_node (BLOCK
);
6103 bind
= gimple_build_bind (NULL
, NULL
, block
);
6104 gsi_replace (gsi_p
, bind
, true);
6105 gimple_bind_add_stmt (bind
, stmt
);
6107 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
6108 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
6109 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, integer_zero_node
);
6110 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
6112 gimplify_and_add (x
, &tseq
);
6113 gimple_bind_add_seq (bind
, tseq
);
6115 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6116 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
6117 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
6118 gimple_omp_set_body (stmt
, NULL
);
6120 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
6122 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
6124 pop_gimplify_context (bind
);
6126 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6127 BLOCK_VARS (block
) = ctx
->block_vars
;
6131 /* Expand code for an OpenMP taskgroup directive. */
6134 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6136 gimple
*stmt
= gsi_stmt (*gsi_p
);
6139 tree block
= make_node (BLOCK
);
6141 bind
= gimple_build_bind (NULL
, NULL
, block
);
6142 gsi_replace (gsi_p
, bind
, true);
6143 gimple_bind_add_stmt (bind
, stmt
);
6145 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
6147 gimple_bind_add_stmt (bind
, x
);
6149 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6150 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
6151 gimple_omp_set_body (stmt
, NULL
);
6153 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
6155 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6156 BLOCK_VARS (block
) = ctx
->block_vars
;
6160 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6163 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
6166 struct omp_for_data fd
;
6167 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
6170 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
6171 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
6172 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
6176 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
6177 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
6178 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
6179 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
6181 /* Merge depend clauses from multiple adjacent
6182 #pragma omp ordered depend(sink:...) constructs
6183 into one #pragma omp ordered depend(sink:...), so that
6184 we can optimize them together. */
6185 gimple_stmt_iterator gsi
= *gsi_p
;
6187 while (!gsi_end_p (gsi
))
6189 gimple
*stmt
= gsi_stmt (gsi
);
6190 if (is_gimple_debug (stmt
)
6191 || gimple_code (stmt
) == GIMPLE_NOP
)
6196 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
6198 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
6199 c
= gimple_omp_ordered_clauses (ord_stmt2
);
6201 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
6202 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
6205 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
6207 gsi_remove (&gsi
, true);
6211 /* Canonicalize sink dependence clauses into one folded clause if
6214 The basic algorithm is to create a sink vector whose first
6215 element is the GCD of all the first elements, and whose remaining
6216 elements are the minimum of the subsequent columns.
6218 We ignore dependence vectors whose first element is zero because
6219 such dependencies are known to be executed by the same thread.
6221 We take into account the direction of the loop, so a minimum
6222 becomes a maximum if the loop is iterating forwards. We also
6223 ignore sink clauses where the loop direction is unknown, or where
6224 the offsets are clearly invalid because they are not a multiple
6225 of the loop increment.
6229 #pragma omp for ordered(2)
6230 for (i=0; i < N; ++i)
6231 for (j=0; j < M; ++j)
6233 #pragma omp ordered \
6234 depend(sink:i-8,j-2) \
6235 depend(sink:i,j-1) \ // Completely ignored because i+0.
6236 depend(sink:i-4,j-3) \
6237 depend(sink:i-6,j-4)
6238 #pragma omp ordered depend(source)
6243 depend(sink:-gcd(8,4,6),-min(2,3,4))
6248 /* FIXME: Computing GCD's where the first element is zero is
6249 non-trivial in the presence of collapsed loops. Do this later. */
6250 if (fd
.collapse
> 1)
6253 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
6255 /* wide_int is not a POD so it must be default-constructed. */
6256 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
6257 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
6259 tree folded_dep
= NULL_TREE
;
6260 /* TRUE if the first dimension's offset is negative. */
6261 bool neg_offset_p
= false;
6263 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
6265 while ((c
= *list_p
) != NULL
)
6267 bool remove
= false;
6269 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
6270 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
6271 goto next_ordered_clause
;
6274 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
6275 vec
&& TREE_CODE (vec
) == TREE_LIST
;
6276 vec
= TREE_CHAIN (vec
), ++i
)
6278 gcc_assert (i
< len
);
6280 /* omp_extract_for_data has canonicalized the condition. */
6281 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
6282 || fd
.loops
[i
].cond_code
== GT_EXPR
);
6283 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
6284 bool maybe_lexically_later
= true;
6286 /* While the committee makes up its mind, bail if we have any
6287 non-constant steps. */
6288 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
6289 goto lower_omp_ordered_ret
;
6291 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
6292 if (POINTER_TYPE_P (itype
))
6294 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
6295 TYPE_PRECISION (itype
),
6298 /* Ignore invalid offsets that are not multiples of the step. */
6299 if (!wi::multiple_of_p (wi::abs (offset
),
6300 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
6303 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
6304 "ignoring sink clause with offset that is not "
6305 "a multiple of the loop step");
6307 goto next_ordered_clause
;
6310 /* Calculate the first dimension. The first dimension of
6311 the folded dependency vector is the GCD of the first
6312 elements, while ignoring any first elements whose offset
6316 /* Ignore dependence vectors whose first dimension is 0. */
6320 goto next_ordered_clause
;
6324 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
6326 error_at (OMP_CLAUSE_LOCATION (c
),
6327 "first offset must be in opposite direction "
6328 "of loop iterations");
6329 goto lower_omp_ordered_ret
;
6333 neg_offset_p
= forward
;
6334 /* Initialize the first time around. */
6335 if (folded_dep
== NULL_TREE
)
6338 folded_deps
[0] = offset
;
6341 folded_deps
[0] = wi::gcd (folded_deps
[0],
6345 /* Calculate minimum for the remaining dimensions. */
6348 folded_deps
[len
+ i
- 1] = offset
;
6349 if (folded_dep
== c
)
6350 folded_deps
[i
] = offset
;
6351 else if (maybe_lexically_later
6352 && !wi::eq_p (folded_deps
[i
], offset
))
6354 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
6358 for (j
= 1; j
<= i
; j
++)
6359 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
6362 maybe_lexically_later
= false;
6366 gcc_assert (i
== len
);
6370 next_ordered_clause
:
6372 *list_p
= OMP_CLAUSE_CHAIN (c
);
6374 list_p
= &OMP_CLAUSE_CHAIN (c
);
6380 folded_deps
[0] = -folded_deps
[0];
6382 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
6383 if (POINTER_TYPE_P (itype
))
6386 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
6387 = wide_int_to_tree (itype
, folded_deps
[0]);
6388 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
6389 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
6392 lower_omp_ordered_ret
:
6394 /* Ordered without clauses is #pragma omp threads, while we want
6395 a nop instead if we remove all clauses. */
6396 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
6397 gsi_replace (gsi_p
, gimple_build_nop (), true);
6401 /* Expand code for an OpenMP ordered directive. */
6404 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6407 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
6408 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
6411 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
6413 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6416 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
6417 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
6418 OMP_CLAUSE_THREADS
);
6420 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
6423 /* FIXME: This is needs to be moved to the expansion to verify various
6424 conditions only testable on cfg with dominators computed, and also
6425 all the depend clauses to be merged still might need to be available
6426 for the runtime checks. */
6428 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
6432 push_gimplify_context ();
6434 block
= make_node (BLOCK
);
6435 bind
= gimple_build_bind (NULL
, NULL
, block
);
6436 gsi_replace (gsi_p
, bind
, true);
6437 gimple_bind_add_stmt (bind
, stmt
);
6441 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
6442 build_int_cst (NULL_TREE
, threads
));
6443 cfun
->has_simduid_loops
= true;
6446 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
6448 gimple_bind_add_stmt (bind
, x
);
6450 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
6453 counter
= create_tmp_var (integer_type_node
);
6454 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
6455 gimple_call_set_lhs (g
, counter
);
6456 gimple_bind_add_stmt (bind
, g
);
6458 body
= create_artificial_label (UNKNOWN_LOCATION
);
6459 test
= create_artificial_label (UNKNOWN_LOCATION
);
6460 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
6462 tree simt_pred
= create_tmp_var (integer_type_node
);
6463 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
6464 gimple_call_set_lhs (g
, simt_pred
);
6465 gimple_bind_add_stmt (bind
, g
);
6467 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
6468 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
6469 gimple_bind_add_stmt (bind
, g
);
6471 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
6473 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6474 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
6475 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
6476 gimple_omp_set_body (stmt
, NULL
);
6480 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
6481 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
6482 gimple_bind_add_stmt (bind
, g
);
6484 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
6485 tree nonneg
= create_tmp_var (integer_type_node
);
6486 gimple_seq tseq
= NULL
;
6487 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
6488 gimple_bind_add_seq (bind
, tseq
);
6490 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
6491 gimple_call_set_lhs (g
, nonneg
);
6492 gimple_bind_add_stmt (bind
, g
);
6494 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6495 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
6496 gimple_bind_add_stmt (bind
, g
);
6498 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
6501 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
6502 build_int_cst (NULL_TREE
, threads
));
6504 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
6506 gimple_bind_add_stmt (bind
, x
);
6508 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
6510 pop_gimplify_context (bind
);
6512 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6513 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
6517 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6518 substitution of a couple of function calls. But in the NAMED case,
6519 requires that languages coordinate a symbol name. It is therefore
6520 best put here in common code. */
6522 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
6525 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6528 tree name
, lock
, unlock
;
6529 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
6531 location_t loc
= gimple_location (stmt
);
6534 name
= gimple_omp_critical_name (stmt
);
6539 if (!critical_name_mutexes
)
6540 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
6542 tree
*n
= critical_name_mutexes
->get (name
);
6547 decl
= create_tmp_var_raw (ptr_type_node
);
6549 new_str
= ACONCAT ((".gomp_critical_user_",
6550 IDENTIFIER_POINTER (name
), NULL
));
6551 DECL_NAME (decl
) = get_identifier (new_str
);
6552 TREE_PUBLIC (decl
) = 1;
6553 TREE_STATIC (decl
) = 1;
6554 DECL_COMMON (decl
) = 1;
6555 DECL_ARTIFICIAL (decl
) = 1;
6556 DECL_IGNORED_P (decl
) = 1;
6558 varpool_node::finalize_decl (decl
);
6560 critical_name_mutexes
->put (name
, decl
);
6565 /* If '#pragma omp critical' is inside offloaded region or
6566 inside function marked as offloadable, the symbol must be
6567 marked as offloadable too. */
6569 if (cgraph_node::get (current_function_decl
)->offloadable
)
6570 varpool_node::get_create (decl
)->offloadable
= 1;
6572 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
6573 if (is_gimple_omp_offloaded (octx
->stmt
))
6575 varpool_node::get_create (decl
)->offloadable
= 1;
6579 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
6580 lock
= build_call_expr_loc (loc
, lock
, 1,
6581 build_fold_addr_expr_loc (loc
, decl
));
6583 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
6584 unlock
= build_call_expr_loc (loc
, unlock
, 1,
6585 build_fold_addr_expr_loc (loc
, decl
));
6589 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
6590 lock
= build_call_expr_loc (loc
, lock
, 0);
6592 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
6593 unlock
= build_call_expr_loc (loc
, unlock
, 0);
6596 push_gimplify_context ();
6598 block
= make_node (BLOCK
);
6599 bind
= gimple_build_bind (NULL
, NULL
, block
);
6600 gsi_replace (gsi_p
, bind
, true);
6601 gimple_bind_add_stmt (bind
, stmt
);
6603 tbody
= gimple_bind_body (bind
);
6604 gimplify_and_add (lock
, &tbody
);
6605 gimple_bind_set_body (bind
, tbody
);
6607 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6608 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
6609 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
6610 gimple_omp_set_body (stmt
, NULL
);
6612 tbody
= gimple_bind_body (bind
);
6613 gimplify_and_add (unlock
, &tbody
);
6614 gimple_bind_set_body (bind
, tbody
);
6616 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
6618 pop_gimplify_context (bind
);
6619 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6620 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
6623 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6624 for a lastprivate clause. Given a loop control predicate of (V
6625 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6626 is appended to *DLIST, iterator initialization is appended to
6630 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
6631 gimple_seq
*dlist
, struct omp_context
*ctx
)
6633 tree clauses
, cond
, vinit
;
6634 enum tree_code cond_code
;
6637 cond_code
= fd
->loop
.cond_code
;
6638 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
6640 /* When possible, use a strict equality expression. This can let VRP
6641 type optimizations deduce the value and remove a copy. */
6642 if (tree_fits_shwi_p (fd
->loop
.step
))
6644 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
6645 if (step
== 1 || step
== -1)
6646 cond_code
= EQ_EXPR
;
6649 if (gimple_omp_for_kind (fd
->for_stmt
) == GF_OMP_FOR_KIND_GRID_LOOP
6650 || gimple_omp_for_grid_phony (fd
->for_stmt
))
6651 cond
= omp_grid_lastprivate_predicate (fd
);
6654 tree n2
= fd
->loop
.n2
;
6655 if (fd
->collapse
> 1
6656 && TREE_CODE (n2
) != INTEGER_CST
6657 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
6659 struct omp_context
*taskreg_ctx
= NULL
;
6660 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
6662 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
6663 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
6664 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
6666 if (gimple_omp_for_combined_into_p (gfor
))
6668 gcc_assert (ctx
->outer
->outer
6669 && is_parallel_ctx (ctx
->outer
->outer
));
6670 taskreg_ctx
= ctx
->outer
->outer
;
6674 struct omp_for_data outer_fd
;
6675 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
6676 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
6679 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
6680 taskreg_ctx
= ctx
->outer
->outer
;
6682 else if (is_taskreg_ctx (ctx
->outer
))
6683 taskreg_ctx
= ctx
->outer
;
6687 tree taskreg_clauses
6688 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
6689 tree innerc
= omp_find_clause (taskreg_clauses
,
6690 OMP_CLAUSE__LOOPTEMP_
);
6691 gcc_assert (innerc
);
6692 for (i
= 0; i
< fd
->collapse
; i
++)
6694 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
6695 OMP_CLAUSE__LOOPTEMP_
);
6696 gcc_assert (innerc
);
6698 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
6699 OMP_CLAUSE__LOOPTEMP_
);
6701 n2
= fold_convert (TREE_TYPE (n2
),
6702 lookup_decl (OMP_CLAUSE_DECL (innerc
),
6706 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
6709 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
6711 lower_lastprivate_clauses (clauses
, cond
, &stmts
, ctx
);
6712 if (!gimple_seq_empty_p (stmts
))
6714 gimple_seq_add_seq (&stmts
, *dlist
);
6717 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6718 vinit
= fd
->loop
.n1
;
6719 if (cond_code
== EQ_EXPR
6720 && tree_fits_shwi_p (fd
->loop
.n2
)
6721 && ! integer_zerop (fd
->loop
.n2
))
6722 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
6724 vinit
= unshare_expr (vinit
);
6726 /* Initialize the iterator variable, so that threads that don't execute
6727 any iterations don't execute the lastprivate clauses by accident. */
6728 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
6733 /* Lower code for an OMP loop directive. */
6736 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6739 struct omp_for_data fd
, *fdp
= NULL
;
6740 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
6742 gimple_seq omp_for_body
, body
, dlist
;
6743 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
6746 push_gimplify_context ();
6748 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
6750 block
= make_node (BLOCK
);
6751 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
6752 /* Replace at gsi right away, so that 'stmt' is no member
6753 of a sequence anymore as we're going to add to a different
6755 gsi_replace (gsi_p
, new_stmt
, true);
6757 /* Move declaration of temporaries in the loop body before we make
6759 omp_for_body
= gimple_omp_body (stmt
);
6760 if (!gimple_seq_empty_p (omp_for_body
)
6761 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
6764 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
6765 tree vars
= gimple_bind_vars (inner_bind
);
6766 gimple_bind_append_vars (new_stmt
, vars
);
6767 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6768 keep them on the inner_bind and it's block. */
6769 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
6770 if (gimple_bind_block (inner_bind
))
6771 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
6774 if (gimple_omp_for_combined_into_p (stmt
))
6776 omp_extract_for_data (stmt
, &fd
, NULL
);
6779 /* We need two temporaries with fd.loop.v type (istart/iend)
6780 and then (fd.collapse - 1) temporaries with the same
6781 type for count2 ... countN-1 vars if not constant. */
6783 tree type
= fd
.iter_type
;
6785 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
6786 count
+= fd
.collapse
- 1;
6788 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
6789 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
6790 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
6795 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
6796 OMP_CLAUSE__LOOPTEMP_
);
6798 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
6799 OMP_CLAUSE__LOOPTEMP_
);
6800 for (i
= 0; i
< count
; i
++)
6805 gcc_assert (outerc
);
6806 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
6807 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
6808 OMP_CLAUSE__LOOPTEMP_
);
6812 /* If there are 2 adjacent SIMD stmts, one with _simt_
6813 clause, another without, make sure they have the same
6814 decls in _looptemp_ clauses, because the outer stmt
6815 they are combined into will look up just one inner_stmt. */
6817 temp
= OMP_CLAUSE_DECL (simtc
);
6819 temp
= create_tmp_var (type
);
6820 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
6822 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
6823 OMP_CLAUSE_DECL (*pc
) = temp
;
6824 pc
= &OMP_CLAUSE_CHAIN (*pc
);
6826 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
6827 OMP_CLAUSE__LOOPTEMP_
);
6832 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6835 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
6837 gimple_seq_add_seq (&body
, gimple_omp_for_pre_body (stmt
));
6839 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6841 /* Lower the header expressions. At this point, we can assume that
6842 the header is of the form:
6844 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6846 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6847 using the .omp_data_s mapping, if needed. */
6848 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
6850 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
6851 if (!is_gimple_min_invariant (*rhs_p
))
6852 *rhs_p
= get_formal_tmp_var (*rhs_p
, &body
);
6853 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
6854 recompute_tree_invariant_for_addr_expr (*rhs_p
);
6856 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
6857 if (!is_gimple_min_invariant (*rhs_p
))
6858 *rhs_p
= get_formal_tmp_var (*rhs_p
, &body
);
6859 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
6860 recompute_tree_invariant_for_addr_expr (*rhs_p
);
6862 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
6863 if (!is_gimple_min_invariant (*rhs_p
))
6864 *rhs_p
= get_formal_tmp_var (*rhs_p
, &body
);
6867 /* Once lowered, extract the bounds and clauses. */
6868 omp_extract_for_data (stmt
, &fd
, NULL
);
6870 if (is_gimple_omp_oacc (ctx
->stmt
)
6871 && !ctx_in_oacc_kernels_region (ctx
))
6872 lower_oacc_head_tail (gimple_location (stmt
),
6873 gimple_omp_for_clauses (stmt
),
6874 &oacc_head
, &oacc_tail
, ctx
);
6876 /* Add OpenACC partitioning and reduction markers just before the loop. */
6878 gimple_seq_add_seq (&body
, oacc_head
);
6880 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, ctx
);
6882 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
6883 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
6884 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6885 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
6887 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6888 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
6889 OMP_CLAUSE_LINEAR_STEP (c
)
6890 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
6894 bool phony_loop
= (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
6895 && gimple_omp_for_grid_phony (stmt
));
6897 gimple_seq_add_stmt (&body
, stmt
);
6898 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
6901 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
6904 /* After the loop, add exit clauses. */
6905 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, ctx
);
6907 if (ctx
->cancellable
)
6908 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
6910 gimple_seq_add_seq (&body
, dlist
);
6912 body
= maybe_catch_exception (body
);
6916 /* Region exit marker goes at the end of the loop body. */
6917 gimple_seq_add_stmt (&body
, gimple_build_omp_return (fd
.have_nowait
));
6918 maybe_add_implicit_barrier_cancel (ctx
, &body
);
6921 /* Add OpenACC joining and reduction markers just after the loop. */
6923 gimple_seq_add_seq (&body
, oacc_tail
);
6925 pop_gimplify_context (new_stmt
);
6927 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
6928 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
6929 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
6930 if (BLOCK_VARS (block
))
6931 TREE_USED (block
) = 1;
6933 gimple_bind_set_body (new_stmt
, body
);
6934 gimple_omp_set_body (stmt
, NULL
);
6935 gimple_omp_for_set_pre_body (stmt
, NULL
);
6938 /* Callback for walk_stmts. Check if the current statement only contains
6939 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
6942 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
6943 bool *handled_ops_p
,
6944 struct walk_stmt_info
*wi
)
6946 int *info
= (int *) wi
->info
;
6947 gimple
*stmt
= gsi_stmt (*gsi_p
);
6949 *handled_ops_p
= true;
6950 switch (gimple_code (stmt
))
6956 case GIMPLE_OMP_FOR
:
6957 case GIMPLE_OMP_SECTIONS
:
6958 *info
= *info
== 0 ? 1 : -1;
6967 struct omp_taskcopy_context
6969 /* This field must be at the beginning, as we do "inheritance": Some
6970 callback functions for tree-inline.c (e.g., omp_copy_decl)
6971 receive a copy_body_data pointer that is up-casted to an
6972 omp_context pointer. */
6978 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
6980 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
6982 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
6983 return create_tmp_var (TREE_TYPE (var
));
6989 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
6991 tree name
, new_fields
= NULL
, type
, f
;
6993 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
6994 name
= DECL_NAME (TYPE_NAME (orig_type
));
6995 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
6996 TYPE_DECL
, name
, type
);
6997 TYPE_NAME (type
) = name
;
6999 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
7001 tree new_f
= copy_node (f
);
7002 DECL_CONTEXT (new_f
) = type
;
7003 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
7004 TREE_CHAIN (new_f
) = new_fields
;
7005 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
7006 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
7007 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
7010 tcctx
->cb
.decl_map
->put (f
, new_f
);
7012 TYPE_FIELDS (type
) = nreverse (new_fields
);
7017 /* Create task copyfn. */
7020 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
7022 struct function
*child_cfun
;
7023 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
7024 tree record_type
, srecord_type
, bind
, list
;
7025 bool record_needs_remap
= false, srecord_needs_remap
= false;
7027 struct omp_taskcopy_context tcctx
;
7028 location_t loc
= gimple_location (task_stmt
);
7029 size_t looptempno
= 0;
7031 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
7032 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
7033 gcc_assert (child_cfun
->cfg
== NULL
);
7034 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
7036 /* Reset DECL_CONTEXT on function arguments. */
7037 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
7038 DECL_CONTEXT (t
) = child_fn
;
7040 /* Populate the function. */
7041 push_gimplify_context ();
7042 push_cfun (child_cfun
);
7044 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
7045 TREE_SIDE_EFFECTS (bind
) = 1;
7047 DECL_SAVED_TREE (child_fn
) = bind
;
7048 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
7050 /* Remap src and dst argument types if needed. */
7051 record_type
= ctx
->record_type
;
7052 srecord_type
= ctx
->srecord_type
;
7053 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
7054 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
7056 record_needs_remap
= true;
7059 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
7060 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
7062 srecord_needs_remap
= true;
7066 if (record_needs_remap
|| srecord_needs_remap
)
7068 memset (&tcctx
, '\0', sizeof (tcctx
));
7069 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
7070 tcctx
.cb
.dst_fn
= child_fn
;
7071 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
7072 gcc_checking_assert (tcctx
.cb
.src_node
);
7073 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
7074 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
7075 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
7076 tcctx
.cb
.eh_lp_nr
= 0;
7077 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
7078 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
7081 if (record_needs_remap
)
7082 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
7083 if (srecord_needs_remap
)
7084 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
7087 tcctx
.cb
.decl_map
= NULL
;
7089 arg
= DECL_ARGUMENTS (child_fn
);
7090 TREE_TYPE (arg
) = build_pointer_type (record_type
);
7091 sarg
= DECL_CHAIN (arg
);
7092 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
7094 /* First pass: initialize temporaries used in record_type and srecord_type
7095 sizes and field offsets. */
7096 if (tcctx
.cb
.decl_map
)
7097 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
7098 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7102 decl
= OMP_CLAUSE_DECL (c
);
7103 p
= tcctx
.cb
.decl_map
->get (decl
);
7106 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
7107 sf
= (tree
) n
->value
;
7108 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7109 src
= build_simple_mem_ref_loc (loc
, sarg
);
7110 src
= omp_build_component_ref (src
, sf
);
7111 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
7112 append_to_statement_list (t
, &list
);
7115 /* Second pass: copy shared var pointers and copy construct non-VLA
7116 firstprivate vars. */
7117 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
7118 switch (OMP_CLAUSE_CODE (c
))
7121 case OMP_CLAUSE_SHARED
:
7122 decl
= OMP_CLAUSE_DECL (c
);
7123 key
= (splay_tree_key
) decl
;
7124 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7125 key
= (splay_tree_key
) &DECL_UID (decl
);
7126 n
= splay_tree_lookup (ctx
->field_map
, key
);
7129 f
= (tree
) n
->value
;
7130 if (tcctx
.cb
.decl_map
)
7131 f
= *tcctx
.cb
.decl_map
->get (f
);
7132 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
7133 sf
= (tree
) n
->value
;
7134 if (tcctx
.cb
.decl_map
)
7135 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7136 src
= build_simple_mem_ref_loc (loc
, sarg
);
7137 src
= omp_build_component_ref (src
, sf
);
7138 dst
= build_simple_mem_ref_loc (loc
, arg
);
7139 dst
= omp_build_component_ref (dst
, f
);
7140 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
7141 append_to_statement_list (t
, &list
);
7143 case OMP_CLAUSE__LOOPTEMP_
:
7144 /* Fields for first two _looptemp_ clauses are initialized by
7145 GOMP_taskloop*, the rest are handled like firstprivate. */
7152 case OMP_CLAUSE_FIRSTPRIVATE
:
7153 decl
= OMP_CLAUSE_DECL (c
);
7154 if (is_variable_sized (decl
))
7156 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
7159 f
= (tree
) n
->value
;
7160 if (tcctx
.cb
.decl_map
)
7161 f
= *tcctx
.cb
.decl_map
->get (f
);
7162 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
7165 sf
= (tree
) n
->value
;
7166 if (tcctx
.cb
.decl_map
)
7167 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7168 src
= build_simple_mem_ref_loc (loc
, sarg
);
7169 src
= omp_build_component_ref (src
, sf
);
7170 if (use_pointer_for_field (decl
, NULL
) || omp_is_reference (decl
))
7171 src
= build_simple_mem_ref_loc (loc
, src
);
7175 dst
= build_simple_mem_ref_loc (loc
, arg
);
7176 dst
= omp_build_component_ref (dst
, f
);
7177 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__LOOPTEMP_
)
7178 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
7180 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
7181 append_to_statement_list (t
, &list
);
7183 case OMP_CLAUSE_PRIVATE
:
7184 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
7186 decl
= OMP_CLAUSE_DECL (c
);
7187 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
7188 f
= (tree
) n
->value
;
7189 if (tcctx
.cb
.decl_map
)
7190 f
= *tcctx
.cb
.decl_map
->get (f
);
7191 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
7194 sf
= (tree
) n
->value
;
7195 if (tcctx
.cb
.decl_map
)
7196 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7197 src
= build_simple_mem_ref_loc (loc
, sarg
);
7198 src
= omp_build_component_ref (src
, sf
);
7199 if (use_pointer_for_field (decl
, NULL
))
7200 src
= build_simple_mem_ref_loc (loc
, src
);
7204 dst
= build_simple_mem_ref_loc (loc
, arg
);
7205 dst
= omp_build_component_ref (dst
, f
);
7206 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
7207 append_to_statement_list (t
, &list
);
7213 /* Last pass: handle VLA firstprivates. */
7214 if (tcctx
.cb
.decl_map
)
7215 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
7216 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7220 decl
= OMP_CLAUSE_DECL (c
);
7221 if (!is_variable_sized (decl
))
7223 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
7226 f
= (tree
) n
->value
;
7227 f
= *tcctx
.cb
.decl_map
->get (f
);
7228 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
7229 ind
= DECL_VALUE_EXPR (decl
);
7230 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
7231 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
7232 n
= splay_tree_lookup (ctx
->sfield_map
,
7233 (splay_tree_key
) TREE_OPERAND (ind
, 0));
7234 sf
= (tree
) n
->value
;
7235 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7236 src
= build_simple_mem_ref_loc (loc
, sarg
);
7237 src
= omp_build_component_ref (src
, sf
);
7238 src
= build_simple_mem_ref_loc (loc
, src
);
7239 dst
= build_simple_mem_ref_loc (loc
, arg
);
7240 dst
= omp_build_component_ref (dst
, f
);
7241 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
7242 append_to_statement_list (t
, &list
);
7243 n
= splay_tree_lookup (ctx
->field_map
,
7244 (splay_tree_key
) TREE_OPERAND (ind
, 0));
7245 df
= (tree
) n
->value
;
7246 df
= *tcctx
.cb
.decl_map
->get (df
);
7247 ptr
= build_simple_mem_ref_loc (loc
, arg
);
7248 ptr
= omp_build_component_ref (ptr
, df
);
7249 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
7250 build_fold_addr_expr_loc (loc
, dst
));
7251 append_to_statement_list (t
, &list
);
7254 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
7255 append_to_statement_list (t
, &list
);
7257 if (tcctx
.cb
.decl_map
)
7258 delete tcctx
.cb
.decl_map
;
7259 pop_gimplify_context (NULL
);
7260 BIND_EXPR_BODY (bind
) = list
;
7265 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
7269 size_t n_in
= 0, n_out
= 0, idx
= 2, i
;
7271 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
7272 gcc_assert (clauses
);
7273 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7274 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
7275 switch (OMP_CLAUSE_DEPEND_KIND (c
))
7277 case OMP_CLAUSE_DEPEND_IN
:
7280 case OMP_CLAUSE_DEPEND_OUT
:
7281 case OMP_CLAUSE_DEPEND_INOUT
:
7284 case OMP_CLAUSE_DEPEND_SOURCE
:
7285 case OMP_CLAUSE_DEPEND_SINK
:
7290 tree type
= build_array_type_nelts (ptr_type_node
, n_in
+ n_out
+ 2);
7291 tree array
= create_tmp_var (type
);
7292 TREE_ADDRESSABLE (array
) = 1;
7293 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
7295 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, n_in
+ n_out
));
7296 gimple_seq_add_stmt (iseq
, g
);
7297 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
7299 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, n_out
));
7300 gimple_seq_add_stmt (iseq
, g
);
7301 for (i
= 0; i
< 2; i
++)
7303 if ((i
? n_in
: n_out
) == 0)
7305 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7306 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
7307 && ((OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_IN
) ^ i
))
7309 tree t
= OMP_CLAUSE_DECL (c
);
7310 t
= fold_convert (ptr_type_node
, t
);
7311 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
7312 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
7313 NULL_TREE
, NULL_TREE
);
7314 g
= gimple_build_assign (r
, t
);
7315 gimple_seq_add_stmt (iseq
, g
);
7318 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
7319 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
7320 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
7322 tree clobber
= build_constructor (type
, NULL
);
7323 TREE_THIS_VOLATILE (clobber
) = 1;
7324 g
= gimple_build_assign (array
, clobber
);
7325 gimple_seq_add_stmt (oseq
, g
);
7328 /* Lower the OpenMP parallel or task directive in the current statement
7329 in GSI_P. CTX holds context information for the directive. */
7332 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7336 gimple
*stmt
= gsi_stmt (*gsi_p
);
7337 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
7338 gimple_seq par_body
, olist
, ilist
, par_olist
, par_rlist
, par_ilist
, new_body
;
7339 location_t loc
= gimple_location (stmt
);
7341 clauses
= gimple_omp_taskreg_clauses (stmt
);
7343 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
7344 par_body
= gimple_bind_body (par_bind
);
7345 child_fn
= ctx
->cb
.dst_fn
;
7346 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
7347 && !gimple_omp_parallel_combined_p (stmt
))
7349 struct walk_stmt_info wi
;
7352 memset (&wi
, 0, sizeof (wi
));
7355 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
7357 gimple_omp_parallel_set_combined_p (stmt
, true);
7359 gimple_seq dep_ilist
= NULL
;
7360 gimple_seq dep_olist
= NULL
;
7361 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
7362 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
7364 push_gimplify_context ();
7365 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
7366 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
7367 &dep_ilist
, &dep_olist
);
7370 if (ctx
->srecord_type
)
7371 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
7373 push_gimplify_context ();
7378 bool phony_construct
= gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
7379 && gimple_omp_parallel_grid_phony (as_a
<gomp_parallel
*> (stmt
));
7380 if (phony_construct
&& ctx
->record_type
)
7382 gcc_checking_assert (!ctx
->receiver_decl
);
7383 ctx
->receiver_decl
= create_tmp_var
7384 (build_reference_type (ctx
->record_type
), ".omp_rec");
7386 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
7387 lower_omp (&par_body
, ctx
);
7388 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
)
7389 lower_reduction_clauses (clauses
, &par_rlist
, ctx
);
7391 /* Declare all the variables created by mapping and the variables
7392 declared in the scope of the parallel body. */
7393 record_vars_into (ctx
->block_vars
, child_fn
);
7394 maybe_remove_omp_member_access_dummy_vars (par_bind
);
7395 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
7397 if (ctx
->record_type
)
7400 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
7401 : ctx
->record_type
, ".omp_data_o");
7402 DECL_NAMELESS (ctx
->sender_decl
) = 1;
7403 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
7404 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
7409 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
7410 lower_send_shared_vars (&ilist
, &olist
, ctx
);
7412 if (ctx
->record_type
)
7414 tree clobber
= build_constructor (TREE_TYPE (ctx
->sender_decl
), NULL
);
7415 TREE_THIS_VOLATILE (clobber
) = 1;
7416 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
7420 /* Once all the expansions are done, sequence all the different
7421 fragments inside gimple_omp_body. */
7425 if (ctx
->record_type
)
7427 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
7428 /* fixup_child_record_type might have changed receiver_decl's type. */
7429 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
7430 gimple_seq_add_stmt (&new_body
,
7431 gimple_build_assign (ctx
->receiver_decl
, t
));
7434 gimple_seq_add_seq (&new_body
, par_ilist
);
7435 gimple_seq_add_seq (&new_body
, par_body
);
7436 gimple_seq_add_seq (&new_body
, par_rlist
);
7437 if (ctx
->cancellable
)
7438 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
7439 gimple_seq_add_seq (&new_body
, par_olist
);
7440 new_body
= maybe_catch_exception (new_body
);
7441 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
7442 gimple_seq_add_stmt (&new_body
,
7443 gimple_build_omp_continue (integer_zero_node
,
7444 integer_zero_node
));
7445 if (!phony_construct
)
7447 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
7448 gimple_omp_set_body (stmt
, new_body
);
7451 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
7452 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
7453 gimple_bind_add_seq (bind
, ilist
);
7454 if (!phony_construct
)
7455 gimple_bind_add_stmt (bind
, stmt
);
7457 gimple_bind_add_seq (bind
, new_body
);
7458 gimple_bind_add_seq (bind
, olist
);
7460 pop_gimplify_context (NULL
);
7464 gimple_bind_add_seq (dep_bind
, dep_ilist
);
7465 gimple_bind_add_stmt (dep_bind
, bind
);
7466 gimple_bind_add_seq (dep_bind
, dep_olist
);
7467 pop_gimplify_context (dep_bind
);
7471 /* Lower the GIMPLE_OMP_TARGET in the current statement
7472 in GSI_P. CTX holds context information for the directive. */
7475 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7478 tree child_fn
, t
, c
;
7479 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
7480 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
7481 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
7482 location_t loc
= gimple_location (stmt
);
7483 bool offloaded
, data_region
;
7484 unsigned int map_cnt
= 0;
7486 offloaded
= is_gimple_omp_offloaded (stmt
);
7487 switch (gimple_omp_target_kind (stmt
))
7489 case GF_OMP_TARGET_KIND_REGION
:
7490 case GF_OMP_TARGET_KIND_UPDATE
:
7491 case GF_OMP_TARGET_KIND_ENTER_DATA
:
7492 case GF_OMP_TARGET_KIND_EXIT_DATA
:
7493 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
7494 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
7495 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
7496 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
7497 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
7498 data_region
= false;
7500 case GF_OMP_TARGET_KIND_DATA
:
7501 case GF_OMP_TARGET_KIND_OACC_DATA
:
7502 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
7509 clauses
= gimple_omp_target_clauses (stmt
);
7511 gimple_seq dep_ilist
= NULL
;
7512 gimple_seq dep_olist
= NULL
;
7513 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
7515 push_gimplify_context ();
7516 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
7517 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
7518 &dep_ilist
, &dep_olist
);
7525 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
7526 tgt_body
= gimple_bind_body (tgt_bind
);
7528 else if (data_region
)
7529 tgt_body
= gimple_omp_body (stmt
);
7530 child_fn
= ctx
->cb
.dst_fn
;
7532 push_gimplify_context ();
7535 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7536 switch (OMP_CLAUSE_CODE (c
))
7542 case OMP_CLAUSE_MAP
:
7544 /* First check what we're prepared to handle in the following. */
7545 switch (OMP_CLAUSE_MAP_KIND (c
))
7547 case GOMP_MAP_ALLOC
:
7550 case GOMP_MAP_TOFROM
:
7551 case GOMP_MAP_POINTER
:
7552 case GOMP_MAP_TO_PSET
:
7553 case GOMP_MAP_DELETE
:
7554 case GOMP_MAP_RELEASE
:
7555 case GOMP_MAP_ALWAYS_TO
:
7556 case GOMP_MAP_ALWAYS_FROM
:
7557 case GOMP_MAP_ALWAYS_TOFROM
:
7558 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
7559 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
7560 case GOMP_MAP_STRUCT
:
7561 case GOMP_MAP_ALWAYS_POINTER
:
7563 case GOMP_MAP_FORCE_ALLOC
:
7564 case GOMP_MAP_FORCE_TO
:
7565 case GOMP_MAP_FORCE_FROM
:
7566 case GOMP_MAP_FORCE_TOFROM
:
7567 case GOMP_MAP_FORCE_PRESENT
:
7568 case GOMP_MAP_FORCE_DEVICEPTR
:
7569 case GOMP_MAP_DEVICE_RESIDENT
:
7571 gcc_assert (is_gimple_omp_oacc (stmt
));
7579 case OMP_CLAUSE_FROM
:
7581 var
= OMP_CLAUSE_DECL (c
);
7584 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
7585 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
7586 && (OMP_CLAUSE_MAP_KIND (c
)
7587 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
7593 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
7595 tree var2
= DECL_VALUE_EXPR (var
);
7596 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
7597 var2
= TREE_OPERAND (var2
, 0);
7598 gcc_assert (DECL_P (var2
));
7603 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7604 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7605 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
7607 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
7609 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
7610 && varpool_node::get_create (var
)->offloadable
)
7613 tree type
= build_pointer_type (TREE_TYPE (var
));
7614 tree new_var
= lookup_decl (var
, ctx
);
7615 x
= create_tmp_var_raw (type
, get_name (new_var
));
7616 gimple_add_tmp_var (x
);
7617 x
= build_simple_mem_ref (x
);
7618 SET_DECL_VALUE_EXPR (new_var
, x
);
7619 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7624 if (!maybe_lookup_field (var
, ctx
))
7627 /* Don't remap oacc parallel reduction variables, because the
7628 intermediate result must be local to each gang. */
7629 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7630 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
7632 x
= build_receiver_ref (var
, true, ctx
);
7633 tree new_var
= lookup_decl (var
, ctx
);
7635 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7636 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
7637 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
7638 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
7639 x
= build_simple_mem_ref (x
);
7640 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7642 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
7643 if (omp_is_reference (new_var
)
7644 && TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
)
7646 /* Create a local object to hold the instance
7648 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
7649 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
7650 tree inst
= create_tmp_var (type
, id
);
7651 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
7652 x
= build_fold_addr_expr (inst
);
7654 gimplify_assign (new_var
, x
, &fplist
);
7656 else if (DECL_P (new_var
))
7658 SET_DECL_VALUE_EXPR (new_var
, x
);
7659 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7667 case OMP_CLAUSE_FIRSTPRIVATE
:
7668 if (is_oacc_parallel (ctx
))
7669 goto oacc_firstprivate
;
7671 var
= OMP_CLAUSE_DECL (c
);
7672 if (!omp_is_reference (var
)
7673 && !is_gimple_reg_type (TREE_TYPE (var
)))
7675 tree new_var
= lookup_decl (var
, ctx
);
7676 if (is_variable_sized (var
))
7678 tree pvar
= DECL_VALUE_EXPR (var
);
7679 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
7680 pvar
= TREE_OPERAND (pvar
, 0);
7681 gcc_assert (DECL_P (pvar
));
7682 tree new_pvar
= lookup_decl (pvar
, ctx
);
7683 x
= build_fold_indirect_ref (new_pvar
);
7684 TREE_THIS_NOTRAP (x
) = 1;
7687 x
= build_receiver_ref (var
, true, ctx
);
7688 SET_DECL_VALUE_EXPR (new_var
, x
);
7689 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7693 case OMP_CLAUSE_PRIVATE
:
7694 if (is_gimple_omp_oacc (ctx
->stmt
))
7696 var
= OMP_CLAUSE_DECL (c
);
7697 if (is_variable_sized (var
))
7699 tree new_var
= lookup_decl (var
, ctx
);
7700 tree pvar
= DECL_VALUE_EXPR (var
);
7701 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
7702 pvar
= TREE_OPERAND (pvar
, 0);
7703 gcc_assert (DECL_P (pvar
));
7704 tree new_pvar
= lookup_decl (pvar
, ctx
);
7705 x
= build_fold_indirect_ref (new_pvar
);
7706 TREE_THIS_NOTRAP (x
) = 1;
7707 SET_DECL_VALUE_EXPR (new_var
, x
);
7708 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7712 case OMP_CLAUSE_USE_DEVICE_PTR
:
7713 case OMP_CLAUSE_IS_DEVICE_PTR
:
7714 var
= OMP_CLAUSE_DECL (c
);
7716 if (is_variable_sized (var
))
7718 tree new_var
= lookup_decl (var
, ctx
);
7719 tree pvar
= DECL_VALUE_EXPR (var
);
7720 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
7721 pvar
= TREE_OPERAND (pvar
, 0);
7722 gcc_assert (DECL_P (pvar
));
7723 tree new_pvar
= lookup_decl (pvar
, ctx
);
7724 x
= build_fold_indirect_ref (new_pvar
);
7725 TREE_THIS_NOTRAP (x
) = 1;
7726 SET_DECL_VALUE_EXPR (new_var
, x
);
7727 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7729 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
7731 tree new_var
= lookup_decl (var
, ctx
);
7732 tree type
= build_pointer_type (TREE_TYPE (var
));
7733 x
= create_tmp_var_raw (type
, get_name (new_var
));
7734 gimple_add_tmp_var (x
);
7735 x
= build_simple_mem_ref (x
);
7736 SET_DECL_VALUE_EXPR (new_var
, x
);
7737 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7741 tree new_var
= lookup_decl (var
, ctx
);
7742 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
7743 gimple_add_tmp_var (x
);
7744 SET_DECL_VALUE_EXPR (new_var
, x
);
7745 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7752 target_nesting_level
++;
7753 lower_omp (&tgt_body
, ctx
);
7754 target_nesting_level
--;
7756 else if (data_region
)
7757 lower_omp (&tgt_body
, ctx
);
7761 /* Declare all the variables created by mapping and the variables
7762 declared in the scope of the target body. */
7763 record_vars_into (ctx
->block_vars
, child_fn
);
7764 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
7765 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
7770 if (ctx
->record_type
)
7773 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
7774 DECL_NAMELESS (ctx
->sender_decl
) = 1;
7775 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
7776 t
= make_tree_vec (3);
7777 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
7779 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
7781 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
7782 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
7783 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
7784 tree tkind_type
= short_unsigned_type_node
;
7785 int talign_shift
= 8;
7787 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
7789 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
7790 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
7791 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
7792 gimple_omp_target_set_data_arg (stmt
, t
);
7794 vec
<constructor_elt
, va_gc
> *vsize
;
7795 vec
<constructor_elt
, va_gc
> *vkind
;
7796 vec_alloc (vsize
, map_cnt
);
7797 vec_alloc (vkind
, map_cnt
);
7798 unsigned int map_idx
= 0;
7800 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7801 switch (OMP_CLAUSE_CODE (c
))
7803 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
7804 unsigned int talign
;
7809 case OMP_CLAUSE_MAP
:
7811 case OMP_CLAUSE_FROM
:
7812 oacc_firstprivate_map
:
7814 ovar
= OMP_CLAUSE_DECL (c
);
7815 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7816 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7817 || (OMP_CLAUSE_MAP_KIND (c
)
7818 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
7822 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7823 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
7825 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
7826 == get_base_address (ovar
));
7827 nc
= OMP_CLAUSE_CHAIN (c
);
7828 ovar
= OMP_CLAUSE_DECL (nc
);
7832 tree x
= build_sender_ref (ovar
, ctx
);
7834 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
7835 gimplify_assign (x
, v
, &ilist
);
7841 if (DECL_SIZE (ovar
)
7842 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
7844 tree ovar2
= DECL_VALUE_EXPR (ovar
);
7845 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
7846 ovar2
= TREE_OPERAND (ovar2
, 0);
7847 gcc_assert (DECL_P (ovar2
));
7850 if (!maybe_lookup_field (ovar
, ctx
))
7854 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
7855 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
7856 talign
= DECL_ALIGN_UNIT (ovar
);
7859 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
7860 x
= build_sender_ref (ovar
, ctx
);
7862 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7863 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
7864 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
7865 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
7867 gcc_assert (offloaded
);
7869 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
7870 mark_addressable (avar
);
7871 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
7872 talign
= DECL_ALIGN_UNIT (avar
);
7873 avar
= build_fold_addr_expr (avar
);
7874 gimplify_assign (x
, avar
, &ilist
);
7876 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7878 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
7879 if (!omp_is_reference (var
))
7881 if (is_gimple_reg (var
)
7882 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
7883 TREE_NO_WARNING (var
) = 1;
7884 var
= build_fold_addr_expr (var
);
7887 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
7888 gimplify_assign (x
, var
, &ilist
);
7890 else if (is_gimple_reg (var
))
7892 gcc_assert (offloaded
);
7893 tree avar
= create_tmp_var (TREE_TYPE (var
));
7894 mark_addressable (avar
);
7895 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
7896 if (GOMP_MAP_COPY_TO_P (map_kind
)
7897 || map_kind
== GOMP_MAP_POINTER
7898 || map_kind
== GOMP_MAP_TO_PSET
7899 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
7901 /* If we need to initialize a temporary
7902 with VAR because it is not addressable, and
7903 the variable hasn't been initialized yet, then
7904 we'll get a warning for the store to avar.
7905 Don't warn in that case, the mapping might
7907 TREE_NO_WARNING (var
) = 1;
7908 gimplify_assign (avar
, var
, &ilist
);
7910 avar
= build_fold_addr_expr (avar
);
7911 gimplify_assign (x
, avar
, &ilist
);
7912 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
7913 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
7914 && !TYPE_READONLY (TREE_TYPE (var
)))
7916 x
= unshare_expr (x
);
7917 x
= build_simple_mem_ref (x
);
7918 gimplify_assign (var
, x
, &olist
);
7923 var
= build_fold_addr_expr (var
);
7924 gimplify_assign (x
, var
, &ilist
);
7928 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7930 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
7931 s
= TREE_TYPE (ovar
);
7932 if (TREE_CODE (s
) == REFERENCE_TYPE
)
7934 s
= TYPE_SIZE_UNIT (s
);
7937 s
= OMP_CLAUSE_SIZE (c
);
7939 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
7940 s
= fold_convert (size_type_node
, s
);
7941 purpose
= size_int (map_idx
++);
7942 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
7943 if (TREE_CODE (s
) != INTEGER_CST
)
7944 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
7946 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
7947 switch (OMP_CLAUSE_CODE (c
))
7949 case OMP_CLAUSE_MAP
:
7950 tkind
= OMP_CLAUSE_MAP_KIND (c
);
7952 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
7955 case GOMP_MAP_ALLOC
:
7958 case GOMP_MAP_TOFROM
:
7959 case GOMP_MAP_ALWAYS_TO
:
7960 case GOMP_MAP_ALWAYS_FROM
:
7961 case GOMP_MAP_ALWAYS_TOFROM
:
7962 case GOMP_MAP_RELEASE
:
7963 case GOMP_MAP_FORCE_TO
:
7964 case GOMP_MAP_FORCE_FROM
:
7965 case GOMP_MAP_FORCE_TOFROM
:
7966 case GOMP_MAP_FORCE_PRESENT
:
7967 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
7969 case GOMP_MAP_DELETE
:
7970 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
7974 if (tkind_zero
!= tkind
)
7976 if (integer_zerop (s
))
7978 else if (integer_nonzerop (s
))
7982 case OMP_CLAUSE_FIRSTPRIVATE
:
7983 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
7984 tkind
= GOMP_MAP_TO
;
7988 tkind
= GOMP_MAP_TO
;
7991 case OMP_CLAUSE_FROM
:
7992 tkind
= GOMP_MAP_FROM
;
7998 gcc_checking_assert (tkind
7999 < (HOST_WIDE_INT_C (1U) << talign_shift
));
8000 gcc_checking_assert (tkind_zero
8001 < (HOST_WIDE_INT_C (1U) << talign_shift
));
8002 talign
= ceil_log2 (talign
);
8003 tkind
|= talign
<< talign_shift
;
8004 tkind_zero
|= talign
<< talign_shift
;
8005 gcc_checking_assert (tkind
8006 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
8007 gcc_checking_assert (tkind_zero
8008 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
8009 if (tkind
== tkind_zero
)
8010 x
= build_int_cstu (tkind_type
, tkind
);
8013 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
8014 x
= build3 (COND_EXPR
, tkind_type
,
8015 fold_build2 (EQ_EXPR
, boolean_type_node
,
8016 unshare_expr (s
), size_zero_node
),
8017 build_int_cstu (tkind_type
, tkind_zero
),
8018 build_int_cstu (tkind_type
, tkind
));
8020 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
8025 case OMP_CLAUSE_FIRSTPRIVATE
:
8026 if (is_oacc_parallel (ctx
))
8027 goto oacc_firstprivate_map
;
8028 ovar
= OMP_CLAUSE_DECL (c
);
8029 if (omp_is_reference (ovar
))
8030 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
8032 talign
= DECL_ALIGN_UNIT (ovar
);
8033 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
8034 x
= build_sender_ref (ovar
, ctx
);
8035 tkind
= GOMP_MAP_FIRSTPRIVATE
;
8036 type
= TREE_TYPE (ovar
);
8037 if (omp_is_reference (ovar
))
8038 type
= TREE_TYPE (type
);
8039 if ((INTEGRAL_TYPE_P (type
)
8040 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
8041 || TREE_CODE (type
) == POINTER_TYPE
)
8043 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
8045 if (omp_is_reference (var
))
8046 t
= build_simple_mem_ref (var
);
8047 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
8048 TREE_NO_WARNING (var
) = 1;
8049 if (TREE_CODE (type
) != POINTER_TYPE
)
8050 t
= fold_convert (pointer_sized_int_node
, t
);
8051 t
= fold_convert (TREE_TYPE (x
), t
);
8052 gimplify_assign (x
, t
, &ilist
);
8054 else if (omp_is_reference (var
))
8055 gimplify_assign (x
, var
, &ilist
);
8056 else if (is_gimple_reg (var
))
8058 tree avar
= create_tmp_var (TREE_TYPE (var
));
8059 mark_addressable (avar
);
8060 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
8061 TREE_NO_WARNING (var
) = 1;
8062 gimplify_assign (avar
, var
, &ilist
);
8063 avar
= build_fold_addr_expr (avar
);
8064 gimplify_assign (x
, avar
, &ilist
);
8068 var
= build_fold_addr_expr (var
);
8069 gimplify_assign (x
, var
, &ilist
);
8071 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
8073 else if (omp_is_reference (ovar
))
8074 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
8076 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
8077 s
= fold_convert (size_type_node
, s
);
8078 purpose
= size_int (map_idx
++);
8079 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
8080 if (TREE_CODE (s
) != INTEGER_CST
)
8081 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
8083 gcc_checking_assert (tkind
8084 < (HOST_WIDE_INT_C (1U) << talign_shift
));
8085 talign
= ceil_log2 (talign
);
8086 tkind
|= talign
<< talign_shift
;
8087 gcc_checking_assert (tkind
8088 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
8089 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
8090 build_int_cstu (tkind_type
, tkind
));
8093 case OMP_CLAUSE_USE_DEVICE_PTR
:
8094 case OMP_CLAUSE_IS_DEVICE_PTR
:
8095 ovar
= OMP_CLAUSE_DECL (c
);
8096 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
8097 x
= build_sender_ref (ovar
, ctx
);
8098 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
8099 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
8101 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
8102 type
= TREE_TYPE (ovar
);
8103 if (TREE_CODE (type
) == ARRAY_TYPE
)
8104 var
= build_fold_addr_expr (var
);
8107 if (omp_is_reference (ovar
))
8109 type
= TREE_TYPE (type
);
8110 if (TREE_CODE (type
) != ARRAY_TYPE
)
8111 var
= build_simple_mem_ref (var
);
8112 var
= fold_convert (TREE_TYPE (x
), var
);
8115 gimplify_assign (x
, var
, &ilist
);
8117 purpose
= size_int (map_idx
++);
8118 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
8119 gcc_checking_assert (tkind
8120 < (HOST_WIDE_INT_C (1U) << talign_shift
));
8121 gcc_checking_assert (tkind
8122 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
8123 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
8124 build_int_cstu (tkind_type
, tkind
));
8128 gcc_assert (map_idx
== map_cnt
);
8130 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
8131 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
8132 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
8133 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
8134 for (int i
= 1; i
<= 2; i
++)
8135 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
8137 gimple_seq initlist
= NULL
;
8138 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
8139 TREE_VEC_ELT (t
, i
)),
8140 &initlist
, true, NULL_TREE
);
8141 gimple_seq_add_seq (&ilist
, initlist
);
8143 tree clobber
= build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, i
)),
8145 TREE_THIS_VOLATILE (clobber
) = 1;
8146 gimple_seq_add_stmt (&olist
,
8147 gimple_build_assign (TREE_VEC_ELT (t
, i
),
8151 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
8152 TREE_THIS_VOLATILE (clobber
) = 1;
8153 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
8157 /* Once all the expansions are done, sequence all the different
8158 fragments inside gimple_omp_body. */
8163 && ctx
->record_type
)
8165 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
8166 /* fixup_child_record_type might have changed receiver_decl's type. */
8167 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
8168 gimple_seq_add_stmt (&new_body
,
8169 gimple_build_assign (ctx
->receiver_decl
, t
));
8171 gimple_seq_add_seq (&new_body
, fplist
);
8173 if (offloaded
|| data_region
)
8175 tree prev
= NULL_TREE
;
8176 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8177 switch (OMP_CLAUSE_CODE (c
))
8182 case OMP_CLAUSE_FIRSTPRIVATE
:
8183 if (is_gimple_omp_oacc (ctx
->stmt
))
8185 var
= OMP_CLAUSE_DECL (c
);
8186 if (omp_is_reference (var
)
8187 || is_gimple_reg_type (TREE_TYPE (var
)))
8189 tree new_var
= lookup_decl (var
, ctx
);
8191 type
= TREE_TYPE (var
);
8192 if (omp_is_reference (var
))
8193 type
= TREE_TYPE (type
);
8194 if ((INTEGRAL_TYPE_P (type
)
8195 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
8196 || TREE_CODE (type
) == POINTER_TYPE
)
8198 x
= build_receiver_ref (var
, false, ctx
);
8199 if (TREE_CODE (type
) != POINTER_TYPE
)
8200 x
= fold_convert (pointer_sized_int_node
, x
);
8201 x
= fold_convert (type
, x
);
8202 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
8204 if (omp_is_reference (var
))
8206 tree v
= create_tmp_var_raw (type
, get_name (var
));
8207 gimple_add_tmp_var (v
);
8208 TREE_ADDRESSABLE (v
) = 1;
8209 gimple_seq_add_stmt (&new_body
,
8210 gimple_build_assign (v
, x
));
8211 x
= build_fold_addr_expr (v
);
8213 gimple_seq_add_stmt (&new_body
,
8214 gimple_build_assign (new_var
, x
));
8218 x
= build_receiver_ref (var
, !omp_is_reference (var
), ctx
);
8219 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
8221 gimple_seq_add_stmt (&new_body
,
8222 gimple_build_assign (new_var
, x
));
8225 else if (is_variable_sized (var
))
8227 tree pvar
= DECL_VALUE_EXPR (var
);
8228 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
8229 pvar
= TREE_OPERAND (pvar
, 0);
8230 gcc_assert (DECL_P (pvar
));
8231 tree new_var
= lookup_decl (pvar
, ctx
);
8232 x
= build_receiver_ref (var
, false, ctx
);
8233 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8234 gimple_seq_add_stmt (&new_body
,
8235 gimple_build_assign (new_var
, x
));
8238 case OMP_CLAUSE_PRIVATE
:
8239 if (is_gimple_omp_oacc (ctx
->stmt
))
8241 var
= OMP_CLAUSE_DECL (c
);
8242 if (omp_is_reference (var
))
8244 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8245 tree new_var
= lookup_decl (var
, ctx
);
8246 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
8247 if (TREE_CONSTANT (x
))
8249 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
8251 gimple_add_tmp_var (x
);
8252 TREE_ADDRESSABLE (x
) = 1;
8253 x
= build_fold_addr_expr_loc (clause_loc
, x
);
8258 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
8259 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8260 gimple_seq_add_stmt (&new_body
,
8261 gimple_build_assign (new_var
, x
));
8264 case OMP_CLAUSE_USE_DEVICE_PTR
:
8265 case OMP_CLAUSE_IS_DEVICE_PTR
:
8266 var
= OMP_CLAUSE_DECL (c
);
8267 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
8268 x
= build_sender_ref (var
, ctx
);
8270 x
= build_receiver_ref (var
, false, ctx
);
8271 if (is_variable_sized (var
))
8273 tree pvar
= DECL_VALUE_EXPR (var
);
8274 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
8275 pvar
= TREE_OPERAND (pvar
, 0);
8276 gcc_assert (DECL_P (pvar
));
8277 tree new_var
= lookup_decl (pvar
, ctx
);
8278 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8279 gimple_seq_add_stmt (&new_body
,
8280 gimple_build_assign (new_var
, x
));
8282 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
8284 tree new_var
= lookup_decl (var
, ctx
);
8285 new_var
= DECL_VALUE_EXPR (new_var
);
8286 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
8287 new_var
= TREE_OPERAND (new_var
, 0);
8288 gcc_assert (DECL_P (new_var
));
8289 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8290 gimple_seq_add_stmt (&new_body
,
8291 gimple_build_assign (new_var
, x
));
8295 tree type
= TREE_TYPE (var
);
8296 tree new_var
= lookup_decl (var
, ctx
);
8297 if (omp_is_reference (var
))
8299 type
= TREE_TYPE (type
);
8300 if (TREE_CODE (type
) != ARRAY_TYPE
)
8302 tree v
= create_tmp_var_raw (type
, get_name (var
));
8303 gimple_add_tmp_var (v
);
8304 TREE_ADDRESSABLE (v
) = 1;
8305 x
= fold_convert (type
, x
);
8306 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
8308 gimple_seq_add_stmt (&new_body
,
8309 gimple_build_assign (v
, x
));
8310 x
= build_fold_addr_expr (v
);
8313 new_var
= DECL_VALUE_EXPR (new_var
);
8314 x
= fold_convert (TREE_TYPE (new_var
), x
);
8315 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8316 gimple_seq_add_stmt (&new_body
,
8317 gimple_build_assign (new_var
, x
));
8321 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
8322 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
8323 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8324 or references to VLAs. */
8325 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8326 switch (OMP_CLAUSE_CODE (c
))
8331 case OMP_CLAUSE_MAP
:
8332 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8333 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
8335 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8336 poly_int64 offset
= 0;
8338 var
= OMP_CLAUSE_DECL (c
);
8340 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
8341 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
8343 && varpool_node::get_create (var
)->offloadable
)
8345 if (TREE_CODE (var
) == INDIRECT_REF
8346 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
8347 var
= TREE_OPERAND (var
, 0);
8348 if (TREE_CODE (var
) == COMPONENT_REF
)
8350 var
= get_addr_base_and_unit_offset (var
, &offset
);
8351 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
8353 else if (DECL_SIZE (var
)
8354 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
8356 tree var2
= DECL_VALUE_EXPR (var
);
8357 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
8358 var2
= TREE_OPERAND (var2
, 0);
8359 gcc_assert (DECL_P (var2
));
8362 tree new_var
= lookup_decl (var
, ctx
), x
;
8363 tree type
= TREE_TYPE (new_var
);
8365 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
8366 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
8369 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
8371 new_var
= build2 (MEM_REF
, type
,
8372 build_fold_addr_expr (new_var
),
8373 build_int_cst (build_pointer_type (type
),
8376 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
8378 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
8379 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
8380 new_var
= build2 (MEM_REF
, type
,
8381 build_fold_addr_expr (new_var
),
8382 build_int_cst (build_pointer_type (type
),
8386 is_ref
= omp_is_reference (var
);
8387 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
8389 bool ref_to_array
= false;
8392 type
= TREE_TYPE (type
);
8393 if (TREE_CODE (type
) == ARRAY_TYPE
)
8395 type
= build_pointer_type (type
);
8396 ref_to_array
= true;
8399 else if (TREE_CODE (type
) == ARRAY_TYPE
)
8401 tree decl2
= DECL_VALUE_EXPR (new_var
);
8402 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
8403 decl2
= TREE_OPERAND (decl2
, 0);
8404 gcc_assert (DECL_P (decl2
));
8406 type
= TREE_TYPE (new_var
);
8408 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
8409 x
= fold_convert_loc (clause_loc
, type
, x
);
8410 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
8412 tree bias
= OMP_CLAUSE_SIZE (c
);
8414 bias
= lookup_decl (bias
, ctx
);
8415 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
8416 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
8418 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
8419 TREE_TYPE (x
), x
, bias
);
8422 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
8423 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8424 if (is_ref
&& !ref_to_array
)
8426 tree t
= create_tmp_var_raw (type
, get_name (var
));
8427 gimple_add_tmp_var (t
);
8428 TREE_ADDRESSABLE (t
) = 1;
8429 gimple_seq_add_stmt (&new_body
,
8430 gimple_build_assign (t
, x
));
8431 x
= build_fold_addr_expr_loc (clause_loc
, t
);
8433 gimple_seq_add_stmt (&new_body
,
8434 gimple_build_assign (new_var
, x
));
8437 else if (OMP_CLAUSE_CHAIN (c
)
8438 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
8440 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
8441 == GOMP_MAP_FIRSTPRIVATE_POINTER
8442 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
8443 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
8446 case OMP_CLAUSE_PRIVATE
:
8447 var
= OMP_CLAUSE_DECL (c
);
8448 if (is_variable_sized (var
))
8450 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8451 tree new_var
= lookup_decl (var
, ctx
);
8452 tree pvar
= DECL_VALUE_EXPR (var
);
8453 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
8454 pvar
= TREE_OPERAND (pvar
, 0);
8455 gcc_assert (DECL_P (pvar
));
8456 tree new_pvar
= lookup_decl (pvar
, ctx
);
8457 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
8458 tree al
= size_int (DECL_ALIGN (var
));
8459 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
8460 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
8461 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
8462 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8463 gimple_seq_add_stmt (&new_body
,
8464 gimple_build_assign (new_pvar
, x
));
8466 else if (omp_is_reference (var
) && !is_gimple_omp_oacc (ctx
->stmt
))
8468 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8469 tree new_var
= lookup_decl (var
, ctx
);
8470 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
8471 if (TREE_CONSTANT (x
))
8476 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
8477 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
8478 tree al
= size_int (TYPE_ALIGN (rtype
));
8479 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
8482 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
8483 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8484 gimple_seq_add_stmt (&new_body
,
8485 gimple_build_assign (new_var
, x
));
8490 gimple_seq fork_seq
= NULL
;
8491 gimple_seq join_seq
= NULL
;
8493 if (is_oacc_parallel (ctx
))
8495 /* If there are reductions on the offloaded region itself, treat
8496 them as a dummy GANG loop. */
8497 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
8499 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
8500 false, NULL
, NULL
, &fork_seq
, &join_seq
, ctx
);
8503 gimple_seq_add_seq (&new_body
, fork_seq
);
8504 gimple_seq_add_seq (&new_body
, tgt_body
);
8505 gimple_seq_add_seq (&new_body
, join_seq
);
8508 new_body
= maybe_catch_exception (new_body
);
8510 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
8511 gimple_omp_set_body (stmt
, new_body
);
8514 bind
= gimple_build_bind (NULL
, NULL
,
8515 tgt_bind
? gimple_bind_block (tgt_bind
)
8517 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
8518 gimple_bind_add_seq (bind
, ilist
);
8519 gimple_bind_add_stmt (bind
, stmt
);
8520 gimple_bind_add_seq (bind
, olist
);
8522 pop_gimplify_context (NULL
);
8526 gimple_bind_add_seq (dep_bind
, dep_ilist
);
8527 gimple_bind_add_stmt (dep_bind
, bind
);
8528 gimple_bind_add_seq (dep_bind
, dep_olist
);
8529 pop_gimplify_context (dep_bind
);
8533 /* Expand code for an OpenMP teams directive. */
8536 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8538 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
8539 push_gimplify_context ();
8541 tree block
= make_node (BLOCK
);
8542 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
8543 gsi_replace (gsi_p
, bind
, true);
8544 gimple_seq bind_body
= NULL
;
8545 gimple_seq dlist
= NULL
;
8546 gimple_seq olist
= NULL
;
8548 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
8549 OMP_CLAUSE_NUM_TEAMS
);
8550 if (num_teams
== NULL_TREE
)
8551 num_teams
= build_int_cst (unsigned_type_node
, 0);
8554 num_teams
= OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams
);
8555 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
8556 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
8558 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
8559 OMP_CLAUSE_THREAD_LIMIT
);
8560 if (thread_limit
== NULL_TREE
)
8561 thread_limit
= build_int_cst (unsigned_type_node
, 0);
8564 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
8565 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
8566 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
8570 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
8571 &bind_body
, &dlist
, ctx
, NULL
);
8572 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
8573 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
, ctx
);
8574 if (!gimple_omp_teams_grid_phony (teams_stmt
))
8576 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
8577 location_t loc
= gimple_location (teams_stmt
);
8578 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS
);
8579 gimple
*call
= gimple_build_call (decl
, 2, num_teams
, thread_limit
);
8580 gimple_set_location (call
, loc
);
8581 gimple_seq_add_stmt (&bind_body
, call
);
8584 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
8585 gimple_omp_set_body (teams_stmt
, NULL
);
8586 gimple_seq_add_seq (&bind_body
, olist
);
8587 gimple_seq_add_seq (&bind_body
, dlist
);
8588 if (!gimple_omp_teams_grid_phony (teams_stmt
))
8589 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
8590 gimple_bind_set_body (bind
, bind_body
);
8592 pop_gimplify_context (bind
);
8594 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8595 BLOCK_VARS (block
) = ctx
->block_vars
;
8596 if (BLOCK_VARS (block
))
8597 TREE_USED (block
) = 1;
8600 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8603 lower_omp_grid_body (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8605 gimple
*stmt
= gsi_stmt (*gsi_p
);
8606 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8607 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt
),
8608 gimple_build_omp_return (false));
8612 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8613 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8614 of OMP context, but with task_shared_vars set. */
8617 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
8622 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8623 if (VAR_P (t
) && data
== NULL
&& DECL_HAS_VALUE_EXPR_P (t
))
8626 if (task_shared_vars
8628 && bitmap_bit_p (task_shared_vars
, DECL_UID (t
)))
8631 /* If a global variable has been privatized, TREE_CONSTANT on
8632 ADDR_EXPR might be wrong. */
8633 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
8634 recompute_tree_invariant_for_addr_expr (t
);
8636 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
8640 /* Data to be communicated between lower_omp_regimplify_operands and
8641 lower_omp_regimplify_operands_p. */
8643 struct lower_omp_regimplify_operands_data
8649 /* Helper function for lower_omp_regimplify_operands. Find
8650 omp_member_access_dummy_var vars and adjust temporarily their
8651 DECL_VALUE_EXPRs if needed. */
8654 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
8657 tree t
= omp_member_access_dummy_var (*tp
);
8660 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
8661 lower_omp_regimplify_operands_data
*ldata
8662 = (lower_omp_regimplify_operands_data
*) wi
->info
;
8663 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
8666 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
8667 ldata
->decls
->safe_push (*tp
);
8668 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
8669 SET_DECL_VALUE_EXPR (*tp
, v
);
8672 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
8676 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8677 of omp_member_access_dummy_var vars during regimplification. */
8680 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
8681 gimple_stmt_iterator
*gsi_p
)
8683 auto_vec
<tree
, 10> decls
;
8686 struct walk_stmt_info wi
;
8687 memset (&wi
, '\0', sizeof (wi
));
8688 struct lower_omp_regimplify_operands_data data
;
8690 data
.decls
= &decls
;
8692 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
8694 gimple_regimplify_operands (stmt
, gsi_p
);
8695 while (!decls
.is_empty ())
8697 tree t
= decls
.pop ();
8698 tree v
= decls
.pop ();
8699 SET_DECL_VALUE_EXPR (t
, v
);
8704 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8706 gimple
*stmt
= gsi_stmt (*gsi_p
);
8707 struct walk_stmt_info wi
;
8710 if (gimple_has_location (stmt
))
8711 input_location
= gimple_location (stmt
);
8713 if (task_shared_vars
)
8714 memset (&wi
, '\0', sizeof (wi
));
8716 /* If we have issued syntax errors, avoid doing any heavy lifting.
8717 Just replace the OMP directives with a NOP to avoid
8718 confusing RTL expansion. */
8719 if (seen_error () && is_gimple_omp (stmt
))
8721 gsi_replace (gsi_p
, gimple_build_nop (), true);
8725 switch (gimple_code (stmt
))
8729 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
8730 if ((ctx
|| task_shared_vars
)
8731 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
8732 lower_omp_regimplify_p
,
8733 ctx
? NULL
: &wi
, NULL
)
8734 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
8735 lower_omp_regimplify_p
,
8736 ctx
? NULL
: &wi
, NULL
)))
8737 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
8741 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
8743 case GIMPLE_EH_FILTER
:
8744 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
8747 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
8748 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
8750 case GIMPLE_TRANSACTION
:
8751 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
8755 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
8756 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
8758 case GIMPLE_OMP_PARALLEL
:
8759 case GIMPLE_OMP_TASK
:
8760 ctx
= maybe_lookup_ctx (stmt
);
8762 if (ctx
->cancellable
)
8763 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
8764 lower_omp_taskreg (gsi_p
, ctx
);
8766 case GIMPLE_OMP_FOR
:
8767 ctx
= maybe_lookup_ctx (stmt
);
8769 if (ctx
->cancellable
)
8770 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
8771 lower_omp_for (gsi_p
, ctx
);
8773 case GIMPLE_OMP_SECTIONS
:
8774 ctx
= maybe_lookup_ctx (stmt
);
8776 if (ctx
->cancellable
)
8777 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
8778 lower_omp_sections (gsi_p
, ctx
);
8780 case GIMPLE_OMP_SINGLE
:
8781 ctx
= maybe_lookup_ctx (stmt
);
8783 lower_omp_single (gsi_p
, ctx
);
8785 case GIMPLE_OMP_MASTER
:
8786 ctx
= maybe_lookup_ctx (stmt
);
8788 lower_omp_master (gsi_p
, ctx
);
8790 case GIMPLE_OMP_TASKGROUP
:
8791 ctx
= maybe_lookup_ctx (stmt
);
8793 lower_omp_taskgroup (gsi_p
, ctx
);
8795 case GIMPLE_OMP_ORDERED
:
8796 ctx
= maybe_lookup_ctx (stmt
);
8798 lower_omp_ordered (gsi_p
, ctx
);
8800 case GIMPLE_OMP_CRITICAL
:
8801 ctx
= maybe_lookup_ctx (stmt
);
8803 lower_omp_critical (gsi_p
, ctx
);
8805 case GIMPLE_OMP_ATOMIC_LOAD
:
8806 if ((ctx
|| task_shared_vars
)
8807 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8808 as_a
<gomp_atomic_load
*> (stmt
)),
8809 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
8810 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
8812 case GIMPLE_OMP_TARGET
:
8813 ctx
= maybe_lookup_ctx (stmt
);
8815 lower_omp_target (gsi_p
, ctx
);
8817 case GIMPLE_OMP_TEAMS
:
8818 ctx
= maybe_lookup_ctx (stmt
);
8820 lower_omp_teams (gsi_p
, ctx
);
8822 case GIMPLE_OMP_GRID_BODY
:
8823 ctx
= maybe_lookup_ctx (stmt
);
8825 lower_omp_grid_body (gsi_p
, ctx
);
8829 call_stmt
= as_a
<gcall
*> (stmt
);
8830 fndecl
= gimple_call_fndecl (call_stmt
);
8832 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
8833 switch (DECL_FUNCTION_CODE (fndecl
))
8835 case BUILT_IN_GOMP_BARRIER
:
8839 case BUILT_IN_GOMP_CANCEL
:
8840 case BUILT_IN_GOMP_CANCELLATION_POINT
:
8843 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
8845 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
8846 if (!cctx
->cancellable
)
8848 if (DECL_FUNCTION_CODE (fndecl
)
8849 == BUILT_IN_GOMP_CANCELLATION_POINT
)
8851 stmt
= gimple_build_nop ();
8852 gsi_replace (gsi_p
, stmt
, false);
8856 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
8858 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
8859 gimple_call_set_fndecl (call_stmt
, fndecl
);
8860 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
8863 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
8864 gimple_call_set_lhs (call_stmt
, lhs
);
8865 tree fallthru_label
;
8866 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
8868 g
= gimple_build_label (fallthru_label
);
8869 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
8870 g
= gimple_build_cond (NE_EXPR
, lhs
,
8871 fold_convert (TREE_TYPE (lhs
),
8872 boolean_false_node
),
8873 cctx
->cancel_label
, fallthru_label
);
8874 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
8881 if ((ctx
|| task_shared_vars
)
8882 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
8885 /* Just remove clobbers, this should happen only if we have
8886 "privatized" local addressable variables in SIMD regions,
8887 the clobber isn't needed in that case and gimplifying address
8888 of the ARRAY_REF into a pointer and creating MEM_REF based
8889 clobber would create worse code than we get with the clobber
8891 if (gimple_clobber_p (stmt
))
8893 gsi_replace (gsi_p
, gimple_build_nop (), true);
8896 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
8903 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
8905 location_t saved_location
= input_location
;
8906 gimple_stmt_iterator gsi
;
8907 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
8908 lower_omp_1 (&gsi
, ctx
);
8909 /* During gimplification, we haven't folded statments inside offloading
8910 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8911 if (target_nesting_level
|| taskreg_nesting_level
)
8912 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
8914 input_location
= saved_location
;
8917 /* Main entry point. */
8920 execute_lower_omp (void)
8926 /* This pass always runs, to provide PROP_gimple_lomp.
8927 But often, there is nothing to do. */
8928 if (flag_openacc
== 0 && flag_openmp
== 0
8929 && flag_openmp_simd
== 0)
8932 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
8933 delete_omp_context
);
8935 body
= gimple_body (current_function_decl
);
8937 if (hsa_gen_requested_p ())
8938 omp_grid_gridify_all_targets (&body
);
8940 scan_omp (&body
, NULL
);
8941 gcc_assert (taskreg_nesting_level
== 0);
8942 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
8943 finish_taskreg_scan (ctx
);
8944 taskreg_contexts
.release ();
8946 if (all_contexts
->root
)
8948 if (task_shared_vars
)
8949 push_gimplify_context ();
8950 lower_omp (&body
, NULL
);
8951 if (task_shared_vars
)
8952 pop_gimplify_context (NULL
);
8957 splay_tree_delete (all_contexts
);
8958 all_contexts
= NULL
;
8960 BITMAP_FREE (task_shared_vars
);
8962 /* If current function is a method, remove artificial dummy VAR_DECL created
8963 for non-static data member privatization, they aren't needed for
8964 debuginfo nor anything else, have been already replaced everywhere in the
8965 IL and cause problems with LTO. */
8966 if (DECL_ARGUMENTS (current_function_decl
)
8967 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
8968 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
8970 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
8976 const pass_data pass_data_lower_omp
=
8978 GIMPLE_PASS
, /* type */
8979 "omplower", /* name */
8980 OPTGROUP_OMP
, /* optinfo_flags */
8981 TV_NONE
, /* tv_id */
8982 PROP_gimple_any
, /* properties_required */
8983 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
8984 0, /* properties_destroyed */
8985 0, /* todo_flags_start */
8986 0, /* todo_flags_finish */
8989 class pass_lower_omp
: public gimple_opt_pass
8992 pass_lower_omp (gcc::context
*ctxt
)
8993 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
8996 /* opt_pass methods: */
8997 virtual unsigned int execute (function
*) { return execute_lower_omp (); }
8999 }; // class pass_lower_omp
9004 make_pass_lower_omp (gcc::context
*ctxt
)
9006 return new pass_lower_omp (ctxt
);
9009 /* The following is a utility to diagnose structured block violations.
9010 It is not part of the "omplower" pass, as that's invoked too late. It
9011 should be invoked by the respective front ends after gimplification. */
9013 static splay_tree all_labels
;
9015 /* Check for mismatched contexts and generate an error if needed. Return
9016 true if an error is detected. */
9019 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
9020 gimple
*branch_ctx
, gimple
*label_ctx
)
9022 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
9023 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
9025 if (label_ctx
== branch_ctx
)
9028 const char* kind
= NULL
;
9032 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
9033 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
9035 gcc_checking_assert (kind
== NULL
);
9041 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
9045 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9046 so we could traverse it and issue a correct "exit" or "enter" error
9047 message upon a structured block violation.
9049 We built the context by building a list with tree_cons'ing, but there is
9050 no easy counterpart in gimple tuples. It seems like far too much work
9051 for issuing exit/enter error messages. If someone really misses the
9052 distinct error message... patches welcome. */
9055 /* Try to avoid confusing the user by producing and error message
9056 with correct "exit" or "enter" verbiage. We prefer "exit"
9057 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9058 if (branch_ctx
== NULL
)
9064 if (TREE_VALUE (label_ctx
) == branch_ctx
)
9069 label_ctx
= TREE_CHAIN (label_ctx
);
9074 error ("invalid exit from %s structured block", kind
);
9076 error ("invalid entry to %s structured block", kind
);
9079 /* If it's obvious we have an invalid entry, be specific about the error. */
9080 if (branch_ctx
== NULL
)
9081 error ("invalid entry to %s structured block", kind
);
9084 /* Otherwise, be vague and lazy, but efficient. */
9085 error ("invalid branch to/from %s structured block", kind
);
9088 gsi_replace (gsi_p
, gimple_build_nop (), false);
9092 /* Pass 1: Create a minimal tree of structured blocks, and record
9093 where each label is found. */
9096 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
9097 struct walk_stmt_info
*wi
)
9099 gimple
*context
= (gimple
*) wi
->info
;
9100 gimple
*inner_context
;
9101 gimple
*stmt
= gsi_stmt (*gsi_p
);
9103 *handled_ops_p
= true;
9105 switch (gimple_code (stmt
))
9109 case GIMPLE_OMP_PARALLEL
:
9110 case GIMPLE_OMP_TASK
:
9111 case GIMPLE_OMP_SECTIONS
:
9112 case GIMPLE_OMP_SINGLE
:
9113 case GIMPLE_OMP_SECTION
:
9114 case GIMPLE_OMP_MASTER
:
9115 case GIMPLE_OMP_ORDERED
:
9116 case GIMPLE_OMP_CRITICAL
:
9117 case GIMPLE_OMP_TARGET
:
9118 case GIMPLE_OMP_TEAMS
:
9119 case GIMPLE_OMP_TASKGROUP
:
9120 /* The minimal context here is just the current OMP construct. */
9121 inner_context
= stmt
;
9122 wi
->info
= inner_context
;
9123 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
9127 case GIMPLE_OMP_FOR
:
9128 inner_context
= stmt
;
9129 wi
->info
= inner_context
;
9130 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9132 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
9133 diagnose_sb_1
, NULL
, wi
);
9134 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
9139 splay_tree_insert (all_labels
,
9140 (splay_tree_key
) gimple_label_label (
9141 as_a
<glabel
*> (stmt
)),
9142 (splay_tree_value
) context
);
9152 /* Pass 2: Check each branch and see if its context differs from that of
9153 the destination label's context. */
9156 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
9157 struct walk_stmt_info
*wi
)
9159 gimple
*context
= (gimple
*) wi
->info
;
9161 gimple
*stmt
= gsi_stmt (*gsi_p
);
9163 *handled_ops_p
= true;
9165 switch (gimple_code (stmt
))
9169 case GIMPLE_OMP_PARALLEL
:
9170 case GIMPLE_OMP_TASK
:
9171 case GIMPLE_OMP_SECTIONS
:
9172 case GIMPLE_OMP_SINGLE
:
9173 case GIMPLE_OMP_SECTION
:
9174 case GIMPLE_OMP_MASTER
:
9175 case GIMPLE_OMP_ORDERED
:
9176 case GIMPLE_OMP_CRITICAL
:
9177 case GIMPLE_OMP_TARGET
:
9178 case GIMPLE_OMP_TEAMS
:
9179 case GIMPLE_OMP_TASKGROUP
:
9181 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
9185 case GIMPLE_OMP_FOR
:
9187 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9189 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
9190 diagnose_sb_2
, NULL
, wi
);
9191 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
9197 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
9198 tree lab
= gimple_cond_true_label (cond_stmt
);
9201 n
= splay_tree_lookup (all_labels
,
9202 (splay_tree_key
) lab
);
9203 diagnose_sb_0 (gsi_p
, context
,
9204 n
? (gimple
*) n
->value
: NULL
);
9206 lab
= gimple_cond_false_label (cond_stmt
);
9209 n
= splay_tree_lookup (all_labels
,
9210 (splay_tree_key
) lab
);
9211 diagnose_sb_0 (gsi_p
, context
,
9212 n
? (gimple
*) n
->value
: NULL
);
9219 tree lab
= gimple_goto_dest (stmt
);
9220 if (TREE_CODE (lab
) != LABEL_DECL
)
9223 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
9224 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
9230 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
9232 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
9234 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
9235 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
9236 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
9243 diagnose_sb_0 (gsi_p
, context
, NULL
);
9254 diagnose_omp_structured_block_errors (void)
9256 struct walk_stmt_info wi
;
9257 gimple_seq body
= gimple_body (current_function_decl
);
9259 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
9261 memset (&wi
, 0, sizeof (wi
));
9262 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
9264 memset (&wi
, 0, sizeof (wi
));
9265 wi
.want_locations
= true;
9266 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
9268 gimple_set_body (current_function_decl
, body
);
9270 splay_tree_delete (all_labels
);
9278 const pass_data pass_data_diagnose_omp_blocks
=
9280 GIMPLE_PASS
, /* type */
9281 "*diagnose_omp_blocks", /* name */
9282 OPTGROUP_OMP
, /* optinfo_flags */
9283 TV_NONE
, /* tv_id */
9284 PROP_gimple_any
, /* properties_required */
9285 0, /* properties_provided */
9286 0, /* properties_destroyed */
9287 0, /* todo_flags_start */
9288 0, /* todo_flags_finish */
9291 class pass_diagnose_omp_blocks
: public gimple_opt_pass
9294 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
9295 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
9298 /* opt_pass methods: */
9299 virtual bool gate (function
*)
9301 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
9303 virtual unsigned int execute (function
*)
9305 return diagnose_omp_structured_block_errors ();
9308 }; // class pass_diagnose_omp_blocks
9313 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
9315 return new pass_diagnose_omp_blocks (ctxt
);
9319 #include "gt-omp-low.h"