/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
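
/* Editor's note: an illustrative sketch (not part of this pass) of the
   overall transformation.  For input along the lines of

       #pragma omp parallel shared (n)
         use (n);

   the scan phase records N in the context's field map, and expansion
   outlines the body into a child function that receives the shared
   state through a record, roughly

       struct .omp_data_s { int *n; };

       void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
       { use (*.omp_data_i->n); }

   invoked via the libgomp entry point GOMP_parallel.  The exact record
   layout and runtime calls depend on the clauses involved.  */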
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gomp_for *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages
     regarding invalid gotos.  The outermost ctx is depth 1, with depth 0
     being reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
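
/* Editor's note: WALK_SUBSTMTS is meant to be expanded inside the switch
   of a walk_gimple_seq callback (see omp_find_combined_for below), e.g.

       switch (gimple_code (stmt))
         {
         WALK_SUBSTMTS;

         case GIMPLE_OMP_FOR:
           ...
         }

   so that container statements are not treated as handled and their
   sub-statements keep being walked.  */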
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}
/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
	v = TREE_OPERAND (v, 0);
	continue;
      case MEM_REF:
      case INDIRECT_REF:
	v = TREE_OPERAND (v, 0);
	continue;
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
      case RESULT_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
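
/* Editor's note: a hypothetical illustration.  For a C++ non-static data
   member used in a data-sharing clause, e.g.

       struct S {
         int m;
         void f ();   // contains: #pragma omp parallel firstprivate (m)
       };

   the front end creates an artificial VAR_DECL whose DECL_VALUE_EXPR is
   this->m; the function above walks that COMPONENT_REF down to the
   artificial "this" parameter and returns it.  */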
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}
/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}
/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that the variable must have been
   entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}
static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
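
/* Editor's note: an illustration of the nested-sharing restriction above.
   In

       int x = 0;
       #pragma omp parallel shared (x)
       #pragma omp parallel shared (x)
         x++;

   if the inner region used copy-in/copy-out, each inner thread would
   increment its own copied-in location and race on the copy-out, so X
   must be passed by pointer instead.  */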
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just because task needs to take
     its address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  if (TREE_THIS_VOLATILE (obj))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (obj))
    TREE_READONLY (ret) |= 1;
  return ret;
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
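
/* Editor's note: for a field X installed in .omp_data_s, the reference
   built above is essentially (*.omp_data_i).x, with an additional
   dereference when the field holds a pointer to the original variable
   (BY_REF).  */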
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
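
/* Editor's note: the MASK argument above is a bit set, as the callers
   below show: bit 1 installs a field in the receiver record
   (field_map/record_type), bit 2 in the sender record
   (sfield_map/srecord_type), bit 4 requests a pointer-to-pointer field
   for array sections, and bit 8 keys the splay tree by &DECL_UID rather
   than by the decl itself (see the taskloop lastprivate handling).  Most
   callers pass 3 (both records), e.g.

       install_var_field (decl, by_ref, 3, ctx);  */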
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
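
/* Editor's note: an illustrative summary, not an exhaustive one.  For

       #pragma omp parallel shared (a) firstprivate (b)

   the first loop above installs fields for A and B in the .omp_data_s
   record (A by pointer when use_pointer_for_field so decides) together
   with local replacement decls in the child, and the second loop fixes
   up the remapped decls once all fields exist.  */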
/* Create a new name for omp child function.  Returns an identifier.  If
   IS_CILK_FOR is true then the suffix for the child function is
   "_cilk_for_fn".  */

static tree
create_omp_child_function_name (bool task_copy, bool is_cilk_for)
{
  if (is_cilk_for)
    return clone_function_name (current_function_decl, "_cilk_for_fn");
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}
/* Returns the type of the induction variable for the child function for
   _Cilk_for and the types for _high and _low variables based on TYPE.  */

static tree
cilk_for_check_loop_diff_type (tree type)
{
  if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
    {
      if (TYPE_UNSIGNED (type))
	return uint32_type_node;
      else
	return integer_type_node;
    }
  else
    {
      if (TYPE_UNSIGNED (type))
	return uint64_type_node;
      else
	return long_long_integer_type_node;
    }
}
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  tree cilk_for_count
    = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
      ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
  tree cilk_var_type = NULL_TREE;

  name = create_omp_child_function_name (task_copy,
					 cilk_for_count != NULL_TREE);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else if (cilk_for_count)
    {
      type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
      cilk_var_type = cilk_for_check_loop_diff_type (type);
      type = build_function_type_list (void_type_node, ptr_type_node,
				       cilk_var_type, cilk_var_type, NULL_TREE);
    }
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* _Cilk_for's child function requires two extra parameters called
     __low and __high that are set the by Cilk runtime when it calls this
     function.  */
  if (cilk_for_count)
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__high"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;

      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__low"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}
/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
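
/* Editor's note: e.g. for a combined "#pragma omp parallel for" whose
   bounds are not compile-time constants, the parent computes the loop
   bounds and hands them to the child through these _LOOPTEMP_
   temporaries (istart/iend, plus count temporaries for collapsed
   loops), instead of recomputing them inside the outlined body.  */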
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_context returns non-NULL for some
   tree, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
	{
	  if (DECL_HAS_VALUE_EXPR_P (t))
	    t = unshare_expr (DECL_VALUE_EXPR (t));
	  *tp = t;
	}
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  There are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree c1 = gimple_omp_task_clauses (ctx->stmt);
	  c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
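
/* Illustrative sketch, not from the original sources: for a task whose
   body captures both a scalar and a VLA, e.g.

     int n = ...;  int vla[n];
     #pragma omp task shared (vla) firstprivate (n)

   the code above keeps the fixed-size fields at the front of .omp_data_s
   and chains the variable-sized field at the end, so the arg_size and
   arg_align values handed to GOMP_task describe a layout the runtime can
   rely on.  */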
/* Find the enclosing offload context.  */

static omp_context *
enclosing_target_ctx (omp_context *ctx)
{
  for (; ctx; ctx = ctx->outer)
    if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
      break;

  return ctx;
}
/* Return true if ctx is part of an oacc kernels region.  */

static bool
ctx_in_oacc_kernels_region (omp_context *ctx)
{
  for (;ctx != NULL; ctx = ctx->outer)
    {
      gimple *stmt = ctx->stmt;
      if (gimple_code (stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
	return true;
    }

  return false;
}
/* Check the parallelism clauses inside a kernels regions.
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL,  ctx->outer);
  if (!stmt)
    {
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
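
/* Illustrative example, not from the original sources: the recursion
   above rejects, e.g.,

     #pragma acc kernels
     #pragma acc loop gang
     for (...)
       {
	 #pragma acc loop gang    // "inner loop uses same OpenACC
	 for (...) ...            //  parallelism as containing loop"
       }

   and a single loop combining "seq" with "gang"/"worker"/"vector" or
   "auto" trips the "%<seq%> overrides" diagnostic.  */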
/* Scan a GIMPLE_OMP_FOR.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      if (!tgt || is_oacc_parallel (tgt))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    char const *check = NULL;

	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		check = "gang";
		break;

	      case OMP_CLAUSE_WORKER:
		check = "worker";
		break;

	      case OMP_CLAUSE_VECTOR:
		check = "vector";
		break;

	      default:
		break;
	      }

	    if (check && OMP_CLAUSE_OPERAND (c, 0))
	      error_at (gimple_location (stmt),
			"argument not permitted on %qs clause in"
			" OpenACC %<parallel%>", check);
	  }

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	  check_oacc_kernel_gwv (stmt, ctx);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);

  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
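
/* The bind built above wraps the loop in a runtime dispatch roughly
   equivalent to the following sketch (illustrative, not literal output):

     cond = GOMP_USE_SIMT ();
     if (cond != 0)
       { <copy of the simd loop, tagged with a _simt_ clause>  goto done; }
     else
       { <original simd loop> }
     done:;

   Only one of the two arms survives once the target's SIMT capability
   is known during later expansion.  */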
/* Scan an OpenMP sections directive.  */

static void
scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;

  ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
}
/* Scan an OpenMP single directive.  */

static void
scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}
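
/* Illustrative example, not from the original sources: for

     #pragma omp single copyprivate (x)

   the .omp_copy_s record built above gets a field for x and is used to
   broadcast the value from the thread that executed the single region
   to the other threads.  When no clause requires a field, record_type
   is dropped again, as done at the end of the function.  */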
/* Return true if the CLAUSES of an omp target guarantee that the base pointers
   used in the corresponding offloaded function are restrict.  */

static bool
omp_target_base_pointers_restrict_p (tree clauses)
{
  /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
     used by OpenACC.  */
  if (flag_openacc == 0)
    return false;

  /* I.  Basic example:

       void foo (void)
       {
	 unsigned int a[2], b[2];

	 #pragma acc kernels \
	   copyout (a) \
	   copyout (b)
	 {
	   a[0] = 0;
	   b[0] = 1;
	 }
       }

     After gimplification, we have:

       #pragma omp target oacc_kernels \
	 map(force_from:a [len: 8]) \
	 map(force_from:b [len: 8])
       {
	 a[0] = 0;
	 b[0] = 1;
       }

     Because both mappings have the force prefix, we know that they will be
     allocated when calling the corresponding offloaded function, which means we
     can mark the base pointers for a and b in the offloaded function as
     restrict.  */

  tree c;
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
	return false;

      switch (OMP_CLAUSE_MAP_KIND (c))
	{
	case GOMP_MAP_FORCE_ALLOC:
	case GOMP_MAP_FORCE_TO:
	case GOMP_MAP_FORCE_FROM:
	case GOMP_MAP_FORCE_TOFROM:
	  break;
	default:
	  return false;
	}
    }

  return true;
}
/* Scan a GIMPLE_OMP_TARGET.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  bool base_pointers_restrict = false;
  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);

      base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
      if (base_pointers_restrict
	  && dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Base pointers in offloaded function are restrict\n");
    }

  scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }
}
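
/* Illustrative example, not from the original sources: for

     #pragma omp target map (tofrom: a)

   the scan above builds an .omp_data_t record with one field for a, and
   for offloaded kinds creates a child function whose single argument
   points at the filled-in record on the device.  */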
/* Scan an OpenMP teams directive.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
}
/* Check nesting restrictions.  */

static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
    /* GRID_BODY is an artificial construct, nesting rules will be checked in
       the original copy of its contents.  */
    return true;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (!(is_gimple_omp (stmt)
	&& is_gimple_omp_oacc (stmt))
      /* Except for atomic codes that we share with OpenMP.  */
      && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	{
	  c = NULL_TREE;
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<for simd%> region");
		      return false;
		    }
		  return true;
		}
	    }
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than %<#pragma omp ordered simd%>"
		    " may not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
		   && (gimple_omp_for_kind (stmt)
		       != GF_OMP_FOR_KIND_GRID_LOOP)))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%> or %<parallel%> regions are "
			"allowed to be strictly nested inside %<teams%> "
			"region");
	      return false;
	    }
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		    ok = true;
		    break;

		  default:
		    break;
		  }

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "#pragma omp cancel"
	      : "#pragma omp cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "#pragma omp parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "#pragma omp for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<#pragma omp cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<#pragma omp cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "#pragma omp sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<#pragma omp cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<#pragma omp cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
		bad = "#pragma omp task";
	      else
		{
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<critical%>, %<ordered%>, "
			  "%<master%>, explicit %<task%> or %<taskloop%> "
			  "region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<critical%>, %<ordered%>, "
		      "%<master%>, explicit %<task%> or %<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%<master%> region may not be closely nested inside "
		      "of work-sharing, explicit %<task%> or %<taskloop%> "
		      "region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    {
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }
	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
	      || kind == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree oclause;
	      /* Look for containing ordered(N) loop.  */
	      if (ctx == NULL
		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || (oclause
			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside an %<ordered%> "
			    "loop");
		  return false;
		}
	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside a loop with "
			    "%<ordered%> clause with a parameter");
		  return false;
		}
	    }
	  else
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"invalid depend kind in omp %<ordered%> %<depend%>");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region");
	      return false;
	    }
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED) == NULL)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
	        = dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL
	  || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	  || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
	{
	  error_at (gimple_location (stmt),
		    "%<teams%> construct not closely nested inside of "
		    "%<target%> construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
	      stmt_name = "enter/exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		warning_at (gimple_location (stmt), 0,
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
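
/* Example of an invalid nesting rejected above (illustrative, not from
   the original sources):

     #pragma omp critical
     {
       #pragma omp barrier   // "barrier region may not be closely nested
     }                       //  inside of ... %<critical%> ... region"
*/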
/* Helper function scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
/* Return true if FNDECL is a setjmp or a longjmp.  */

static bool
setjmp_or_longjmp_p (const_tree fndecl)
{
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
    return true;

  tree declname = DECL_NAME (fndecl);
  if (!declname)
    return false;
  const char *name = IDENTIFIER_POINTER (declname);
  return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
}
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  if (setjmp_or_longjmp_p (fndecl)
	      && ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside simd construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	}
    }
  if (remove)
    {
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ())
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_GRID_BODY:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
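
/* Illustrative example, not from the original sources: code such as

     #pragma omp simd
     for (i = 0; i < n; i++)
       if (setjmp (env)) ...

   is diagnosed by the setjmp/longjmp check above and the offending
   statement is replaced by a nop so later passes never see it.  */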
/* Scan all the statements starting at the current statement.  CTX
   contains context information about the OMP directives and
   clauses found during the scan.  */

static void
scan_omp (gimple_seq *body_p, omp_context *ctx)
{
  location_t saved_location;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  saved_location = input_location;
  walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
  input_location = saved_location;
}
/* Re-gimplification and code generation routines.  */

/* If a context was created for STMT when it was scanned, return it.  */

static omp_context *
maybe_lookup_ctx (gimple *stmt)
{
  splay_tree_node n;
  n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
  return n ? (omp_context *) n->value : NULL;
}
/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context.  Suppose that we have the
   following parallel nesting (variable UIDs showed for clarity):

	iD.1562 = 0;
	#omp parallel shared(iD.1562)		-> outer parallel
	  iD.1562 = iD.1562 + 1;

	  #omp parallel shared (iD.1562)	-> inner parallel
	     iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

	outer parallel		.omp_data_s.1.i -> iD.1562
	inner parallel		.omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it.  So, in
   this case we would have:

	iD.1562 = 0;
	.omp_data_o.1.i = iD.1562;
	#omp parallel shared(iD.1562)		-> outer parallel
	  .omp_data_i.1 = &.omp_data_o.1
	  .omp_data_i.1->i = .omp_data_i.1->i + 1;

	  .omp_data_o.2.i = iD.1562;		-> **
	  #omp parallel shared(iD.1562)		-> inner parallel
	    .omp_data_i.2 = &.omp_data_o.2
	    .omp_data_i.2->i = .omp_data_i.2->i - 1;

   ** This is a problem.  The symbol iD.1562 cannot be referenced
   inside the body of the outer parallel region.  But since we are
   emitting this copy operation while expanding the inner parallel
   directive, we need to access the CTX structure of the outer
   parallel directive to get the correct mapping:

	  .omp_data_o.2.i = .omp_data_i.1->i

   Since there may be other workshare or parallel directives enclosing
   the parallel directive, it may be necessary to walk up the context
   parent chain.  This is not a problem in general because nested
   parallelism happens only rarely.  */

static tree
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  gcc_assert (!ctx->is_nested || t || is_global_var (decl));

  return t ? t : decl;
}
/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
   in outer contexts.  */

static tree
maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t = NULL;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  return t ? t : decl;
}
/* Construct the initialization value for reduction operation OP.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
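
/* For instance (illustrative summary, not from the original sources):
   +, -, |, ^, ||  ->  0;   *, &&  ->  1;   &  ->  ~0;
   max  ->  -inf (or TYPE_MIN_VALUE for non-float types);
   min  ->  +inf (or TYPE_MAX_VALUE for non-float types).  */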
/* Construct the initialization value for reduction CLAUSE.  */

tree
omp_reduction_init (tree clause, tree type)
{
  return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
				OMP_CLAUSE_REDUCTION_CODE (clause), type);
}
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  int vs = targetm.vectorize.autovectorize_vector_sizes ();
  if (vs)
    vs = 1 << floor_log2 (vs);
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	while (vs
	       && GET_MODE_SIZE (vmode) < vs
	       && GET_MODE_2XWIDER_MODE (vmode).exists ())
	  vmode = GET_MODE_2XWIDER_MODE (vmode).require ();

	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type (type, GET_MODE_SIZE (vmode)
					/ GET_MODE_SIZE (mode));
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
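
/* Illustrative example, not from the original sources: for

     #pragma omp simd aligned (p)

   with no explicit alignment argument, on a target whose widest
   preferred SIMD mode is a 32-byte vector, the scan above would
   settle on 32 as the implementation-defined alignment.  */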
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

struct omplow_simd_context {
  tree idx;
  tree lane;
  vec<tree, va_heap> simt_eargs;
  gimple_seq simt_dlist;
  int max_vf;
  bool is_simt;
};
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
{
  if (sctx->max_vf == 0)
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (sctx->max_vf > 1)
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c
	      && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
		  || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
	    sctx->max_vf = 1;
	  else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
					  sctx->max_vf) == -1)
	    sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
	}
      if (sctx->max_vf > 1)
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (sctx->max_vf == 1)
    return false;

  if (sctx->is_simt)
    {
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_constructor (type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
    }
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
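
/* Illustrative sketch, not from the original sources: for
   "#pragma omp simd private (x)" where x cannot stay in a register,
   the non-SIMT path above materializes roughly

     T x_arr[max_vf];    // marked with the "omp simd array" attribute

   and rewrites uses of x via the two ARRAY_REFs returned: IVAR indexes
   the array by sctx->idx (used when iterating over all lanes in the
   init/destruction sequences) and LVAR by sctx->lane (the copy used
   inside the loop body via DECL_VALUE_EXPR).  */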
/* Helper function of lower_rec_input_clauses.  For a reference
   in simd reduction, add an underlying variable it will reference.  */

static void
handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
{
  tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
  if (TREE_CONSTANT (z))
    {
      z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
			      get_name (new_vard));
      gimple_add_tmp_var (z);
      TREE_ADDRESSABLE (z) = 1;
      z = build_fold_addr_expr_loc (loc, z);
      gimplify_assign (new_vard, z, ilist);
    }
}
3588 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3589 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3590 private variables. Initialization statements go in ILIST, while calls
3591 to destructors go in DLIST. */
3594 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
3595 omp_context
*ctx
, struct omp_for_data
*fd
)
3597 tree c
, dtor
, copyin_seq
, x
, ptr
;
3598 bool copyin_by_ref
= false;
3599 bool lastprivate_firstprivate
= false;
3600 bool reduction_omp_orig_ref
= false;
3602 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3603 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
);
3604 omplow_simd_context sctx
= omplow_simd_context ();
3605 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
3606 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
3607 gimple_seq llist
[3] = { };
3610 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
3612 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3613 with data sharing clauses referencing variable sized vars. That
3614 is unnecessarily hard to support and very unlikely to result in
3615 vectorized code anyway. */
3617 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3618 switch (OMP_CLAUSE_CODE (c
))
3620 case OMP_CLAUSE_LINEAR
:
3621 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
3624 case OMP_CLAUSE_PRIVATE
:
3625 case OMP_CLAUSE_FIRSTPRIVATE
:
3626 case OMP_CLAUSE_LASTPRIVATE
:
3627 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
3630 case OMP_CLAUSE_REDUCTION
:
3631 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
3632 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
3639 /* Add a placeholder for simduid. */
3640 if (sctx
.is_simt
&& sctx
.max_vf
!= 1)
3641 sctx
.simt_eargs
.safe_push (NULL_TREE
);
3643 /* Do all the fixed sized types in the first pass, and the variable sized
3644 types in the second pass. This makes sure that the scalar arguments to
3645 the variable sized types are processed before we use them in the
3646 variable sized operations. */
3647 for (pass
= 0; pass
< 2; ++pass
)
3649 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3651 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
3654 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
3658 case OMP_CLAUSE_PRIVATE
:
3659 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
3662 case OMP_CLAUSE_SHARED
:
3663 /* Ignore shared directives in teams construct. */
3664 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
3666 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
3668 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
3669 || is_global_var (OMP_CLAUSE_DECL (c
)));
3672 case OMP_CLAUSE_FIRSTPRIVATE
:
3673 case OMP_CLAUSE_COPYIN
:
3675 case OMP_CLAUSE_LINEAR
:
3676 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
3677 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
3678 lastprivate_firstprivate
= true;
3680 case OMP_CLAUSE_REDUCTION
:
3681 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
3682 reduction_omp_orig_ref
= true;
3684 case OMP_CLAUSE__LOOPTEMP_
:
3685 /* Handle _looptemp_ clauses only on parallel/task. */
3689 case OMP_CLAUSE_LASTPRIVATE
:
3690 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
3692 lastprivate_firstprivate
= true;
3693 if (pass
!= 0 || is_taskloop_ctx (ctx
))
3696 /* Even without corresponding firstprivate, if
3697 decl is Fortran allocatable, it needs outer var
3700 && lang_hooks
.decls
.omp_private_outer_ref
3701 (OMP_CLAUSE_DECL (c
)))
3702 lastprivate_firstprivate
= true;
3704 case OMP_CLAUSE_ALIGNED
:
3707 var
= OMP_CLAUSE_DECL (c
);
3708 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
3709 && !is_global_var (var
))
3711 new_var
= maybe_lookup_decl (var
, ctx
);
3712 if (new_var
== NULL_TREE
)
3713 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
3714 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
3715 tree alarg
= omp_clause_aligned_alignment (c
);
3716 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
3717 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
3718 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
3719 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
3720 gimplify_and_add (x
, ilist
);
3722 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
3723 && is_global_var (var
))
3725 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
3726 new_var
= lookup_decl (var
, ctx
);
3727 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
3728 t
= build_fold_addr_expr_loc (clause_loc
, t
);
3729 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
3730 tree alarg
= omp_clause_aligned_alignment (c
);
3731 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
3732 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
3733 t
= fold_convert_loc (clause_loc
, ptype
, t
);
3734 x
= create_tmp_var (ptype
);
3735 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
3736 gimplify_and_add (t
, ilist
);
3737 t
= build_simple_mem_ref_loc (clause_loc
, x
);
3738 SET_DECL_VALUE_EXPR (new_var
, t
);
3739 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
3746 new_var
= var
= OMP_CLAUSE_DECL (c
);
3747 if (c_kind
== OMP_CLAUSE_REDUCTION
&& TREE_CODE (var
) == MEM_REF
)
3749 var
= TREE_OPERAND (var
, 0);
3750 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
3751 var
= TREE_OPERAND (var
, 0);
3752 if (TREE_CODE (var
) == INDIRECT_REF
3753 || TREE_CODE (var
) == ADDR_EXPR
)
3754 var
= TREE_OPERAND (var
, 0);
3755 if (is_variable_sized (var
))
3757 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
3758 var
= DECL_VALUE_EXPR (var
);
3759 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
3760 var
= TREE_OPERAND (var
, 0);
3761 gcc_assert (DECL_P (var
));
3765 if (c_kind
!= OMP_CLAUSE_COPYIN
)
3766 new_var
= lookup_decl (var
, ctx
);
3768 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
3773 /* C/C++ array section reductions. */
3774 else if (c_kind
== OMP_CLAUSE_REDUCTION
3775 && var
!= OMP_CLAUSE_DECL (c
))
3780 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
3781 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
3782 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
3784 tree b
= TREE_OPERAND (orig_var
, 1);
3785 b
= maybe_lookup_decl (b
, ctx
);
3788 b
= TREE_OPERAND (orig_var
, 1);
3789 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
3791 if (integer_zerop (bias
))
3795 bias
= fold_convert_loc (clause_loc
,
3796 TREE_TYPE (b
), bias
);
3797 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
3798 TREE_TYPE (b
), b
, bias
);
3800 orig_var
= TREE_OPERAND (orig_var
, 0);
3802 if (TREE_CODE (orig_var
) == INDIRECT_REF
3803 || TREE_CODE (orig_var
) == ADDR_EXPR
)
3804 orig_var
= TREE_OPERAND (orig_var
, 0);
3805 tree d
= OMP_CLAUSE_DECL (c
);
3806 tree type
= TREE_TYPE (d
);
3807 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
3808 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
3809 const char *name
= get_name (orig_var
);
3810 if (TREE_CONSTANT (v
))
3812 x
= create_tmp_var_raw (type
, name
);
3813 gimple_add_tmp_var (x
);
3814 TREE_ADDRESSABLE (x
) = 1;
3815 x
= build_fold_addr_expr_loc (clause_loc
, x
);
3820 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
3821 tree t
= maybe_lookup_decl (v
, ctx
);
3825 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
3826 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
3827 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
3829 build_int_cst (TREE_TYPE (v
), 1));
3830 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
3832 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
3833 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
3834 x
= build_call_expr_loc (clause_loc
, atmp
, 2, t
, al
);
3837 tree ptype
= build_pointer_type (TREE_TYPE (type
));
3838 x
= fold_convert_loc (clause_loc
, ptype
, x
);
3839 tree y
= create_tmp_var (ptype
, name
);
3840 gimplify_assign (y
, x
, ilist
);
3844 if (!integer_zerop (bias
))
3846 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
3848 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
3850 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
3851 pointer_sized_int_node
, yb
, bias
);
3852 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
3853 yb
= create_tmp_var (ptype
, name
);
3854 gimplify_assign (yb
, x
, ilist
);
3858 d
= TREE_OPERAND (d
, 0);
3859 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
3860 d
= TREE_OPERAND (d
, 0);
3861 if (TREE_CODE (d
) == ADDR_EXPR
)
3863 if (orig_var
!= var
)
3865 gcc_assert (is_variable_sized (orig_var
));
3866 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
3868 gimplify_assign (new_var
, x
, ilist
);
3869 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
3870 tree t
= build_fold_indirect_ref (new_var
);
3871 DECL_IGNORED_P (new_var
) = 0;
3872 TREE_THIS_NOTRAP (t
);
3873 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
3874 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
3878 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
3879 build_int_cst (ptype
, 0));
3880 SET_DECL_VALUE_EXPR (new_var
, x
);
3881 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
3886 gcc_assert (orig_var
== var
);
3887 if (TREE_CODE (d
) == INDIRECT_REF
)
3889 x
= create_tmp_var (ptype
, name
);
3890 TREE_ADDRESSABLE (x
) = 1;
3891 gimplify_assign (x
, yb
, ilist
);
3892 x
= build_fold_addr_expr_loc (clause_loc
, x
);
3894 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
3895 gimplify_assign (new_var
, x
, ilist
);
3897 tree y1
= create_tmp_var (ptype
, NULL
);
3898 gimplify_assign (y1
, y
, ilist
);
3899 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
3900 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
3901 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
3902 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
3904 y2
= create_tmp_var (ptype
, NULL
);
3905 gimplify_assign (y2
, y
, ilist
);
3906 tree ref
= build_outer_var_ref (var
, ctx
);
3907 /* For ref build_outer_var_ref already performs this. */
3908 if (TREE_CODE (d
) == INDIRECT_REF
)
3909 gcc_assert (omp_is_reference (var
));
3910 else if (TREE_CODE (d
) == ADDR_EXPR
)
3911 ref
= build_fold_addr_expr (ref
);
3912 else if (omp_is_reference (var
))
3913 ref
= build_fold_addr_expr (ref
);
3914 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
3915 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
3916 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
3918 y3
= create_tmp_var (ptype
, NULL
);
3919 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
3923 y4
= create_tmp_var (ptype
, NULL
);
3924 gimplify_assign (y4
, ref
, dlist
);
3927 tree i
= create_tmp_var (TREE_TYPE (v
), NULL
);
3928 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
3929 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
3930 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
3931 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
3934 i2
= create_tmp_var (TREE_TYPE (v
), NULL
);
3935 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
3936 body2
= create_artificial_label (UNKNOWN_LOCATION
);
3937 end2
= create_artificial_label (UNKNOWN_LOCATION
);
3938 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
3940 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
3942 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
3943 tree decl_placeholder
3944 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
3945 SET_DECL_VALUE_EXPR (decl_placeholder
,
3946 build_simple_mem_ref (y1
));
3947 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
3948 SET_DECL_VALUE_EXPR (placeholder
,
3949 y3
? build_simple_mem_ref (y3
)
3951 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
3952 x
= lang_hooks
.decls
.omp_clause_default_ctor
3953 (c
, build_simple_mem_ref (y1
),
3954 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
3956 gimplify_and_add (x
, ilist
);
3957 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
3959 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
3960 lower_omp (&tseq
, ctx
);
3961 gimple_seq_add_seq (ilist
, tseq
);
3963 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
3966 SET_DECL_VALUE_EXPR (decl_placeholder
,
3967 build_simple_mem_ref (y2
));
3968 SET_DECL_VALUE_EXPR (placeholder
,
3969 build_simple_mem_ref (y4
));
3970 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
3971 lower_omp (&tseq
, ctx
);
3972 gimple_seq_add_seq (dlist
, tseq
);
3973 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
3975 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
3976 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
3977 x
= lang_hooks
.decls
.omp_clause_dtor
3978 (c
, build_simple_mem_ref (y2
));
3981 gimple_seq tseq
= NULL
;
3983 gimplify_stmt (&dtor
, &tseq
);
3984 gimple_seq_add_seq (dlist
, tseq
);
3989 x
= omp_reduction_init (c
, TREE_TYPE (type
));
3990 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
3992 /* reduction(-:var) sums up the partial results, so it
3993 acts identically to reduction(+:var). */
3994 if (code
== MINUS_EXPR
)
3997 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
4000 x
= build2 (code
, TREE_TYPE (type
),
4001 build_simple_mem_ref (y4
),
4002 build_simple_mem_ref (y2
));
4003 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
4007 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
4008 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4009 gimple_seq_add_stmt (ilist
, g
);
4012 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
4013 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4014 gimple_seq_add_stmt (ilist
, g
);
4016 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
4017 build_int_cst (TREE_TYPE (i
), 1));
4018 gimple_seq_add_stmt (ilist
, g
);
4019 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
4020 gimple_seq_add_stmt (ilist
, g
);
4021 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
4024 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
4025 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4026 gimple_seq_add_stmt (dlist
, g
);
4029 g
= gimple_build_assign
4030 (y4
, POINTER_PLUS_EXPR
, y4
,
4031 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4032 gimple_seq_add_stmt (dlist
, g
);
4034 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
4035 build_int_cst (TREE_TYPE (i2
), 1));
4036 gimple_seq_add_stmt (dlist
, g
);
4037 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
4038 gimple_seq_add_stmt (dlist
, g
);
4039 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
4043 else if (is_variable_sized (var
))
4045 /* For variable sized types, we need to allocate the
4046 actual storage here. Call alloca and store the
4047 result in the pointer decl that we created elsewhere. */
4051 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
4056 ptr
= DECL_VALUE_EXPR (new_var
);
4057 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
4058 ptr
= TREE_OPERAND (ptr
, 0);
4059 gcc_assert (DECL_P (ptr
));
4060 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
4062 /* void *tmp = __builtin_alloca */
4063 atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4064 stmt
= gimple_build_call (atmp
, 2, x
,
4065 size_int (DECL_ALIGN (var
)));
4066 tmp
= create_tmp_var_raw (ptr_type_node
);
4067 gimple_add_tmp_var (tmp
);
4068 gimple_call_set_lhs (stmt
, tmp
);
4070 gimple_seq_add_stmt (ilist
, stmt
);
4072 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
4073 gimplify_assign (ptr
, x
, ilist
);
4076 else if (omp_is_reference (var
))
4078 /* For references that are being privatized for Fortran,
4079 allocate new backing storage for the new pointer
4080 variable. This allows us to avoid changing all the
4081 code that expects a pointer to something that expects
4082 a direct variable. */
4086 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
4087 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
4089 x
= build_receiver_ref (var
, false, ctx
);
4090 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4092 else if (TREE_CONSTANT (x
))
4094 /* For reduction in SIMD loop, defer adding the
4095 initialization of the reference, because if we decide
4096 to use SIMD array for it, the initilization could cause
4098 if (c_kind
== OMP_CLAUSE_REDUCTION
&& is_simd
)
4102 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
4104 gimple_add_tmp_var (x
);
4105 TREE_ADDRESSABLE (x
) = 1;
4106 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4112 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4113 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
4114 tree al
= size_int (TYPE_ALIGN (rtype
));
4115 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
4120 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4121 gimplify_assign (new_var
, x
, ilist
);
4124 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
4126 else if (c_kind
== OMP_CLAUSE_REDUCTION
4127 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4135 switch (OMP_CLAUSE_CODE (c))
4137 case OMP_CLAUSE_SHARED:
4138 /* Ignore shared directives in teams construct. */
4139 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4141 /* Shared global vars are just accessed directly. */
4142 if (is_global_var (new_var))
4144 /* For taskloop firstprivate/lastprivate, represented
4145 as firstprivate and shared clause on the task, new_var
4146 is the firstprivate var. */
4147 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4149 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4150 needs to be delayed until after fixup_child_record_type so
4151 that we get the correct type during the dereference. */
4152 by_ref = use_pointer_for_field (var, ctx);
4153 x = build_receiver_ref (var, by_ref, ctx);
4154 SET_DECL_VALUE_EXPR (new_var, x);
4155 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4157 /* ??? If VAR is not passed by reference, and the variable
4158 hasn't been initialized yet, then we'll get a warning for
4159 the store into the omp_data_s structure. Ideally, we'd be
4160 able to notice this and not store anything at all, but
4161 we're generating code too early. Suppress the warning. */
4163 TREE_NO_WARNING (var) = 1;
4166 case OMP_CLAUSE_LASTPRIVATE:
4167 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4171 case OMP_CLAUSE_PRIVATE:
4172 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4173 x = build_outer_var_ref (var, ctx);
4174 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4176 if (is_task_ctx (ctx))
4177 x = build_receiver_ref (var, false, ctx);
4179 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4185 nx = lang_hooks.decls.omp_clause_default_ctor
4186 (c, unshare_expr (new_var), x);
4189 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4190 if ((TREE_ADDRESSABLE (new_var) || nx || y
4191 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4192 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4196 x = lang_hooks.decls.omp_clause_default_ctor
4197 (c, unshare_expr (ivar), x);
4199 gimplify_and_add (x, &llist[0]);
4202 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4205 gimple_seq tseq = NULL;
4208 gimplify_stmt (&dtor, &tseq);
4209 gimple_seq_add_seq (&llist[1], tseq);
4216 gimplify_and_add (nx, ilist);
4220 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4223 gimple_seq tseq = NULL;
4226 gimplify_stmt (&dtor, &tseq);
4227 gimple_seq_add_seq (dlist, tseq);
4231 case OMP_CLAUSE_LINEAR:
4232 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4233 goto do_firstprivate;
4234 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4237 x = build_outer_var_ref (var, ctx);
4240 case OMP_CLAUSE_FIRSTPRIVATE:
4241 if (is_task_ctx (ctx))
4243 if (omp_is_reference (var) || is_variable_sized (var))
4245 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4247 || use_pointer_for_field (var, NULL))
4249 x = build_receiver_ref (var, false, ctx);
4250 SET_DECL_VALUE_EXPR (new_var, x);
4251 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4256 x = build_outer_var_ref (var, ctx);
4259 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4260 && gimple_omp_for_combined_into_p (ctx->stmt))
4262 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4263 tree stept = TREE_TYPE (t);
4264 tree ct = omp_find_clause (clauses,
4265 OMP_CLAUSE__LOOPTEMP_);
4267 tree l = OMP_CLAUSE_DECL (ct);
4268 tree n1 = fd->loop.n1;
4269 tree step = fd->loop.step;
4270 tree itype = TREE_TYPE (l);
4271 if (POINTER_TYPE_P (itype))
4272 itype = signed_type_for (itype);
4273 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4274 if (TYPE_UNSIGNED (itype)
4275 && fd->loop.cond_code == GT_EXPR)
4276 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4277 fold_build1 (NEGATE_EXPR, itype, l),
4278 fold_build1 (NEGATE_EXPR,
4281 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4282 t = fold_build2 (MULT_EXPR, stept,
4283 fold_convert (stept, l), t);
4285 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4287 x = lang_hooks.decls.omp_clause_linear_ctor
4289 gimplify_and_add (x, ilist);
4293 if (POINTER_TYPE_P (TREE_TYPE (x)))
4294 x = fold_build2 (POINTER_PLUS_EXPR,
4295 TREE_TYPE (x), x, t);
4297 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
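/* In other words (a sketch, not the built trees): in a combined
   construct the private linear variable starts at

     x + ((l - n1) / step) * linear_step

   where L is the _looptemp_ holding this thread's start iteration and
   N1/STEP describe the original loop; the negations above keep the
   division exact for unsigned downward-counting loops.  */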
4300 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4301 || TREE_ADDRESSABLE (new_var))
4302 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4305 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4307 tree iv = create_tmp_var (TREE_TYPE (new_var));
4308 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4309 gimplify_and_add (x, ilist);
4310 gimple_stmt_iterator gsi
4311 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4313 = gimple_build_assign (unshare_expr (lvar), iv);
4314 gsi_insert_before_without_update (&gsi, g,
4316 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4317 enum tree_code code = PLUS_EXPR;
4318 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4319 code = POINTER_PLUS_EXPR;
4320 g = gimple_build_assign (iv, code, iv, t);
4321 gsi_insert_before_without_update (&gsi, g,
4325 x = lang_hooks.decls.omp_clause_copy_ctor
4326 (c, unshare_expr (ivar), x);
4327 gimplify_and_add (x, &llist[0]);
4328 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4331 gimple_seq tseq = NULL;
4334 gimplify_stmt (&dtor, &tseq);
4335 gimple_seq_add_seq (&llist[1], tseq);
4340 x = lang_hooks.decls.omp_clause_copy_ctor
4341 (c, unshare_expr (new_var), x);
4342 gimplify_and_add (x, ilist);
4345 case OMP_CLAUSE__LOOPTEMP_:
4346 gcc_assert (is_taskreg_ctx (ctx));
4347 x = build_outer_var_ref (var, ctx);
4348 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4349 gimplify_and_add (x, ilist);
4352 case OMP_CLAUSE_COPYIN:
4353 by_ref = use_pointer_for_field (var, NULL);
4354 x = build_receiver_ref (var, by_ref, ctx);
4355 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4356 append_to_statement_list (x, &copyin_seq);
4357 copyin_by_ref |= by_ref;
4360 case OMP_CLAUSE_REDUCTION:
4361 /* OpenACC reductions are initialized using the
4362 GOACC_REDUCTION internal function. */
4363 if (is_gimple_omp_oacc (ctx->stmt))
4365 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4367 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4369 x = build_outer_var_ref (var, ctx);
4371 if (omp_is_reference (var)
4372 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4374 x = build_fold_addr_expr_loc (clause_loc, x);
4375 SET_DECL_VALUE_EXPR (placeholder, x);
4376 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4377 tree new_vard = new_var;
4378 if (omp_is_reference (var))
4380 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4381 new_vard = TREE_OPERAND (new_var, 0);
4382 gcc_assert (DECL_P (new_vard));
4385 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4388 if (new_vard == new_var)
4390 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4391 SET_DECL_VALUE_EXPR (new_var, ivar);
4395 SET_DECL_VALUE_EXPR (new_vard,
4396 build_fold_addr_expr (ivar));
4397 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4399 x = lang_hooks.decls.omp_clause_default_ctor
4400 (c, unshare_expr (ivar),
4401 build_outer_var_ref (var, ctx));
4403 gimplify_and_add (x, &llist[0]);
4404 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4406 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4407 lower_omp (&tseq, ctx);
4408 gimple_seq_add_seq (&llist[0], tseq);
4410 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4411 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4412 lower_omp (&tseq, ctx);
4413 gimple_seq_add_seq (&llist[1], tseq);
4414 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4415 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4416 if (new_vard == new_var)
4417 SET_DECL_VALUE_EXPR (new_var, lvar);
4419 SET_DECL_VALUE_EXPR (new_vard,
4420 build_fold_addr_expr (lvar));
4421 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4426 gimplify_stmt (&dtor, &tseq);
4427 gimple_seq_add_seq (&llist[1], tseq);
4431 /* If this is a reference to constant size reduction var
4432 with placeholder, we haven't emitted the initializer
4433 for it because it is undesirable if SIMD arrays are used.
4434 But if they aren't used, we need to emit the deferred
4435 initialization now. */
4436 else if (omp_is_reference (var) && is_simd)
4437 handle_simd_reference (clause_loc, new_vard, ilist);
4438 x = lang_hooks.decls.omp_clause_default_ctor
4439 (c, unshare_expr (new_var),
4440 build_outer_var_ref (var, ctx));
4442 gimplify_and_add (x, ilist);
4443 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4445 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4446 lower_omp (&tseq, ctx);
4447 gimple_seq_add_seq (ilist, tseq);
4449 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4452 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4453 lower_omp (&tseq, ctx);
4454 gimple_seq_add_seq (dlist, tseq);
4455 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4457 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4462 x = omp_reduction_init (c, TREE_TYPE (new_var));
4463 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4464 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4466 /* reduction(-:var) sums up the partial results, so it
4467 acts identically to reduction(+:var). */
4468 if (code == MINUS_EXPR)
4471 tree new_vard = new_var;
4472 if (is_simd && omp_is_reference (var))
4474 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4475 new_vard = TREE_OPERAND (new_var, 0);
4476 gcc_assert (DECL_P (new_vard));
4479 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4482 tree ref = build_outer_var_ref (var, ctx);
4484 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4489 simt_lane = create_tmp_var (unsigned_type_node);
4490 x = build_call_expr_internal_loc
4491 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4492 TREE_TYPE (ivar), 2, ivar, simt_lane);
4493 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4494 gimplify_assign (ivar, x, &llist[2]);
4496 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4497 ref = build_outer_var_ref (var, ctx);
4498 gimplify_assign (ref, x, &llist[1]);
4500 if (new_vard != new_var)
4502 SET_DECL_VALUE_EXPR (new_vard,
4503 build_fold_addr_expr (lvar));
4504 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4509 if (omp_is_reference (var) && is_simd)
4510 handle_simd_reference (clause_loc, new_vard, ilist);
4511 gimplify_assign (new_var, x, ilist);
4514 tree ref = build_outer_var_ref (var, ctx);
4516 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4517 ref = build_outer_var_ref (var, ctx);
4518 gimplify_assign (ref, x, dlist);
4530 if (sctx.max_vf == 1)
4531 sctx.is_simt = false;
4533 if (sctx.lane || sctx.is_simt)
4535 uid = create_tmp_var (ptr_type_node, "simduid");
4536 /* Don't want uninit warnings on simduid, it is always uninitialized,
4537 but we use it not for the value, but for the DECL_UID only. */
4538 TREE_NO_WARNING (uid) = 1;
4539 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4540 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4541 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4542 gimple_omp_for_set_clauses (ctx->stmt, c);
4544 /* Emit calls denoting privatized variables and initializing a pointer to
4545 structure that holds private variables as fields after ompdevlow pass. */
4548 sctx.simt_eargs[0] = uid;
4550 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4551 gimple_call_set_lhs (g, uid);
4552 gimple_seq_add_stmt (ilist, g);
4553 sctx.simt_eargs.release ();
4555 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4556 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4557 gimple_call_set_lhs (g, simtrec);
4558 gimple_seq_add_stmt (ilist, g);
4563 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4564 gimple_call_set_lhs (g, sctx.lane);
4565 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4566 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4567 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4568 build_int_cst (unsigned_type_node, 0));
4569 gimple_seq_add_stmt (ilist, g);
4570 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
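/* A sketch of the loop built below (simt_lane doubles on each pass, so
   the body runs log2(simt_vf) times; llist[2] holds the per-step
   butterfly-exchange reduction statements):

     simt_lane = 1;
     goto header;
   body:
     <llist[2]>
     simt_lane = simt_lane << 1;
   header:
     if (simt_lane < simt_vf) goto body; else goto end;
   end:  */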
4573 tree simt_vf = create_tmp_var (unsigned_type_node);
4574 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4575 gimple_call_set_lhs (g, simt_vf);
4576 gimple_seq_add_stmt (dlist, g);
4578 tree t = build_int_cst (unsigned_type_node, 1);
4579 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4580 gimple_seq_add_stmt (dlist, g);
4582 t = build_int_cst (unsigned_type_node, 0);
4583 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4584 gimple_seq_add_stmt (dlist, g);
4586 tree body = create_artificial_label (UNKNOWN_LOCATION);
4587 tree header = create_artificial_label (UNKNOWN_LOCATION);
4588 tree end = create_artificial_label (UNKNOWN_LOCATION);
4589 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4590 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4592 gimple_seq_add_seq (dlist, llist[2]);
4594 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4595 gimple_seq_add_stmt (dlist, g);
4597 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4598 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4599 gimple_seq_add_stmt (dlist, g);
4601 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4603 for (int i = 0; i < 2; i++)
4606 tree vf = create_tmp_var (unsigned_type_node);
4607 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4608 gimple_call_set_lhs (g, vf);
4609 gimple_seq *seq = i == 0 ? ilist : dlist;
4610 gimple_seq_add_stmt (seq, g);
4611 tree t = build_int_cst (unsigned_type_node, 0);
4612 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4613 gimple_seq_add_stmt (seq, g);
4614 tree body = create_artificial_label (UNKNOWN_LOCATION);
4615 tree header = create_artificial_label (UNKNOWN_LOCATION);
4616 tree end = create_artificial_label (UNKNOWN_LOCATION);
4617 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4618 gimple_seq_add_stmt (seq, gimple_build_label (body));
4619 gimple_seq_add_seq (seq, llist[i]);
4620 t = build_int_cst (unsigned_type_node, 1);
4621 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4622 gimple_seq_add_stmt (seq, g);
4623 gimple_seq_add_stmt (seq, gimple_build_label (header));
4624 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4625 gimple_seq_add_stmt (seq, g);
4626 gimple_seq_add_stmt (seq, gimple_build_label (end));
4631 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4633 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4634 gimple_seq_add_stmt (dlist, g);
4637 /* The copyin sequence is not to be executed by the main thread, since
4638 that would result in self-copies. Perhaps not visible to scalars,
4639 but it certainly is to C++ operator=. */
4642 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4644 x = build2 (NE_EXPR, boolean_type_node, x,
4645 build_int_cst (TREE_TYPE (x), 0));
4646 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4647 gimplify_and_add (x, ilist);
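/* Roughly (a sketch of the guard, not the built trees):

     if (omp_get_thread_num () != 0)
       <copyin_seq: the copyin assignments collected above>

   so only the non-master threads perform the copies.  */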
4650 /* If any copyin variable is passed by reference, we must ensure the
4651 master thread doesn't modify it before it is copied over in all
4652 threads. Similarly for variables in both firstprivate and
4653 lastprivate clauses we need to ensure the lastprivate copying
4654 happens after firstprivate copying in all threads. And similarly
4655 for UDRs if initializer expression refers to omp_orig. */
4656 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4658 /* Don't add any barrier for #pragma omp simd or
4659 #pragma omp distribute. */
4660 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4661 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4662 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4665 /* If max_vf is non-zero, then we can use only a vectorization factor
4666 up to the max_vf we chose. So stick it into the safelen clause. */
4669 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4670 OMP_CLAUSE_SAFELEN);
4672 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4673 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4676 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4677 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4679 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4680 gimple_omp_for_set_clauses (ctx->stmt, c);
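/* For example (assuming max_vf ended up as 16): a simd loop written
   with no safelen, or with a larger safelen such as safelen(32),
   effectively gets safelen(16) here, so the vectorizer never assumes
   more concurrent iterations than the privatized SIMD arrays were
   sized for.  */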
4686 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4687 both parallel and workshare constructs. PREDICATE may be NULL if it's
4688 always true. */
4691 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4694 tree x, c, label = NULL, orig_clauses = clauses;
4695 bool par_clauses = false;
4696 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4698 /* Early exit if there are no lastprivate or linear clauses. */
4699 for (; clauses; clauses = OMP_CLAUSE_CHAIN (clauses))
4700 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4701 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4702 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4704 if (clauses == NULL)
4706 /* If this was a workshare clause, see if it had been combined
4707 with its parallel. In that case, look for the clauses on the
4708 parallel statement itself. */
4709 if (is_parallel_ctx (ctx))
4713 if (ctx == NULL || !is_parallel_ctx (ctx))
4716 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4717 OMP_CLAUSE_LASTPRIVATE);
4718 if (clauses == NULL)
4723 bool maybe_simt = false;
4724 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4725 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4727 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4728 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4730 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4736 tree label_true, arm1, arm2;
4737 enum tree_code pred_code = TREE_CODE (predicate);
4739 label = create_artificial_label (UNKNOWN_LOCATION);
4740 label_true = create_artificial_label (UNKNOWN_LOCATION);
4741 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4743 arm1 = TREE_OPERAND (predicate, 0);
4744 arm2 = TREE_OPERAND (predicate, 1);
4745 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4746 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4751 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4752 arm2 = boolean_false_node;
4753 pred_code = NE_EXPR;
4757 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4758 c = fold_convert (integer_type_node, c);
4759 simtcond = create_tmp_var (integer_type_node);
4760 gimplify_assign (simtcond, c, stmt_list);
4761 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4763 c = create_tmp_var (integer_type_node);
4764 gimple_call_set_lhs (g, c);
4765 gimple_seq_add_stmt (stmt_list, g);
4766 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4770 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4771 gimple_seq_add_stmt (stmt_list, stmt);
4772 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
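/* The guard just emitted looks roughly like:

     if (PREDICATE) goto label_true; else goto label;
   label_true:
     <copy-out statements appended below>
   label:            (emitted at the end of this function)

   so the copy-outs run only where the sequentially last iteration
   executed.  */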
4775 for (c = clauses; c ;)
4778 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4780 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4781 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4782 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4784 var = OMP_CLAUSE_DECL (c);
4785 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4786 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4787 && is_taskloop_ctx (ctx))
4789 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4790 new_var = lookup_decl (var, ctx->outer);
4794 new_var = lookup_decl (var, ctx);
4795 /* Avoid uninitialized warnings for lastprivate and
4796 for linear iterators. */
4798 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4799 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4800 TREE_NO_WARNING (new_var) = 1;
4803 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4805 tree val = DECL_VALUE_EXPR (new_var);
4806 if (TREE_CODE (val) == ARRAY_REF
4807 && VAR_P (TREE_OPERAND (val, 0))
4808 && lookup_attribute ("omp simd array",
4809 DECL_ATTRIBUTES (TREE_OPERAND (val,
4812 if (lastlane == NULL)
4814 lastlane = create_tmp_var (unsigned_type_node);
4816 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4818 TREE_OPERAND (val, 1));
4819 gimple_call_set_lhs (g, lastlane);
4820 gimple_seq_add_stmt (stmt_list, g);
4822 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4823 TREE_OPERAND (val, 0), lastlane,
4824 NULL_TREE, NULL_TREE);
4827 else if (maybe_simt)
4829 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4830 ? DECL_VALUE_EXPR (new_var)
4832 if (simtlast == NULL)
4834 simtlast = create_tmp_var (unsigned_type_node);
4835 gcall *g = gimple_build_call_internal
4836 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4837 gimple_call_set_lhs (g, simtlast);
4838 gimple_seq_add_stmt (stmt_list, g);
4840 x = build_call_expr_internal_loc
4841 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4842 TREE_TYPE (val), 2, val, simtlast);
4843 new_var = unshare_expr (new_var);
4844 gimplify_assign (new_var, x, stmt_list);
4845 new_var = unshare_expr (new_var);
4848 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4849 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4851 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4852 gimple_seq_add_seq (stmt_list,
4853 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4854 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4856 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4857 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4859 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4860 gimple_seq_add_seq (stmt_list,
4861 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4862 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4866 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4867 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4869 gcc_checking_assert (is_taskloop_ctx (ctx));
4870 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4872 if (is_global_var (ovar))
4876 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4877 if (omp_is_reference (var))
4878 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4879 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4880 gimplify_and_add (x, stmt_list);
4882 c = OMP_CLAUSE_CHAIN (c);
4883 if (c == NULL && !par_clauses)
4885 /* If this was a workshare clause, see if it had been combined
4886 with its parallel. In that case, continue looking for the
4887 clauses also on the parallel statement itself. */
4888 if (is_parallel_ctx (ctx))
4892 if (ctx == NULL || !is_parallel_ctx (ctx))
4895 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4896 OMP_CLAUSE_LASTPRIVATE);
4902 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4905 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4906 (which might be a placeholder). INNER is true if this is an inner
4907 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4908 join markers. Generate the before-loop forking sequence in
4909 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4910 general form of these sequences is
4912 GOACC_REDUCTION_SETUP
4914 GOACC_REDUCTION_INIT
4916 GOACC_REDUCTION_FINI
4918 GOACC_REDUCTION_TEARDOWN. */
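/* Concretely (a sketch based on the calls built below; v1..v3 are
   temporaries, and ref_to_res may be a null constant):

     v1 = GOACC_REDUCTION (SETUP, ref_to_res, incoming, level, op, off);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, op, off);
       <loop body>
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, op, off);
     GOACC_JOIN
     outgoing = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, op, off);  */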
4921 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4922 gcall *fork, gcall *join, gimple_seq *fork_seq,
4923 gimple_seq *join_seq, omp_context *ctx)
4925 gimple_seq before_fork = NULL;
4926 gimple_seq after_fork = NULL;
4927 gimple_seq before_join = NULL;
4928 gimple_seq after_join = NULL;
4929 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4930 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4931 unsigned offset = 0;
4933 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4934 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4936 tree orig = OMP_CLAUSE_DECL (c);
4937 tree var = maybe_lookup_decl (orig, ctx);
4938 tree ref_to_res = NULL_TREE;
4939 tree incoming, outgoing, v1, v2, v3;
4940 bool is_private = false;
4942 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4943 if (rcode == MINUS_EXPR)
4945 else if (rcode == TRUTH_ANDIF_EXPR)
4946 rcode = BIT_AND_EXPR;
4947 else if (rcode == TRUTH_ORIF_EXPR)
4948 rcode = BIT_IOR_EXPR;
4949 tree op = build_int_cst (unsigned_type_node, rcode);
4954 incoming = outgoing = var;
4958 /* See if an outer construct also reduces this variable. */
4959 omp_context *outer = ctx;
4961 while (omp_context *probe = outer->outer)
4963 enum gimple_code type = gimple_code (probe->stmt);
4968 case GIMPLE_OMP_FOR:
4969 cls = gimple_omp_for_clauses (probe->stmt);
4972 case GIMPLE_OMP_TARGET:
4973 if (gimple_omp_target_kind (probe->stmt)
4974 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4977 cls = gimple_omp_target_clauses (probe->stmt);
4985 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4986 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4987 && orig == OMP_CLAUSE_DECL (cls))
4989 incoming = outgoing = lookup_decl (orig, probe);
4990 goto has_outer_reduction;
4992 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4993 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4994 && orig == OMP_CLAUSE_DECL (cls))
5002 /* This is the outermost construct with this reduction,
5003 see if there's a mapping for it. */
5004 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
5005 && maybe_lookup_field (orig, outer) && !is_private)
5007 ref_to_res = build_receiver_ref (orig, false, outer);
5008 if (omp_is_reference (orig))
5009 ref_to_res = build_simple_mem_ref (ref_to_res);
5011 tree type = TREE_TYPE (var);
5012 if (POINTER_TYPE_P (type))
5013 type = TREE_TYPE (type);
5016 incoming = omp_reduction_init_op (loc, rcode, type);
5020 /* Try to look at enclosing contexts for reduction var,
5021 use original if no mapping found. */
5023 omp_context *c = ctx->outer;
5026 t = maybe_lookup_decl (orig, c);
5029 incoming = outgoing = (t ? t : orig);
5032 has_outer_reduction:;
5036 ref_to_res = integer_zero_node;
5038 if (omp_is_reference (orig))
5040 tree type = TREE_TYPE (var);
5041 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5045 tree x = create_tmp_var (TREE_TYPE (type), id);
5046 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5049 v1 = create_tmp_var (type, id);
5050 v2 = create_tmp_var (type, id);
5051 v3 = create_tmp_var (type, id);
5053 gimplify_assign (v1, var, fork_seq);
5054 gimplify_assign (v2, var, fork_seq);
5055 gimplify_assign (v3, var, fork_seq);
5057 var = build_simple_mem_ref (var);
5058 v1 = build_simple_mem_ref (v1);
5059 v2 = build_simple_mem_ref (v2);
5060 v3 = build_simple_mem_ref (v3);
5061 outgoing = build_simple_mem_ref (outgoing);
5063 if (!TREE_CONSTANT (incoming))
5064 incoming = build_simple_mem_ref (incoming);
5069 /* Determine position in reduction buffer, which may be used
5070 by target. */
5071 machine_mode mode = TYPE_MODE (TREE_TYPE (var));
5072 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5073 offset = (offset + align - 1) & ~(align - 1);
5074 tree off = build_int_cst (sizetype, offset);
5075 offset += GET_MODE_SIZE (mode);
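/* The rounding above is the usual align-up idiom: for instance, with
   offset == 6 and align == 4, (6 + 3) & ~3 yields 8, so each slot in
   the reduction buffer starts on a properly aligned boundary.  */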
5079 init_code = build_int_cst (integer_type_node,
5080 IFN_GOACC_REDUCTION_INIT);
5081 fini_code = build_int_cst (integer_type_node,
5082 IFN_GOACC_REDUCTION_FINI);
5083 setup_code = build_int_cst (integer_type_node,
5084 IFN_GOACC_REDUCTION_SETUP);
5085 teardown_code = build_int_cst (integer_type_node,
5086 IFN_GOACC_REDUCTION_TEARDOWN);
5090 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5091 TREE_TYPE (var), 6, setup_code,
5092 unshare_expr (ref_to_res),
5093 incoming, level, op, off);
5095 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5096 TREE_TYPE (var), 6, init_code,
5097 unshare_expr (ref_to_res),
5098 v1, level, op, off);
5100 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5101 TREE_TYPE (var), 6, fini_code,
5102 unshare_expr (ref_to_res),
5103 v2, level, op, off);
5105 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5106 TREE_TYPE (var), 6, teardown_code,
5107 ref_to_res, v3, level, op, off);
5109 gimplify_assign (v1, setup_call, &before_fork);
5110 gimplify_assign (v2, init_call, &after_fork);
5111 gimplify_assign (v3, fini_call, &before_join);
5112 gimplify_assign (outgoing, teardown_call, &after_join);
5115 /* Now stitch things together. */
5116 gimple_seq_add_seq (fork_seq, before_fork);
5118 gimple_seq_add_stmt (fork_seq, fork);
5119 gimple_seq_add_seq (fork_seq, after_fork);
5121 gimple_seq_add_seq (join_seq, before_join);
5123 gimple_seq_add_stmt (join_seq, join);
5124 gimple_seq_add_seq (join_seq, after_join);
5127 /* Generate code to implement the REDUCTION clauses. */
5130 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5132 gimple_seq sub_seq = NULL;
5137 /* OpenACC loop reductions are handled elsewhere. */
5138 if (is_gimple_omp_oacc (ctx->stmt))
5141 /* SIMD reductions are handled in lower_rec_input_clauses. */
5142 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5143 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5146 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5147 update in that case, otherwise use a lock. */
5148 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5149 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5151 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5152 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5154 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5164 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5166 tree var, ref, new_var, orig_var;
5167 enum tree_code code;
5168 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5170 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5173 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5174 orig_var = var = OMP_CLAUSE_DECL (c);
5175 if (TREE_CODE (var) == MEM_REF)
5177 var = TREE_OPERAND (var, 0);
5178 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5179 var = TREE_OPERAND (var, 0);
5180 if (TREE_CODE (var) == ADDR_EXPR)
5181 var = TREE_OPERAND (var, 0);
5184 /* If this is a pointer or reference-based array
5185 section, the var could be private in the outer
5186 context, e.g. on an orphaned loop construct. Pretend this
5187 is the private variable's outer reference. */
5188 ccode = OMP_CLAUSE_PRIVATE;
5189 if (TREE_CODE (var) == INDIRECT_REF)
5190 var = TREE_OPERAND (var, 0);
5193 if (is_variable_sized (var))
5195 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5196 var = DECL_VALUE_EXPR (var);
5197 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5198 var = TREE_OPERAND (var, 0);
5199 gcc_assert (DECL_P (var));
5202 new_var = lookup_decl (var, ctx);
5203 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5204 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5205 ref = build_outer_var_ref (var, ctx, ccode);
5206 code = OMP_CLAUSE_REDUCTION_CODE (c);
5208 /* reduction(-:var) sums up the partial results, so it acts
5209 identically to reduction(+:var). */
5210 if (code == MINUS_EXPR)
5215 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5217 addr = save_expr (addr);
5218 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5219 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5220 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5221 gimplify_and_add (x, stmt_seqp);
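/* I.e., for the single-clause case this emits the equivalent of

     #pragma omp atomic
     *addr = *addr OP new_var;

   avoiding the GOMP_atomic_start/end lock used on the fallback path
   at the end of this function.  */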
5224 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5226 tree d = OMP_CLAUSE_DECL (c);
5227 tree type = TREE_TYPE (d);
5228 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5229 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5230 tree ptype = build_pointer_type (TREE_TYPE (type));
5231 tree bias = TREE_OPERAND (d, 1);
5232 d = TREE_OPERAND (d, 0);
5233 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5235 tree b = TREE_OPERAND (d, 1);
5236 b = maybe_lookup_decl (b, ctx);
5239 b = TREE_OPERAND (d, 1);
5240 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5242 if (integer_zerop (bias))
5246 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5247 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5248 TREE_TYPE (b), b, bias);
5250 d = TREE_OPERAND (d, 0);
5252 /* For ref build_outer_var_ref already performs this, so
5253 only new_var needs a dereference. */
5254 if (TREE_CODE (d) == INDIRECT_REF)
5256 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5257 gcc_assert (omp_is_reference (var) && var == orig_var);
5259 else if (TREE_CODE (d) == ADDR_EXPR)
5261 if (orig_var == var)
5263 new_var = build_fold_addr_expr (new_var);
5264 ref = build_fold_addr_expr (ref);
5269 gcc_assert (orig_var == var);
5270 if (omp_is_reference (var))
5271 ref = build_fold_addr_expr (ref);
5275 tree t = maybe_lookup_decl (v, ctx);
5279 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5280 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5282 if (!integer_zerop (bias))
5284 bias = fold_convert_loc (clause_loc, sizetype, bias);
5285 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5286 TREE_TYPE (new_var), new_var,
5287 unshare_expr (bias));
5288 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5289 TREE_TYPE (ref), ref, bias);
5291 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5292 ref = fold_convert_loc (clause_loc, ptype, ref);
5293 tree m = create_tmp_var (ptype, NULL);
5294 gimplify_assign (m, new_var, stmt_seqp);
5296 m = create_tmp_var (ptype, NULL);
5297 gimplify_assign (m, ref, stmt_seqp);
5299 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5300 tree body = create_artificial_label (UNKNOWN_LOCATION);
5301 tree end = create_artificial_label (UNKNOWN_LOCATION);
5302 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5303 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5304 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5305 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5307 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5308 tree decl_placeholder
5309 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5310 SET_DECL_VALUE_EXPR (placeholder, out);
5311 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5312 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5313 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5314 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5315 gimple_seq_add_seq (&sub_seq,
5316 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5317 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5318 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5319 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5323 x = build2 (code, TREE_TYPE (out), out, priv);
5324 out = unshare_expr (out);
5325 gimplify_assign (out, x, &sub_seq);
5327 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5328 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5329 gimple_seq_add_stmt (&sub_seq, g);
5330 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5331 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5332 gimple_seq_add_stmt (&sub_seq, g);
5333 g = gimple_build_assign (i, PLUS_EXPR, i,
5334 build_int_cst (TREE_TYPE (i), 1));
5335 gimple_seq_add_stmt (&sub_seq, g);
5336 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5337 gimple_seq_add_stmt (&sub_seq, g);
5338 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
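/* The element-wise merge loop just built is, in sketch form:

     i = 0;
   body:
     *out = *out OP *priv;      (or the lowered UDR merge sequence)
     priv += sizeof (elt); out += sizeof (elt); i += 1;
     if (i <= max_index) goto body;
   end:  */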
5340 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5342 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5344 if (omp_is_reference (var)
5345 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5347 ref = build_fold_addr_expr_loc (clause_loc, ref);
5348 SET_DECL_VALUE_EXPR (placeholder, ref);
5349 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5350 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5351 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5352 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5353 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5357 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5358 ref = build_outer_var_ref (var, ctx);
5359 gimplify_assign (ref, x, &sub_seq);
5363 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5365 gimple_seq_add_stmt (stmt_seqp, stmt);
5367 gimple_seq_add_seq (stmt_seqp, sub_seq);
5369 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5371 gimple_seq_add_stmt (stmt_seqp, stmt);
5375 /* Generate code to implement the COPYPRIVATE clauses. */
5378 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5383 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5385 tree var, new_var, ref, x;
5387 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5389 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5392 var = OMP_CLAUSE_DECL (c);
5393 by_ref = use_pointer_for_field (var, NULL);
5395 ref = build_sender_ref (var, ctx);
5396 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5399 x = build_fold_addr_expr_loc (clause_loc, new_var);
5400 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5402 gimplify_assign (ref, x, slist);
5404 ref = build_receiver_ref (var, false, ctx);
5407 ref = fold_convert_loc (clause_loc,
5408 build_pointer_type (TREE_TYPE (new_var)),
5410 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5412 if (omp_is_reference (var))
5414 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5415 ref = build_simple_mem_ref_loc (clause_loc, ref);
5416 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5418 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5419 gimplify_and_add (x, rlist);
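/* So for each copyprivate variable two pieces are produced: SLIST
   (run by the thread that executed the single region) stores the
   variable, or its address when passed by reference, into the copyout
   record, while RLIST (run by every other thread) loads it back out
   through the pointer returned by GOMP_single_copy_start.  */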
5424 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5425 and REDUCTION from the sender (aka parent) side. */
5428 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5432 int ignored_looptemp = 0;
5433 bool is_taskloop = false;
5435 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5436 by GOMP_taskloop. */
5437 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5439 ignored_looptemp = 2;
5443 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5445 tree val, ref, x, var;
5446 bool by_ref, do_in = false, do_out = false;
5447 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5449 switch (OMP_CLAUSE_CODE (c))
5451 case OMP_CLAUSE_PRIVATE:
5452 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5455 case OMP_CLAUSE_FIRSTPRIVATE:
5456 case OMP_CLAUSE_COPYIN:
5457 case OMP_CLAUSE_LASTPRIVATE:
5458 case OMP_CLAUSE_REDUCTION:
5460 case OMP_CLAUSE_SHARED:
5461 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5464 case OMP_CLAUSE__LOOPTEMP_:
5465 if (ignored_looptemp)
5475 val = OMP_CLAUSE_DECL (c);
5476 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5477 && TREE_CODE (val) == MEM_REF)
5479 val = TREE_OPERAND (val, 0);
5480 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5481 val = TREE_OPERAND (val, 0);
5482 if (TREE_CODE (val) == INDIRECT_REF
5483 || TREE_CODE (val) == ADDR_EXPR)
5484 val = TREE_OPERAND (val, 0);
5485 if (is_variable_sized (val))
5489 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5490 outer taskloop region. */
5491 omp_context *ctx_for_o = ctx;
5493 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5494 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5495 ctx_for_o = ctx->outer;
5497 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5499 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5500 && is_global_var (var))
5503 t = omp_member_access_dummy_var (var);
5506 var = DECL_VALUE_EXPR (var);
5507 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5509 var = unshare_and_remap (var, t, o);
5511 var = unshare_expr (var);
5514 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5516 /* Handle taskloop firstprivate/lastprivate, where the
5517 lastprivate on GIMPLE_OMP_TASK is represented as
5518 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5519 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5520 x = omp_build_component_ref (ctx->sender_decl, f);
5521 if (use_pointer_for_field (val, ctx))
5522 var = build_fold_addr_expr (var);
5523 gimplify_assign (x, var, ilist);
5524 DECL_ABSTRACT_ORIGIN (f) = NULL;
5528 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5529 || val == OMP_CLAUSE_DECL (c))
5530 && is_variable_sized (val))
5532 by_ref = use_pointer_for_field (val, NULL);
5534 switch (OMP_CLAUSE_CODE (c))
5536 case OMP_CLAUSE_FIRSTPRIVATE:
5537 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5539 && is_task_ctx (ctx))
5540 TREE_NO_WARNING (var) = 1;
5544 case OMP_CLAUSE_PRIVATE:
5545 case OMP_CLAUSE_COPYIN:
5546 case OMP_CLAUSE__LOOPTEMP_:
5550 case OMP_CLAUSE_LASTPRIVATE:
5551 if (by_ref || omp_is_reference (val))
5553 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5560 if (lang_hooks.decls.omp_private_outer_ref (val))
5565 case OMP_CLAUSE_REDUCTION:
5567 if (val == OMP_CLAUSE_DECL (c))
5568 do_out = !(by_ref || omp_is_reference (val));
5570 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5579 ref = build_sender_ref (val, ctx);
5580 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5581 gimplify_assign (ref, x, ilist);
5582 if (is_task_ctx (ctx))
5583 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5588 ref = build_sender_ref (val, ctx);
5589 gimplify_assign (var, ref, olist);
5594 /* Generate code to implement SHARED from the sender (aka parent)
5595 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5596 list things that got automatically shared. */
5599 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5601 tree var, ovar, nvar, t, f, x, record_type;
5603 if (ctx->record_type == NULL)
5606 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5607 for (f = TYPE_FIELDS (record_type); f; f = DECL_CHAIN (f))
5609 ovar = DECL_ABSTRACT_ORIGIN (f);
5610 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5613 nvar = maybe_lookup_decl (ovar, ctx);
5614 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5617 /* If CTX is a nested parallel directive, find the immediately
5618 enclosing parallel or workshare construct that contains a
5619 mapping for OVAR. */
5620 var = lookup_decl_in_outer_ctx (ovar, ctx);
5622 t = omp_member_access_dummy_var (var);
5625 var = DECL_VALUE_EXPR (var);
5626 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5628 var = unshare_and_remap (var, t, o);
5630 var = unshare_expr (var);
5633 if (use_pointer_for_field (ovar, ctx))
5635 x = build_sender_ref (ovar, ctx);
5636 var = build_fold_addr_expr (var);
5637 gimplify_assign (x, var, ilist);
5641 x = build_sender_ref (ovar, ctx);
5642 gimplify_assign (x, var, ilist);
5644 if (!TREE_READONLY (var)
5645 /* We don't need to receive a new reference to a result
5646 or parm decl. In fact we may not store to it as we will
5647 invalidate any pending RSO and generate wrong gimple
5648 during inlining. */
5649 && !((TREE_CODE (var) == RESULT_DECL
5650 || TREE_CODE (var) == PARM_DECL)
5651 && DECL_BY_REFERENCE (var)))
5653 x = build_sender_ref (ovar, ctx);
5654 gimplify_assign (var, x, olist);
5660 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5661 other information that must be processed by the target compiler.
5662 Return the maximum number of dimensions the associated loop might
5663 be partitioned over. */
5666 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5667 gimple_seq *seq, omp_context *ctx)
5669 unsigned levels = 0;
5671 tree gang_static = NULL_TREE;
5672 auto_vec<tree, 5> args;
5674 args.quick_push (build_int_cst
5675 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5676 args.quick_push (ddvar);
5677 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5679 switch (OMP_CLAUSE_CODE (c))
5681 case OMP_CLAUSE_GANG:
5682 tag |= OLF_DIM_GANG;
5683 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5684 /* static:* is represented by -1, and we can ignore it, as
5685 scheduling is always static. */
5686 if (gang_static && integer_minus_onep (gang_static))
5687 gang_static = NULL_TREE;
5691 case OMP_CLAUSE_WORKER:
5692 tag |= OLF_DIM_WORKER;
5696 case OMP_CLAUSE_VECTOR:
5697 tag |= OLF_DIM_VECTOR;
5701 case OMP_CLAUSE_SEQ:
5705 case OMP_CLAUSE_AUTO:
5709 case OMP_CLAUSE_INDEPENDENT:
5710 tag |= OLF_INDEPENDENT;
5713 case OMP_CLAUSE_TILE:
5724 if (DECL_P (gang_static))
5725 gang_static = build_outer_var_ref (gang_static, ctx);
5726 tag |= OLF_GANG_STATIC;
5729 /* In a parallel region, loops are implicitly INDEPENDENT. */
5730 omp_context *tgt = enclosing_target_ctx (ctx);
5731 if (!tgt || is_oacc_parallel (tgt))
5732 tag |= OLF_INDEPENDENT;
5735 /* Tiling could use all 3 levels. */
5739 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5740 Ensure at least one level, or 2 for possible auto
5741 partitioning. */
5742 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5743 << OLF_DIM_BASE) | OLF_SEQ));
5745 if (levels < 1u + maybe_auto)
5746 levels = 1u + maybe_auto;
5749 args.quick_push (build_int_cst (integer_type_node, levels));
5750 args.quick_push (build_int_cst (integer_type_node, tag));
5752 args.quick_push (gang_static);
5754 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5755 gimple_set_location (call, loc);
5756 gimple_set_lhs (call, ddvar);
5757 gimple_seq_add_stmt (seq, call);
5762 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
5763 partitioning level of the enclosed region. */
5766 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5767 tree tofollow, gimple_seq *seq)
5769 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5770 : IFN_UNIQUE_OACC_TAIL_MARK);
5771 tree marker = build_int_cst (integer_type_node, marker_kind);
5772 int nargs = 2 + (tofollow != NULL_TREE);
5773 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5774 marker, ddvar, tofollow);
5775 gimple_set_location (call, loc);
5776 gimple_set_lhs (call, ddvar);
5777 gimple_seq_add_stmt (seq, call);
5780 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5781 the loop clauses, from which we extract reductions. Initialize
5782 HEAD and TAIL. */
5785 lower_oacc_head_tail (location_t loc, tree clauses,
5786 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5789 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5790 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5792 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5793 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5794 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5797 for (unsigned done = 1; count; count--, done++)
5799 gimple_seq fork_seq = NULL;
5800 gimple_seq join_seq = NULL;
5802 tree place = build_int_cst (integer_type_node, -1);
5803 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5804 fork_kind, ddvar, place);
5805 gimple_set_location (fork, loc);
5806 gimple_set_lhs (fork, ddvar);
5808 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5809 join_kind, ddvar, place);
5810 gimple_set_location (join, loc);
5811 gimple_set_lhs (join, ddvar);
5813 /* Mark the beginning of this level sequence. */
5815 lower_oacc_loop_marker (loc, ddvar, true,
5816 build_int_cst (integer_type_node, count),
5818 lower_oacc_loop_marker (loc, ddvar, false,
5819 build_int_cst (integer_type_node, done),
5822 lower_oacc_reductions (loc, clauses, place, inner,
5823 fork, join, &fork_seq, &join_seq, ctx);
5825 /* Append this level to head. */
5826 gimple_seq_add_seq (head, fork_seq);
5827 /* Prepend it to tail. */
5828 gimple_seq_add_seq (&join_seq, *tail);
5834 /* Mark the end of the sequence. */
5835 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5836 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
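/* The net effect is properly nested bracketing: with two levels, HEAD
   ends up as  fork(level 2); fork(level 1);  while TAIL becomes
   join(level 1); join(level 2);  because each iteration appends its
   fork sequence to HEAD but prepends the existing TAIL to its join
   sequence.  */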
5839 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5840 catch handler and return it. This prevents programs from violating the
5841 structured block semantics with throws. */
5844 maybe_catch_exception (gimple_seq body)
5849 if (!flag_exceptions)
5852 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5853 decl = lang_hooks.eh_protect_cleanup_actions ();
5855 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5857 g = gimple_build_eh_must_not_throw (decl);
5858 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5861 return gimple_seq_alloc_with_stmt (g);
5865 /* Routines to lower OMP directives into OMP-GIMPLE. */
5867 /* If ctx is a worksharing context inside of a cancellable parallel
5868 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5869 and conditional branch to parallel's cancel_label to handle
5870 cancellation in the implicit barrier. */
5873 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5875 gimple *omp_return = gimple_seq_last_stmt (*body);
5876 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5877 if (gimple_omp_return_nowait_p (omp_return))
5880 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5881 && ctx->outer->cancellable)
5883 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5884 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5885 tree lhs = create_tmp_var (c_bool_type);
5886 gimple_omp_return_set_lhs (omp_return, lhs);
5887 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5888 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5889 fold_convert (c_bool_type,
5890 boolean_false_node),
5891 ctx->outer->cancel_label, fallthru_label);
5892 gimple_seq_add_stmt (body, g);
5893 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
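/* Roughly, the tail of the region then behaves like:

     lhs = <result of the implicit barrier>;   (the OMP_RETURN's lhs)
     if (lhs != false) goto cancel_label;
   fallthru_label:

   Note BUILT_IN_GOMP_CANCEL is consulted above only for its boolean
   return type.  */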
5897 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5898 CTX is the enclosing OMP context for the current statement. */
5901 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5903 tree block, control;
5904 gimple_stmt_iterator tgsi;
5905 gomp_sections *stmt;
5907 gbind *new_stmt, *bind;
5908 gimple_seq ilist, dlist, olist, new_body;
5910 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5912 push_gimplify_context ();
5916 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5917 &ilist, &dlist, ctx, NULL);
5919 new_body = gimple_omp_body (stmt);
5920 gimple_omp_set_body (stmt, NULL);
5921 tgsi = gsi_start (new_body);
5922 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5927 sec_start = gsi_stmt (tgsi);
5928 sctx = maybe_lookup_ctx (sec_start);
5931 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5932 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5933 GSI_CONTINUE_LINKING);
5934 gimple_omp_set_body (sec_start, NULL);
5936 if (gsi_one_before_end_p (tgsi))
5938 gimple_seq l = NULL;
5939 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5941 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5942 gimple_omp_section_set_last (sec_start);
5945 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5946 GSI_CONTINUE_LINKING);
5949 block = make_node (BLOCK);
5950 bind = gimple_build_bind (NULL, new_body, block);
5953 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5955 block = make_node (BLOCK);
5956 new_stmt = gimple_build_bind (NULL, NULL, block);
5957 gsi_replace (gsi_p, new_stmt, true);
5959 pop_gimplify_context (new_stmt);
5960 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5961 BLOCK_VARS (block) = gimple_bind_vars (bind);
5962 if (BLOCK_VARS (block))
5963 TREE_USED (block) = 1;
5966 gimple_seq_add_seq (&new_body, ilist);
5967 gimple_seq_add_stmt (&new_body, stmt);
5968 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5969 gimple_seq_add_stmt (&new_body, bind);
5971 control = create_tmp_var (unsigned_type_node, ".section");
5972 t = gimple_build_omp_continue (control, control);
5973 gimple_omp_sections_set_control (stmt, control);
5974 gimple_seq_add_stmt (&new_body, t);
5976 gimple_seq_add_seq (&new_body, olist);
5977 if (ctx->cancellable)
5978 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5979 gimple_seq_add_seq (&new_body, dlist);
5981 new_body = maybe_catch_exception (new_body);
5983 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5984 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5985 t = gimple_build_omp_return (nowait);
5986 gimple_seq_add_stmt (&new_body, t);
5987 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5989 gimple_bind_set_body (new_stmt, new_body);
5993 /* A subroutine of lower_omp_single. Expand the simple form of
5994 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5996 if (GOMP_single_start ())
5997 BODY;
5998 [ GOMP_barrier (); ] -> unless 'nowait' is present.
6000 FIXME. It may be better to delay expanding the logic of this until
6001 pass_expand_omp. The expanded logic may make the job more difficult
6002 for a synchronization analysis pass. */
6005 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
6007 location_t loc = gimple_location (single_stmt);
6008 tree tlabel = create_artificial_label (loc);
6009 tree flabel = create_artificial_label (loc);
6010 gimple *call, *cond;
6013 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6014 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6015 call = gimple_build_call (decl, 0);
6016 gimple_call_set_lhs (call, lhs);
6017 gimple_seq_add_stmt (pre_p, call);
6019 cond = gimple_build_cond (EQ_EXPR, lhs,
6020 fold_convert_loc (loc, TREE_TYPE (lhs),
6023 gimple_seq_add_stmt (pre_p, cond);
6024 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6025 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6026 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6030 /* A subroutine of lower_omp_single. Expand the simple form of
6031 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6033 #pragma omp single copyprivate (a, b, c)
6035 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6038 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6044 GOMP_single_copy_end (&copyout);
6055 FIXME. It may be better to delay expanding the logic of this until
6056 pass_expand_omp. The expanded logic may make the job more difficult
6057 for a synchronization analysis pass. */
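/* For reference, the elided middle of the sketch above is, roughly:

     if ((copyout_p = GOMP_single_copy_start ()) == NULL)
       {
	 BODY;
	 copyout.a = a; copyout.b = b; copyout.c = c;
	 GOMP_single_copy_end (&copyout);
       }
     else
       { a = copyout_p->a; b = copyout_p->b; c = copyout_p->c; }
     GOMP_barrier ();  */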
6060 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6063 tree ptr_type, t, l0, l1, l2, bfn_decl;
6064 gimple_seq copyin_seq;
6065 location_t loc = gimple_location (single_stmt);
6067 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6069 ptr_type = build_pointer_type (ctx->record_type);
6070 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6072 l0 = create_artificial_label (loc);
6073 l1 = create_artificial_label (loc);
6074 l2 = create_artificial_label (loc);
6076 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6077 t = build_call_expr_loc (loc, bfn_decl, 0);
6078 t = fold_convert_loc (loc, ptr_type, t);
6079 gimplify_assign (ctx->receiver_decl, t, pre_p);
6081 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6082 build_int_cst (ptr_type, 0));
6083 t = build3 (COND_EXPR, void_type_node, t,
6084 build_and_jump (&l0), build_and_jump (&l1));
6085 gimplify_and_add (t, pre_p);
6087 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6089 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6092 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6095 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6096 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6097 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6098 gimplify_and_add (t, pre_p);
6100 t = build_and_jump (&l2);
6101 gimplify_and_add (t, pre_p);
6103 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6105 gimple_seq_add_seq (pre_p, copyin_seq);
6107 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
/* Expand code for an OpenMP single directive.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
  if (ctx->record_type)
    {
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_constructor (ctx->record_type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
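/* The bind built above contains, roughly in order: the lowered input
   clauses, the GIMPLE_OMP_SINGLE with its expanded body, the destructor
   list, and a tail holding the GIMPLE_OMP_RETURN plus the implicit
   barrier / cancellation handling.  The clobber of the sender struct in
   the copyprivate case marks the end of its lifetime once the data has
   been broadcast, so later passes may reuse its stack slot.  */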
/* Expand code for an OpenMP master directive.  */

static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, lab = NULL, x, bfn_decl;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tseq;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  x = build_call_expr_loc (loc, bfn_decl, 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  tseq = NULL;
  gimplify_and_add (x, &tseq);
  gimple_bind_add_seq (bind, tseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_label (lab));

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
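/* The lowered form of master corresponds roughly to

       if (__builtin_omp_get_thread_num () != 0) goto lab;
       BODY;
     lab:

   followed by a nowait GIMPLE_OMP_RETURN, since master implies no
   barrier at its end.  */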
/* Expand code for an OpenMP taskgroup directive.  */

static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  gcall *x;
  gbind *bind;
  tree block = make_node (BLOCK);

  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
			 0);
  gimple_bind_add_stmt (bind, x);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
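/* Note that only the GOMP_taskgroup_start call is emitted during
   lowering; the matching GOMP_taskgroup_end is expected to be
   introduced later, as part of expanding the GIMPLE_OMP_RETURN that
   terminates the region in pass_expand_omp.  */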
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
  */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();
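  /* Layout of folded_deps, as used by the loop below: element 0 holds
     the running GCD of the first-dimension offsets; elements
     1 .. len-1 hold the folded (lexicographically latest) offsets of
     the remaining dimensions; elements len .. 2*len-2 hold the offsets
     of the clause currently being examined.  */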
  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }
  if (folded_dep)
    {
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp ordered threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
/* Expand code for an OpenMP ordered directive.  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
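/* Under SIMT (the maybe_simt path above), the lanes of a warp must run
   the ordered body one at a time.  The emitted sequence is, roughly:

     counter = GOMP_SIMT_LANE ();
   body:
     if (GOMP_SIMT_ORDERED_PRED (counter) != 0) goto test;
     BODY;
   test:
     counter--;
     if (GOMP_SIMT_VOTE_ANY (counter >= 0)) goto body;

   so lane 0 executes first, then lane 1, and so on until no lane has a
   non-negative counter left.  */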
/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   it requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

static GTY(()) hash_map<tree, tree> *critical_name_mutexes;

static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside an offloaded region or
	 inside a function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
      || gimple_omp_for_grid_phony (fd->for_stmt))
    cond = omp_grid_lastprivate_predicate (fd);
  else
    {
      tree n2 = fd->loop.n2;
      if (fd->collapse > 1
	  && TREE_CODE (n2) != INTEGER_CST
	  && gimple_omp_for_combined_into_p (fd->for_stmt))
	{
	  struct omp_context *taskreg_ctx = NULL;
	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	    {
	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
		{
		  if (gimple_omp_for_combined_into_p (gfor))
		    {
		      gcc_assert (ctx->outer->outer
				  && is_parallel_ctx (ctx->outer->outer));
		      taskreg_ctx = ctx->outer->outer;
		    }
		  else
		    {
		      struct omp_for_data outer_fd;
		      omp_extract_for_data (gfor, &outer_fd, NULL);
		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		    }
		}
	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
		taskreg_ctx = ctx->outer->outer;
	    }
	  else if (is_taskreg_ctx (ctx->outer))
	    taskreg_ctx = ctx->outer;
	  if (taskreg_ctx)
	    {
	      int i;
	      tree taskreg_clauses
		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	      tree innerc = omp_find_clause (taskreg_clauses,
					     OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	      for (i = 0; i < fd->collapse; i++)
		{
		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					    OMP_CLAUSE__LOOPTEMP_);
		  gcc_assert (innerc);
		}
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      if (innerc)
		n2 = fold_convert (TREE_TYPE (n2),
				   lookup_decl (OMP_CLAUSE_DECL (innerc),
						taskreg_ctx));
	    }
	}
      cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
    }

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
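/* For instance, for 'for (i = 0; i < N; i++)' with step 1 the gating
   condition becomes 'i == N' (the EQ_EXPR case above), so only the
   thread that ran the final iteration performs the lastprivate
   copy-out, and 'i' is pre-initialized so threads that receive no
   iterations can never satisfy the predicate.  */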
/* Lower code for an OMP loop directive.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and its block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
    }

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt),
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);

  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
		     && gimple_omp_for_grid_phony (stmt));
  if (!phony_loop)
    gimple_seq_add_stmt (&body, stmt);
  gimple_seq_add_seq (&body, gimple_omp_body (stmt));

  if (!phony_loop)
    gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							   fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  body = maybe_catch_exception (body);

  if (!phony_loop)
    {
      /* Region exit marker goes at the end of the loop body.  */
      gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
      maybe_add_implicit_barrier_cancel (ctx, &body);
    }

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
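/* After this function the loop region has, roughly, the shape

     <pre-body and input clauses>
     GIMPLE_OMP_FOR
     <lowered loop body>
     GIMPLE_OMP_CONTINUE (v, v)
     <reduction, cancellation label, dtor list>
     GIMPLE_OMP_RETURN (nowait?)

   which pass_expand_omp later turns into explicit iteration-space
   computation and libgomp calls.  */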
/* Callback for walk_stmts.  Check if the current statement only contains
   GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */

static tree
check_combined_parallel (gimple_stmt_iterator *gsi_p,
			 bool *handled_ops_p,
			 struct walk_stmt_info *wi)
{
  int *info = (int *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
      *info = *info == 0 ? 1 : -1;
      break;
    default:
      *info = -1;
      break;
    }
  return NULL;
}
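/* lower_omp_taskreg below walks a parallel body with this callback:
   *info ends up 1 iff exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS
   was seen, in which case the parallel can be marked as a combined
   construct.  */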
struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  omp_context *ctx;
};

static tree
task_copyfn_copy_decl (tree var, copy_body_data *cb)
{
  struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;

  if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
    return create_tmp_var (TREE_TYPE (var));
  else
    return var;
}

static tree
task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
{
  tree name, new_fields = NULL, type, f;

  type = lang_hooks.types.make_type (RECORD_TYPE);
  name = DECL_NAME (TYPE_NAME (orig_type));
  name = build_decl (gimple_location (tcctx->ctx->stmt),
		     TYPE_DECL, name, type);
  TYPE_NAME (type) = name;

  for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
    {
      tree new_f = copy_node (f);
      DECL_CONTEXT (new_f) = type;
      TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
      TREE_CHAIN (new_f) = new_fields;
      walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		 &tcctx->cb, NULL);
      new_fields = new_f;
      tcctx->cb.decl_map->put (f, new_f);
    }
  TYPE_FIELDS (type) = nreverse (new_fields);
  layout_type (type);
  return type;
}

/* Create task copyfn.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    tcctx.cb.decl_map = NULL;

  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  size_t n_in = 0, n_out = 0, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_IN:
	  n_in++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  n_out++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  gcc_unreachable ();
	}
  tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
  gimple_seq_add_stmt (iseq, g);
  r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
	      NULL_TREE);
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
  gimple_seq_add_stmt (iseq, g);
  for (i = 0; i < 2; i++)
    {
      if ((i ? n_in : n_out) == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
	  {
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  tree clobber = build_constructor (type, NULL);
  TREE_THIS_VOLATILE (clobber) = 1;
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
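/* The array built above follows the depend format the runtime expects:
   element 0 is the total number of depend clauses, element 1 the number
   of out/inout clauses, followed first by the addresses of the
   out/inout dependencies and then by those of the in dependencies.  */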
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  par_bind
    = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
  par_body = gimple_bind_body (par_bind);
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  push_gimplify_context ();

  par_olist = NULL;
  par_ilist = NULL;
  par_rlist = NULL;
  bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
    && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
  if (phony_construct && ctx->record_type)
    {
      gcc_checking_assert (!ctx->receiver_decl);
      ctx->receiver_decl = create_tmp_var
	(build_reference_type (ctx->record_type), ".omp_rec");
    }
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
    lower_reduction_clauses (clauses, &par_rlist, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  olist = NULL;
  ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  if (!phony_construct)
    {
      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }

  bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  if (!phony_construct)
    gimple_bind_add_stmt (bind, stmt);
  else
    gimple_bind_add_seq (bind, new_body);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
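/* When depend clauses are present, the result is nested roughly as

     dep_bind { dep_ilist; bind { ilist; GIMPLE_OMP_TASK; olist }; dep_olist }

   so the dependence array is initialized before the task is spawned and
   clobbered after it.  */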
/* Lower the GIMPLE_OMP_TARGET in the current statement
   in GSI_P.  CTX holds context information for the directive.  */

static void
lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t, c;
  gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
  gbind *tgt_bind, *bind, *dep_bind = NULL;
  gimple_seq tgt_body, olist, ilist, fplist, new_body;
  location_t loc = gimple_location (stmt);
  bool offloaded, data_region;
  unsigned int map_cnt = 0;

  offloaded = is_gimple_omp_offloaded (stmt);
  switch (gimple_omp_target_kind (stmt))
    {
    case GF_OMP_TARGET_KIND_REGION:
    case GF_OMP_TARGET_KIND_UPDATE:
    case GF_OMP_TARGET_KIND_ENTER_DATA:
    case GF_OMP_TARGET_KIND_EXIT_DATA:
    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
    case GF_OMP_TARGET_KIND_OACC_KERNELS:
    case GF_OMP_TARGET_KIND_OACC_UPDATE:
    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
    case GF_OMP_TARGET_KIND_OACC_DECLARE:
      data_region = false;
      break;
    case GF_OMP_TARGET_KIND_DATA:
    case GF_OMP_TARGET_KIND_OACC_DATA:
    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
      data_region = true;
      break;
    default:
      gcc_unreachable ();
    }

  clauses = gimple_omp_target_clauses (stmt);

  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  tgt_bind = NULL;
  tgt_body = NULL;
  if (offloaded)
    {
      tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
      tgt_body = gimple_bind_body (tgt_bind);
    }
  else if (data_region)
    tgt_body = gimple_omp_body (stmt);
  child_fn = ctx->cb.dst_fn;

  push_gimplify_context ();
  fplist = NULL;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	tree var, x;

      default:
	break;
      case OMP_CLAUSE_MAP:
#if CHECKING_P
	/* First check what we're prepared to handle in the following.  */
	switch (OMP_CLAUSE_MAP_KIND (c))
	  {
	  case GOMP_MAP_ALLOC:
	  case GOMP_MAP_TO:
	  case GOMP_MAP_FROM:
	  case GOMP_MAP_TOFROM:
	  case GOMP_MAP_POINTER:
	  case GOMP_MAP_TO_PSET:
	  case GOMP_MAP_DELETE:
	  case GOMP_MAP_RELEASE:
	  case GOMP_MAP_ALWAYS_TO:
	  case GOMP_MAP_ALWAYS_FROM:
	  case GOMP_MAP_ALWAYS_TOFROM:
	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	  case GOMP_MAP_STRUCT:
	  case GOMP_MAP_ALWAYS_POINTER:
	    break;
	  case GOMP_MAP_FORCE_ALLOC:
	  case GOMP_MAP_FORCE_TO:
	  case GOMP_MAP_FORCE_FROM:
	  case GOMP_MAP_FORCE_TOFROM:
	  case GOMP_MAP_FORCE_PRESENT:
	  case GOMP_MAP_FORCE_DEVICEPTR:
	  case GOMP_MAP_DEVICE_RESIDENT:
	  case GOMP_MAP_LINK:
	    gcc_assert (is_gimple_omp_oacc (stmt));
	    break;
	  default:
	    gcc_unreachable ();
	  }
#endif
	  /* FALLTHRU */
      case OMP_CLAUSE_TO:
      case OMP_CLAUSE_FROM:
      oacc_firstprivate:
	var = OMP_CLAUSE_DECL (c);
	if (!DECL_P (var))
	  {
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
		|| (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		    && (OMP_CLAUSE_MAP_KIND (c)
			!= GOMP_MAP_FIRSTPRIVATE_POINTER)))
	      map_cnt++;
	    continue;
	  }

	if (DECL_SIZE (var)
	    && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
	  {
	    tree var2 = DECL_VALUE_EXPR (var);
	    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
	    var2 = TREE_OPERAND (var2, 0);
	    gcc_assert (DECL_P (var2));
	    var = var2;
	  }

	if (offloaded
	    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	  {
	    if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      {
		if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
		    && varpool_node::get_create (var)->offloadable)
		  continue;

		tree type = build_pointer_type (TREE_TYPE (var));
		tree new_var = lookup_decl (var, ctx);
		x = create_tmp_var_raw (type, get_name (new_var));
		gimple_add_tmp_var (x);
		x = build_simple_mem_ref (x);
		SET_DECL_VALUE_EXPR (new_var, x);
		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	      }
	    continue;
	  }

	if (!maybe_lookup_field (var, ctx))
	  continue;

	/* Don't remap oacc parallel reduction variables, because the
	   intermediate result must be local to each gang.  */
	if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
	  {
	    x = build_receiver_ref (var, true, ctx);
	    tree new_var = lookup_decl (var, ctx);

	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		&& !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      x = build_simple_mem_ref (x);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	      {
		gcc_assert (is_gimple_omp_oacc (ctx->stmt));
		if (omp_is_reference (new_var))
		  {
		    /* Create a local object to hold the instance
		       value.  */
		    tree type = TREE_TYPE (TREE_TYPE (new_var));
		    const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
		    tree inst = create_tmp_var (type, id);
		    gimplify_assign (inst, fold_indirect_ref (x), &fplist);
		    x = build_fold_addr_expr (inst);
		  }
		gimplify_assign (new_var, x, &fplist);
	      }
	    else if (DECL_P (new_var))
	      {
		SET_DECL_VALUE_EXPR (new_var, x);
		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	      }
	    else
	      gcc_unreachable ();
	  }
	map_cnt++;
	break;

      case OMP_CLAUSE_FIRSTPRIVATE:
	if (is_oacc_parallel (ctx))
	  goto oacc_firstprivate;
	map_cnt++;
	var = OMP_CLAUSE_DECL (c);
	if (!omp_is_reference (var)
	    && !is_gimple_reg_type (TREE_TYPE (var)))
	  {
	    tree new_var = lookup_decl (var, ctx);
	    if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_pvar = lookup_decl (pvar, ctx);
		x = build_fold_indirect_ref (new_pvar);
		TREE_THIS_NOTRAP (x) = 1;
	      }
	    else
	      x = build_receiver_ref (var, true, ctx);
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	break;

      case OMP_CLAUSE_PRIVATE:
	if (is_gimple_omp_oacc (ctx->stmt))
	  break;
	var = OMP_CLAUSE_DECL (c);
	if (is_variable_sized (var))
	  {
	    tree new_var = lookup_decl (var, ctx);
	    tree pvar = DECL_VALUE_EXPR (var);
	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
	    pvar = TREE_OPERAND (pvar, 0);
	    gcc_assert (DECL_P (pvar));
	    tree new_pvar = lookup_decl (pvar, ctx);
	    x = build_fold_indirect_ref (new_pvar);
	    TREE_THIS_NOTRAP (x) = 1;
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	break;

      case OMP_CLAUSE_USE_DEVICE_PTR:
      case OMP_CLAUSE_IS_DEVICE_PTR:
	var = OMP_CLAUSE_DECL (c);
	map_cnt++;
	if (is_variable_sized (var))
	  {
	    tree new_var = lookup_decl (var, ctx);
	    tree pvar = DECL_VALUE_EXPR (var);
	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
	    pvar = TREE_OPERAND (pvar, 0);
	    gcc_assert (DECL_P (pvar));
	    tree new_pvar = lookup_decl (pvar, ctx);
	    x = build_fold_indirect_ref (new_pvar);
	    TREE_THIS_NOTRAP (x) = 1;
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	  {
	    tree new_var = lookup_decl (var, ctx);
	    tree type = build_pointer_type (TREE_TYPE (var));
	    x = create_tmp_var_raw (type, get_name (new_var));
	    gimple_add_tmp_var (x);
	    x = build_simple_mem_ref (x);
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	else
	  {
	    tree new_var = lookup_decl (var, ctx);
	    x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
	    gimple_add_tmp_var (x);
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	break;
      }

  if (offloaded)
    {
      target_nesting_level++;
      lower_omp (&tgt_body, ctx);
      target_nesting_level--;
    }
  else if (data_region)
    lower_omp (&tgt_body, ctx);
7824 /* Declare all the variables created by mapping and the variables
7825 declared in the scope of the target body. */
7826 record_vars_into (ctx
->block_vars
, child_fn
);
7827 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
7832 if (ctx
->record_type
)
7835 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
7836 DECL_NAMELESS (ctx
->sender_decl
) = 1;
7837 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
7838 t
= make_tree_vec (3);
7839 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
7841 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
7843 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
7844 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
7845 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
7846 tree tkind_type
= short_unsigned_type_node
;
7847 int talign_shift
= 8;
7849 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
7851 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
7852 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
7853 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
7854 gimple_omp_target_set_data_arg (stmt
, t
);
7856 vec
<constructor_elt
, va_gc
> *vsize
;
7857 vec
<constructor_elt
, va_gc
> *vkind
;
7858 vec_alloc (vsize
, map_cnt
);
7859 vec_alloc (vkind
, map_cnt
);
7860 unsigned int map_idx
= 0;
7862 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7863 switch (OMP_CLAUSE_CODE (c
))
7865 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
7866 unsigned int talign
;
7871 case OMP_CLAUSE_MAP
:
7873 case OMP_CLAUSE_FROM
:
7874 oacc_firstprivate_map
:
7876 ovar
= OMP_CLAUSE_DECL (c
);
7877 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7878 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7879 || (OMP_CLAUSE_MAP_KIND (c
)
7880 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
7884 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7885 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
7887 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
7888 == get_base_address (ovar
));
7889 nc
= OMP_CLAUSE_CHAIN (c
);
7890 ovar
= OMP_CLAUSE_DECL (nc
);
7894 tree x
= build_sender_ref (ovar
, ctx
);
7896 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
7897 gimplify_assign (x
, v
, &ilist
);
7903 if (DECL_SIZE (ovar
)
7904 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
7906 tree ovar2
= DECL_VALUE_EXPR (ovar
);
7907 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
7908 ovar2
= TREE_OPERAND (ovar2
, 0);
7909 gcc_assert (DECL_P (ovar2
));
7912 if (!maybe_lookup_field (ovar
, ctx
))
7916 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
7917 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
7918 talign
= DECL_ALIGN_UNIT (ovar
);
7921 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
7922 x
= build_sender_ref (ovar
, ctx
);
7924 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7925 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
7926 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
7927 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
7929 gcc_assert (offloaded
);
7931 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
7932 mark_addressable (avar
);
7933 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
7934 talign
= DECL_ALIGN_UNIT (avar
);
7935 avar
= build_fold_addr_expr (avar
);
7936 gimplify_assign (x
, avar
, &ilist
);
7938 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7940 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
7941 if (!omp_is_reference (var
))
7943 if (is_gimple_reg (var
)
7944 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
7945 TREE_NO_WARNING (var
) = 1;
7946 var
= build_fold_addr_expr (var
);
7949 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
7950 gimplify_assign (x
, var
, &ilist
);
7952 else if (is_gimple_reg (var
))
7954 gcc_assert (offloaded
);
7955 tree avar
= create_tmp_var (TREE_TYPE (var
));
7956 mark_addressable (avar
);
7957 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
7958 if (GOMP_MAP_COPY_TO_P (map_kind
)
7959 || map_kind
== GOMP_MAP_POINTER
7960 || map_kind
== GOMP_MAP_TO_PSET
7961 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
7963 /* If we need to initialize a temporary
7964 with VAR because it is not addressable, and
7965 the variable hasn't been initialized yet, then
7966 we'll get a warning for the store to avar.
7967 Don't warn in that case, the mapping might
7969 TREE_NO_WARNING (var
) = 1;
7970 gimplify_assign (avar
, var
, &ilist
);
7972 avar
= build_fold_addr_expr (avar
);
7973 gimplify_assign (x
, avar
, &ilist
);
7974 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
7975 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
7976 && !TYPE_READONLY (TREE_TYPE (var
)))
7978 x
= unshare_expr (x
);
7979 x
= build_simple_mem_ref (x
);
7980 gimplify_assign (var
, x
, &olist
);
7985 var
= build_fold_addr_expr (var
);
7986 gimplify_assign (x
, var
, &ilist
);
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	  {
	    gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
	    s = TREE_TYPE (ovar);
	    if (TREE_CODE (s) == REFERENCE_TYPE)
	      s = TREE_TYPE (s);
	    s = TYPE_SIZE_UNIT (s);
	  }
	else
	  s = OMP_CLAUSE_SIZE (c);
	if (s == NULL_TREE)
	  s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
	s = fold_convert (size_type_node, s);
	purpose = size_int (map_idx++);
	CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	if (TREE_CODE (s) != INTEGER_CST)
	  TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;

	unsigned HOST_WIDE_INT tkind, tkind_zero;
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_MAP:
	    tkind = OMP_CLAUSE_MAP_KIND (c);
	    tkind_zero = tkind;
	    if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
	      switch (tkind)
		{
		case GOMP_MAP_ALLOC:
		case GOMP_MAP_TO:
		case GOMP_MAP_FROM:
		case GOMP_MAP_TOFROM:
		case GOMP_MAP_ALWAYS_TO:
		case GOMP_MAP_ALWAYS_FROM:
		case GOMP_MAP_ALWAYS_TOFROM:
		case GOMP_MAP_RELEASE:
		case GOMP_MAP_FORCE_TO:
		case GOMP_MAP_FORCE_FROM:
		case GOMP_MAP_FORCE_TOFROM:
		case GOMP_MAP_FORCE_PRESENT:
		  tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
		  break;
		case GOMP_MAP_DELETE:
		  tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
		default:
		  break;
		}
	    if (tkind_zero != tkind)
	      {
		if (integer_zerop (s))
		  tkind = tkind_zero;
		else if (integer_nonzerop (s))
		  tkind_zero = tkind;
	      }
	    break;
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
	    tkind = GOMP_MAP_TO;
	    tkind_zero = tkind;
	    break;
	  case OMP_CLAUSE_TO:
	    tkind = GOMP_MAP_TO;
	    tkind_zero = tkind;
	    break;
	  case OMP_CLAUSE_FROM:
	    tkind = GOMP_MAP_FROM;
	    tkind_zero = tkind;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	gcc_checking_assert (tkind
			     < (HOST_WIDE_INT_C (1U) << talign_shift));
	gcc_checking_assert (tkind_zero
			     < (HOST_WIDE_INT_C (1U) << talign_shift));
	talign = ceil_log2 (talign);
	tkind |= talign << talign_shift;
	tkind_zero |= talign << talign_shift;
	gcc_checking_assert (tkind
			     <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	gcc_checking_assert (tkind_zero
			     <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	if (tkind == tkind_zero)
	  x = build_int_cstu (tkind_type, tkind);
	else
	  {
	    TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
	    x = build3 (COND_EXPR, tkind_type,
			fold_build2 (EQ_EXPR, boolean_type_node,
				     unshare_expr (s), size_zero_node),
			build_int_cstu (tkind_type, tkind_zero),
			build_int_cstu (tkind_type, tkind));
	  }
	CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
	break;
      case OMP_CLAUSE_FIRSTPRIVATE:
	if (is_oacc_parallel (ctx))
	  goto oacc_firstprivate_map;
	ovar = OMP_CLAUSE_DECL (c);
	if (omp_is_reference (ovar))
	  talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
	else
	  talign = DECL_ALIGN_UNIT (ovar);
	var = lookup_decl_in_outer_ctx (ovar, ctx);
	x = build_sender_ref (ovar, ctx);
	tkind = GOMP_MAP_FIRSTPRIVATE;
	type = TREE_TYPE (ovar);
	if (omp_is_reference (ovar))
	  type = TREE_TYPE (type);
	if ((INTEGRAL_TYPE_P (type)
	     && TYPE_PRECISION (type) <= POINTER_SIZE)
	    || TREE_CODE (type) == POINTER_TYPE)
	  {
	    tkind = GOMP_MAP_FIRSTPRIVATE_INT;
	    tree t = var;
	    if (omp_is_reference (var))
	      t = build_simple_mem_ref (var);
	    else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
	      TREE_NO_WARNING (var) = 1;
	    if (TREE_CODE (type) != POINTER_TYPE)
	      t = fold_convert (pointer_sized_int_node, t);
	    t = fold_convert (TREE_TYPE (x), t);
	    gimplify_assign (x, t, &ilist);
	  }
	else if (omp_is_reference (var))
	  gimplify_assign (x, var, &ilist);
	else if (is_gimple_reg (var))
	  {
	    tree avar = create_tmp_var (TREE_TYPE (var));
	    mark_addressable (avar);
	    if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
	      TREE_NO_WARNING (var) = 1;
	    gimplify_assign (avar, var, &ilist);
	    avar = build_fold_addr_expr (avar);
	    gimplify_assign (x, avar, &ilist);
	  }
	else
	  {
	    var = build_fold_addr_expr (var);
	    gimplify_assign (x, var, &ilist);
	  }
	if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
	  s = size_int (0);
	else if (omp_is_reference (ovar))
	  s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
	else
	  s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
	s = fold_convert (size_type_node, s);
	purpose = size_int (map_idx++);
	CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	if (TREE_CODE (s) != INTEGER_CST)
	  TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;

	gcc_checking_assert (tkind
			     < (HOST_WIDE_INT_C (1U) << talign_shift));
	talign = ceil_log2 (talign);
	tkind |= talign << talign_shift;
	gcc_checking_assert (tkind
			     <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				build_int_cstu (tkind_type, tkind));
	break;
      case OMP_CLAUSE_USE_DEVICE_PTR:
      case OMP_CLAUSE_IS_DEVICE_PTR:
	ovar = OMP_CLAUSE_DECL (c);
	var = lookup_decl_in_outer_ctx (ovar, ctx);
	x = build_sender_ref (ovar, ctx);
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	  tkind = GOMP_MAP_USE_DEVICE_PTR;
	else
	  tkind = GOMP_MAP_FIRSTPRIVATE_INT;
	type = TREE_TYPE (ovar);
	if (TREE_CODE (type) == ARRAY_TYPE)
	  var = build_fold_addr_expr (var);
	else
	  {
	    if (omp_is_reference (ovar))
	      {
		type = TREE_TYPE (type);
		if (TREE_CODE (type) != ARRAY_TYPE)
		  var = build_simple_mem_ref (var);
		var = fold_convert (TREE_TYPE (x), var);
	      }
	  }
	gimplify_assign (x, var, &ilist);
	s = size_int (0);
	purpose = size_int (map_idx++);
	CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	gcc_checking_assert (tkind
			     < (HOST_WIDE_INT_C (1U) << talign_shift));
	gcc_checking_assert (tkind
			     <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				build_int_cstu (tkind_type, tkind));
	break;
      }

    gcc_assert (map_idx == map_cnt);
    DECL_INITIAL (TREE_VEC_ELT (t, 1))
      = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
    DECL_INITIAL (TREE_VEC_ELT (t, 2))
      = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
    for (int i = 1; i <= 2; i++)
      if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
	{
	  gimple_seq initlist = NULL;
	  force_gimple_operand (build1 (DECL_EXPR, void_type_node,
					TREE_VEC_ELT (t, i)),
				&initlist, true, NULL_TREE);
	  gimple_seq_add_seq (&ilist, initlist);

	  tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
					    NULL);
	  TREE_THIS_VOLATILE (clobber) = 1;
	  gimple_seq_add_stmt (&olist,
			       gimple_build_assign (TREE_VEC_ELT (t, i),
						    clobber));
	}

    tree clobber = build_constructor (ctx->record_type, NULL);
    TREE_THIS_VOLATILE (clobber) = 1;
    gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
						      clobber));
  }
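  /* To illustrate the marshalling built above (a sketch only; the
     ".omp_data_*" names stand for the artificial sender decls, and the
     exact shape varies with the clauses): for

	 #pragma omp target map(tofrom: a)

     the clause loop fills three parallel arrays, roughly

	 .omp_data_arr   = { &a };
	 .omp_data_sizes = { sizeof (a) };
	 .omp_data_kinds = { GOMP_MAP_TOFROM
			     | (ceil_log2 (alignment of a) << talign_shift) };

     which the libgomp runtime walks to set up the device mappings.  */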
  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  new_body = NULL;

  if (offloaded
      && ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }
  gimple_seq_add_seq (&new_body, fplist);

  if (offloaded || data_region)
    {
      tree prev = NULL_TREE;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var, x;
	  default:
	    break;
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (var)
		|| is_gimple_reg_type (TREE_TYPE (var)))
	      {
		tree new_var = lookup_decl (var, ctx);
		tree type;
		type = TREE_TYPE (var);
		if (omp_is_reference (var))
		  type = TREE_TYPE (type);
		if ((INTEGRAL_TYPE_P (type)
		     && TYPE_PRECISION (type) <= POINTER_SIZE)
		    || TREE_CODE (type) == POINTER_TYPE)
		  {
		    x = build_receiver_ref (var, false, ctx);
		    if (TREE_CODE (type) != POINTER_TYPE)
		      x = fold_convert (pointer_sized_int_node, x);
		    x = fold_convert (type, x);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    if (omp_is_reference (var))
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
		      }
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
		else
		  {
		    x = build_receiver_ref (var, !omp_is_reference (var), ctx);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
	      }
	    else if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_var = lookup_decl (pvar, ctx);
		x = build_receiver_ref (var, false, ctx);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  {
		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					    get_name (var));
		    gimple_add_tmp_var (x);
		    TREE_ADDRESSABLE (x) = 1;
		    x = build_fold_addr_expr_loc (clause_loc, x);
		  }
		else
		  break;

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_USE_DEVICE_PTR:
	  case OMP_CLAUSE_IS_DEVICE_PTR:
	    var = OMP_CLAUSE_DECL (c);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	      x = build_sender_ref (var, ctx);
	    else
	      x = build_receiver_ref (var, false, ctx);
	    if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_var = lookup_decl (pvar, ctx);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      {
		tree new_var = lookup_decl (var, ctx);
		new_var = DECL_VALUE_EXPR (new_var);
		gcc_assert (TREE_CODE (new_var) == MEM_REF);
		new_var = TREE_OPERAND (new_var, 0);
		gcc_assert (DECL_P (new_var));
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    else
	      {
		tree type = TREE_TYPE (var);
		tree new_var = lookup_decl (var, ctx);
		if (omp_is_reference (var))
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) != ARRAY_TYPE)
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			x = fold_convert (type, x);
			gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				       fb_rvalue);
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
		      }
		  }
		new_var = DECL_VALUE_EXPR (new_var);
		x = fold_convert (TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  }
      /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
	 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
	 are already handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs
	 or references to VLAs.  */
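      /* For instance (an illustrative sketch): with "int *p", the clause
	 map(tofrom: p[0:n]) is represented as a GOMP_MAP_TOFROM clause for
	 the array section plus a GOMP_MAP_FIRSTPRIVATE_POINTER clause for P
	 itself; P's private copy can only be pointed at the mapped section
	 once the receiver reference for the section exists, hence this
	 second pass.  */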
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var;
	  default:
	    break;
	  case OMP_CLAUSE_MAP:
	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		HOST_WIDE_INT offset = 0;

		var = OMP_CLAUSE_DECL (c);
		if (DECL_P (var)
		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
								      ctx))
		    && varpool_node::get_create (var)->offloadable)
		  break;
		if (TREE_CODE (var) == INDIRECT_REF
		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
		  var = TREE_OPERAND (var, 0);
		if (TREE_CODE (var) == COMPONENT_REF)
		  {
		    var = get_addr_base_and_unit_offset (var, &offset);
		    gcc_assert (var != NULL_TREE && DECL_P (var));
		  }
		else if (DECL_SIZE (var)
			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
		  {
		    tree var2 = DECL_VALUE_EXPR (var);
		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
		    var2 = TREE_OPERAND (var2, 0);
		    gcc_assert (DECL_P (var2));
		    var = var2;
		  }
		tree new_var = lookup_decl (var, ctx), x;
		tree type = TREE_TYPE (new_var);
		bool is_ref;
		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			== COMPONENT_REF))
		  {
		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
		    is_ref = true;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
		  {
		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else
		  is_ref = omp_is_reference (var);
		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		  is_ref = false;
		bool ref_to_array = false;
		if (is_ref)
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) == ARRAY_TYPE)
		      {
			type = build_pointer_type (type);
			ref_to_array = true;
		      }
		  }
		else if (TREE_CODE (type) == ARRAY_TYPE)
		  {
		    tree decl2 = DECL_VALUE_EXPR (new_var);
		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
		    decl2 = TREE_OPERAND (decl2, 0);
		    gcc_assert (DECL_P (decl2));
		    new_var = decl2;
		    type = TREE_TYPE (new_var);
		  }
		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
		x = fold_convert_loc (clause_loc, type, x);
		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
		  {
		    tree bias = OMP_CLAUSE_SIZE (c);
		    if (DECL_P (bias))
		      bias = lookup_decl (bias, ctx);
		    bias = fold_convert_loc (clause_loc, sizetype, bias);
		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
					    bias);
		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (x), x, bias);
		  }
		if (ref_to_array)
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		if (is_ref && !ref_to_array)
		  {
		    tree t = create_tmp_var_raw (type, get_name (var));
		    gimple_add_tmp_var (t);
		    TREE_ADDRESSABLE (t) = 1;
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (t, x));
		    x = build_fold_addr_expr_loc (clause_loc, t);
		  }
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
		prev = NULL_TREE;
	      }
	    else if (OMP_CLAUSE_CHAIN (c)
		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
			== OMP_CLAUSE_MAP
		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			 == GOMP_MAP_FIRSTPRIVATE_POINTER
			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	      prev = c;
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    var = OMP_CLAUSE_DECL (c);
	    if (is_variable_sized (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_pvar = lookup_decl (pvar, ctx);
		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		tree al = size_int (DECL_ALIGN (var));
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_pvar, x));
	      }
	    else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  break;
		else
		  {
		    tree atmp
		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
		    tree al = size_int (TYPE_ALIGN (rtype));
		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		  }

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  }
    }
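  /* A sketch of the OMP_CLAUSE_PRIVATE VLA case just above (the names are
     invented for illustration): for "int a[n];" privatized on the
     construct, the new body receives roughly

	 a.ptr = __builtin_alloca_with_align (n * sizeof (int),
					      DECL_ALIGN (a));

     where "a.ptr" stands for the artificial pointer behind A's
     DECL_VALUE_EXPR "*a.ptr", so the region gets its own allocation.  */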
  gimple_seq fork_seq = NULL;
  gimple_seq join_seq = NULL;

  if (is_oacc_parallel (ctx))
    {
      /* If there are reductions on the offloaded region itself, treat
	 them as a dummy GANG loop.  */
      tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);

      lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
			     false, NULL, NULL, &fork_seq, &join_seq, ctx);
    }

  gimple_seq_add_seq (&new_body, fork_seq);
  gimple_seq_add_seq (&new_body, tgt_body);
  gimple_seq_add_seq (&new_body, join_seq);

  if (offloaded)
    new_body = maybe_catch_exception (new_body);

  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
  gimple_omp_set_body (stmt, new_body);
  bind = gimple_build_bind (NULL, NULL,
			    tgt_bind ? gimple_bind_block (tgt_bind)
				     : NULL_TREE);
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}

/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
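
/* A rough sketch of the lowering above (illustrative only):

       #pragma omp teams num_teams(4) thread_limit(64)
       body;

   turns into a sequence along the lines of

       __builtin_GOMP_teams (4, 64);
       body;

   with the GIMPLE_OMP_TEAMS statement kept in front of the call as a
   marker for pass_expand_omp, the data-sharing setup emitted before it,
   and the reduction epilogue placed after the body.  */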
/* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */

static void
lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
		       gimple_build_omp_return (false));
}

/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}

/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  omp_context *ctx;
  vec<tree> *decls;
};

/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}

/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
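	    /* The net effect (sketched, assuming a cancellable region):
	       a plain
		   GOMP_barrier ();
	       has now become
		   _tmp = GOMP_barrier_cancel ();
		   if (_tmp != 0) goto <cancel_label>; else goto <fallthru>;
		   <fallthru>:
	       so a barrier that observes a pending cancellation branches
	       to the region's cancellation label.  */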
	    break;
	  default:
	    break;
	  }
      /* FALLTHRU */
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}

static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}

/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  return 0;
}

namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}

/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

static splay_tree all_labels;
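
/* For example (an illustrative sketch), the two passes below reject

       #pragma omp parallel
       {
	 goto bad;	<-- branches out of the structured block
       }
     bad:;

   because the goto's recorded context (the parallel) differs from the
   label's context (none), producing an "invalid ... structured block"
   error and replacing the branch with a nop.  */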
/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if (flag_cilkplus)
    {
      if ((branch_ctx
	   && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
	   && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
	  || (label_ctx
	      && gimple_code (label_ctx) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
	kind = "Cilk Plus";
    }
  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    /* Otherwise, be vague and lazy, but efficient.  */
    error ("invalid branch to/from %s structured block", kind);

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}

/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}

namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_cilkplus || flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}

#include "gt-omp-low.h"