1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2018 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* Nesting depth of this context. Used to beautify error messages re
118 invalid gotos. The outermost ctx is depth 1, with depth 0 being
119 reserved for the main body of the function. */
122 /* True if this parallel directive is nested within another. */
125 /* True if this construct can be cancelled. */
129 static splay_tree all_contexts
;
130 static int taskreg_nesting_level
;
131 static int target_nesting_level
;
132 static bitmap task_shared_vars
;
133 static vec
<omp_context
*> taskreg_contexts
;
135 static void scan_omp (gimple_seq
*, omp_context
*);
136 static tree
scan_omp_1_op (tree
*, int *, void *);
138 #define WALK_SUBSTMTS \
142 case GIMPLE_EH_FILTER: \
143 case GIMPLE_TRANSACTION: \
144 /* The sub-statements for these should be walked. */ \
145 *handled_ops_p = false; \
148 /* Return true if CTX corresponds to an oacc parallel region. */
151 is_oacc_parallel (omp_context
*ctx
)
153 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
154 return ((outer_type
== GIMPLE_OMP_TARGET
)
155 && (gimple_omp_target_kind (ctx
->stmt
)
156 == GF_OMP_TARGET_KIND_OACC_PARALLEL
));
159 /* Return true if CTX corresponds to an oacc kernels region. */
162 is_oacc_kernels (omp_context
*ctx
)
164 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
165 return ((outer_type
== GIMPLE_OMP_TARGET
)
166 && (gimple_omp_target_kind (ctx
->stmt
)
167 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
170 /* If DECL is the artificial dummy VAR_DECL created for non-static
171 data member privatization, return the underlying "this" parameter,
172 otherwise return NULL. */
175 omp_member_access_dummy_var (tree decl
)
178 || !DECL_ARTIFICIAL (decl
)
179 || !DECL_IGNORED_P (decl
)
180 || !DECL_HAS_VALUE_EXPR_P (decl
)
181 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
184 tree v
= DECL_VALUE_EXPR (decl
);
185 if (TREE_CODE (v
) != COMPONENT_REF
)
189 switch (TREE_CODE (v
))
195 case POINTER_PLUS_EXPR
:
196 v
= TREE_OPERAND (v
, 0);
199 if (DECL_CONTEXT (v
) == current_function_decl
200 && DECL_ARTIFICIAL (v
)
201 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
209 /* Helper for unshare_and_remap, called through walk_tree. */
212 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
214 tree
*pair
= (tree
*) data
;
217 *tp
= unshare_expr (pair
[1]);
220 else if (IS_TYPE_OR_DECL_P (*tp
))
225 /* Return unshare_expr (X) with all occurrences of FROM
229 unshare_and_remap (tree x
, tree from
, tree to
)
231 tree pair
[2] = { from
, to
};
232 x
= unshare_expr (x
);
233 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
237 /* Convenience function for calling scan_omp_1_op on tree operands. */
240 scan_omp_op (tree
*tp
, omp_context
*ctx
)
242 struct walk_stmt_info wi
;
244 memset (&wi
, 0, sizeof (wi
));
246 wi
.want_locations
= true;
248 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
251 static void lower_omp (gimple_seq
*, omp_context
*);
252 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
253 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
255 /* Return true if CTX is for an omp parallel. */
258 is_parallel_ctx (omp_context
*ctx
)
260 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
264 /* Return true if CTX is for an omp task. */
267 is_task_ctx (omp_context
*ctx
)
269 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
273 /* Return true if CTX is for an omp taskloop. */
276 is_taskloop_ctx (omp_context
*ctx
)
278 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
279 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
283 /* Return true if CTX is for an omp parallel or omp task. */
286 is_taskreg_ctx (omp_context
*ctx
)
288 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
);
291 /* Return true if EXPR is variable sized. */
294 is_variable_sized (const_tree expr
)
296 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
299 /* Lookup variables. The "maybe" form
300 allows for the variable form to not have been entered, otherwise we
301 assert that the variable must have been entered. */
304 lookup_decl (tree var
, omp_context
*ctx
)
306 tree
*n
= ctx
->cb
.decl_map
->get (var
);
311 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
313 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
314 return n
? *n
: NULL_TREE
;
318 lookup_field (tree var
, omp_context
*ctx
)
321 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
322 return (tree
) n
->value
;
326 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
329 n
= splay_tree_lookup (ctx
->sfield_map
330 ? ctx
->sfield_map
: ctx
->field_map
, key
);
331 return (tree
) n
->value
;
335 lookup_sfield (tree var
, omp_context
*ctx
)
337 return lookup_sfield ((splay_tree_key
) var
, ctx
);
341 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
344 n
= splay_tree_lookup (ctx
->field_map
, key
);
345 return n
? (tree
) n
->value
: NULL_TREE
;
349 maybe_lookup_field (tree var
, omp_context
*ctx
)
351 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
354 /* Return true if DECL should be copied by pointer. SHARED_CTX is
355 the parallel context if DECL is to be shared. */
358 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
360 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
361 || TYPE_ATOMIC (TREE_TYPE (decl
)))
364 /* We can only use copy-in/copy-out semantics for shared variables
365 when we know the value is not accessible from an outer scope. */
368 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
370 /* ??? Trivially accessible from anywhere. But why would we even
371 be passing an address in this case? Should we simply assert
372 this to be false, or should we have a cleanup pass that removes
373 these from the list of mappings? */
374 if (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
377 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
378 without analyzing the expression whether or not its location
379 is accessible to anyone else. In the case of nested parallel
380 regions it certainly may be. */
381 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
384 /* Do not use copy-in/copy-out for variables that have their
386 if (TREE_ADDRESSABLE (decl
))
389 /* lower_send_shared_vars only uses copy-in, but not copy-out
391 if (TREE_READONLY (decl
)
392 || ((TREE_CODE (decl
) == RESULT_DECL
393 || TREE_CODE (decl
) == PARM_DECL
)
394 && DECL_BY_REFERENCE (decl
)))
397 /* Disallow copy-in/out in nested parallel if
398 decl is shared in outer parallel, otherwise
399 each thread could store the shared variable
400 in its own copy-in location, making the
401 variable no longer really shared. */
402 if (shared_ctx
->is_nested
)
406 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
407 if (is_taskreg_ctx (up
) && maybe_lookup_decl (decl
, up
))
414 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
415 c
; c
= OMP_CLAUSE_CHAIN (c
))
416 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
417 && OMP_CLAUSE_DECL (c
) == decl
)
421 goto maybe_mark_addressable_and_ret
;
425 /* For tasks avoid using copy-in/out. As tasks can be
426 deferred or executed in different thread, when GOMP_task
427 returns, the task hasn't necessarily terminated. */
428 if (is_task_ctx (shared_ctx
))
431 maybe_mark_addressable_and_ret
:
432 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
433 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
435 /* Taking address of OUTER in lower_send_shared_vars
436 might need regimplification of everything that uses the
438 if (!task_shared_vars
)
439 task_shared_vars
= BITMAP_ALLOC (NULL
);
440 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
441 TREE_ADDRESSABLE (outer
) = 1;
450 /* Construct a new automatic decl similar to VAR. */
453 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
455 tree copy
= copy_var_decl (var
, name
, type
);
457 DECL_CONTEXT (copy
) = current_function_decl
;
458 DECL_CHAIN (copy
) = ctx
->block_vars
;
459 /* If VAR is listed in task_shared_vars, it means it wasn't
460 originally addressable and is just because task needs to take
461 it's address. But we don't need to take address of privatizations
463 if (TREE_ADDRESSABLE (var
)
465 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
466 TREE_ADDRESSABLE (copy
) = 0;
467 ctx
->block_vars
= copy
;
473 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
475 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
478 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
481 omp_build_component_ref (tree obj
, tree field
)
483 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
484 if (TREE_THIS_VOLATILE (field
))
485 TREE_THIS_VOLATILE (ret
) |= 1;
486 if (TREE_READONLY (field
))
487 TREE_READONLY (ret
) |= 1;
491 /* Build tree nodes to access the field for VAR on the receiver side. */
494 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
496 tree x
, field
= lookup_field (var
, ctx
);
498 /* If the receiver record type was remapped in the child function,
499 remap the field into the new record type. */
500 x
= maybe_lookup_field (field
, ctx
);
504 x
= build_simple_mem_ref (ctx
->receiver_decl
);
505 TREE_THIS_NOTRAP (x
) = 1;
506 x
= omp_build_component_ref (x
, field
);
509 x
= build_simple_mem_ref (x
);
510 TREE_THIS_NOTRAP (x
) = 1;
516 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
517 of a parallel, this is a component reference; for workshare constructs
518 this is some variable. */
521 build_outer_var_ref (tree var
, omp_context
*ctx
,
522 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
526 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
528 else if (is_variable_sized (var
))
530 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
531 x
= build_outer_var_ref (x
, ctx
, code
);
532 x
= build_simple_mem_ref (x
);
534 else if (is_taskreg_ctx (ctx
))
536 bool by_ref
= use_pointer_for_field (var
, NULL
);
537 x
= build_receiver_ref (var
, by_ref
, ctx
);
539 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
540 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
541 || (code
== OMP_CLAUSE_PRIVATE
542 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
543 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
544 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
546 /* #pragma omp simd isn't a worksharing construct, and can reference
547 even private vars in its linear etc. clauses.
548 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
549 to private vars in all worksharing constructs. */
551 if (ctx
->outer
&& is_taskreg_ctx (ctx
))
552 x
= lookup_decl (var
, ctx
->outer
);
554 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
558 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
560 gcc_assert (ctx
->outer
);
562 = splay_tree_lookup (ctx
->outer
->field_map
,
563 (splay_tree_key
) &DECL_UID (var
));
566 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
->outer
)))
569 x
= lookup_decl (var
, ctx
->outer
);
573 tree field
= (tree
) n
->value
;
574 /* If the receiver record type was remapped in the child function,
575 remap the field into the new record type. */
576 x
= maybe_lookup_field (field
, ctx
->outer
);
580 x
= build_simple_mem_ref (ctx
->outer
->receiver_decl
);
581 x
= omp_build_component_ref (x
, field
);
582 if (use_pointer_for_field (var
, ctx
->outer
))
583 x
= build_simple_mem_ref (x
);
588 omp_context
*outer
= ctx
->outer
;
589 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_GRID_BODY
)
591 outer
= outer
->outer
;
593 && gimple_code (outer
->stmt
) != GIMPLE_OMP_GRID_BODY
);
595 x
= lookup_decl (var
, outer
);
597 else if (omp_is_reference (var
))
598 /* This can happen with orphaned constructs. If var is reference, it is
599 possible it is shared and as such valid. */
601 else if (omp_member_access_dummy_var (var
))
608 tree t
= omp_member_access_dummy_var (var
);
611 x
= DECL_VALUE_EXPR (var
);
612 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
614 x
= unshare_and_remap (x
, t
, o
);
616 x
= unshare_expr (x
);
620 if (omp_is_reference (var
))
621 x
= build_simple_mem_ref (x
);
626 /* Build tree nodes to access the field for VAR on the sender side. */
629 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
631 tree field
= lookup_sfield (key
, ctx
);
632 return omp_build_component_ref (ctx
->sender_decl
, field
);
636 build_sender_ref (tree var
, omp_context
*ctx
)
638 return build_sender_ref ((splay_tree_key
) var
, ctx
);
641 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
642 BASE_POINTERS_RESTRICT, declare the field with restrict. */
645 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
,
646 bool base_pointers_restrict
= false)
648 tree field
, type
, sfield
= NULL_TREE
;
649 splay_tree_key key
= (splay_tree_key
) var
;
653 key
= (splay_tree_key
) &DECL_UID (var
);
654 gcc_checking_assert (key
!= (splay_tree_key
) var
);
656 gcc_assert ((mask
& 1) == 0
657 || !splay_tree_lookup (ctx
->field_map
, key
));
658 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
659 || !splay_tree_lookup (ctx
->sfield_map
, key
));
660 gcc_assert ((mask
& 3) == 3
661 || !is_gimple_omp_oacc (ctx
->stmt
));
663 type
= TREE_TYPE (var
);
664 /* Prevent redeclaring the var in the split-off function with a restrict
665 pointer type. Note that we only clear type itself, restrict qualifiers in
666 the pointed-to type will be ignored by points-to analysis. */
667 if (POINTER_TYPE_P (type
)
668 && TYPE_RESTRICT (type
))
669 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
673 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
674 type
= build_pointer_type (build_pointer_type (type
));
678 type
= build_pointer_type (type
);
679 if (base_pointers_restrict
)
680 type
= build_qualified_type (type
, TYPE_QUAL_RESTRICT
);
682 else if ((mask
& 3) == 1 && omp_is_reference (var
))
683 type
= TREE_TYPE (type
);
685 field
= build_decl (DECL_SOURCE_LOCATION (var
),
686 FIELD_DECL
, DECL_NAME (var
), type
);
688 /* Remember what variable this field was created for. This does have a
689 side effect of making dwarf2out ignore this member, so for helpful
690 debugging we clear it later in delete_omp_context. */
691 DECL_ABSTRACT_ORIGIN (field
) = var
;
692 if (type
== TREE_TYPE (var
))
694 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
695 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
696 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
699 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
703 insert_field_into_struct (ctx
->record_type
, field
);
704 if (ctx
->srecord_type
)
706 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
707 FIELD_DECL
, DECL_NAME (var
), type
);
708 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
709 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
710 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
711 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
712 insert_field_into_struct (ctx
->srecord_type
, sfield
);
717 if (ctx
->srecord_type
== NULL_TREE
)
721 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
722 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
723 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
725 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
726 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
727 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
728 insert_field_into_struct (ctx
->srecord_type
, sfield
);
729 splay_tree_insert (ctx
->sfield_map
,
730 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
731 (splay_tree_value
) sfield
);
735 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
736 : ctx
->srecord_type
, field
);
740 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
741 if ((mask
& 2) && ctx
->sfield_map
)
742 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
746 install_var_local (tree var
, omp_context
*ctx
)
748 tree new_var
= omp_copy_decl_1 (var
, ctx
);
749 insert_decl_map (&ctx
->cb
, var
, new_var
);
753 /* Adjust the replacement for DECL in CTX for the new context. This means
754 copying the DECL_VALUE_EXPR, and fixing up the type. */
757 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
761 new_decl
= lookup_decl (decl
, ctx
);
763 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
765 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
766 && DECL_HAS_VALUE_EXPR_P (decl
))
768 tree ve
= DECL_VALUE_EXPR (decl
);
769 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
770 SET_DECL_VALUE_EXPR (new_decl
, ve
);
771 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
774 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
776 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
777 if (size
== error_mark_node
)
778 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
779 DECL_SIZE (new_decl
) = size
;
781 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
782 if (size
== error_mark_node
)
783 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
784 DECL_SIZE_UNIT (new_decl
) = size
;
788 /* The callback for remap_decl. Search all containing contexts for a
789 mapping of the variable; this avoids having to duplicate the splay
790 tree ahead of time. We know a mapping doesn't already exist in the
791 given context. Create new mappings to implement default semantics. */
794 omp_copy_decl (tree var
, copy_body_data
*cb
)
796 omp_context
*ctx
= (omp_context
*) cb
;
799 if (TREE_CODE (var
) == LABEL_DECL
)
801 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
803 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
804 DECL_CONTEXT (new_var
) = current_function_decl
;
805 insert_decl_map (&ctx
->cb
, var
, new_var
);
809 while (!is_taskreg_ctx (ctx
))
814 new_var
= maybe_lookup_decl (var
, ctx
);
819 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
822 return error_mark_node
;
825 /* Create a new context, with OUTER_CTX being the surrounding context. */
828 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
830 omp_context
*ctx
= XCNEW (omp_context
);
832 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
833 (splay_tree_value
) ctx
);
838 ctx
->outer
= outer_ctx
;
839 ctx
->cb
= outer_ctx
->cb
;
840 ctx
->cb
.block
= NULL
;
841 ctx
->depth
= outer_ctx
->depth
+ 1;
845 ctx
->cb
.src_fn
= current_function_decl
;
846 ctx
->cb
.dst_fn
= current_function_decl
;
847 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
848 gcc_checking_assert (ctx
->cb
.src_node
);
849 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
850 ctx
->cb
.src_cfun
= cfun
;
851 ctx
->cb
.copy_decl
= omp_copy_decl
;
852 ctx
->cb
.eh_lp_nr
= 0;
853 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
857 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
862 static gimple_seq
maybe_catch_exception (gimple_seq
);
864 /* Finalize task copyfn. */
867 finalize_task_copyfn (gomp_task
*task_stmt
)
869 struct function
*child_cfun
;
871 gimple_seq seq
= NULL
, new_seq
;
874 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
875 if (child_fn
== NULL_TREE
)
878 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
879 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
881 push_cfun (child_cfun
);
882 bind
= gimplify_body (child_fn
, false);
883 gimple_seq_add_stmt (&seq
, bind
);
884 new_seq
= maybe_catch_exception (seq
);
887 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
889 gimple_seq_add_stmt (&seq
, bind
);
891 gimple_set_body (child_fn
, seq
);
894 /* Inform the callgraph about the new function. */
895 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
896 node
->parallelized_function
= 1;
897 cgraph_node::add_new_function (child_fn
, false);
900 /* Destroy a omp_context data structures. Called through the splay tree
901 value delete callback. */
904 delete_omp_context (splay_tree_value value
)
906 omp_context
*ctx
= (omp_context
*) value
;
908 delete ctx
->cb
.decl_map
;
911 splay_tree_delete (ctx
->field_map
);
913 splay_tree_delete (ctx
->sfield_map
);
915 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
916 it produces corrupt debug information. */
917 if (ctx
->record_type
)
920 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
921 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
923 if (ctx
->srecord_type
)
926 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
927 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
930 if (is_task_ctx (ctx
))
931 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
936 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
940 fixup_child_record_type (omp_context
*ctx
)
942 tree f
, type
= ctx
->record_type
;
944 if (!ctx
->receiver_decl
)
946 /* ??? It isn't sufficient to just call remap_type here, because
947 variably_modified_type_p doesn't work the way we expect for
948 record types. Testing each field for whether it needs remapping
949 and creating a new record by hand works, however. */
950 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
951 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
955 tree name
, new_fields
= NULL
;
957 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
958 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
959 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
960 TYPE_DECL
, name
, type
);
961 TYPE_NAME (type
) = name
;
963 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
965 tree new_f
= copy_node (f
);
966 DECL_CONTEXT (new_f
) = type
;
967 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
968 DECL_CHAIN (new_f
) = new_fields
;
969 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
970 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
972 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
976 /* Arrange to be able to look up the receiver field
977 given the sender field. */
978 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
979 (splay_tree_value
) new_f
);
981 TYPE_FIELDS (type
) = nreverse (new_fields
);
985 /* In a target region we never modify any of the pointers in *.omp_data_i,
986 so attempt to help the optimizers. */
987 if (is_gimple_omp_offloaded (ctx
->stmt
))
988 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
990 TREE_TYPE (ctx
->receiver_decl
)
991 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
994 /* Instantiate decls as necessary in CTX to satisfy the data sharing
995 specified by CLAUSES. If BASE_POINTERS_RESTRICT, install var field with
999 scan_sharing_clauses (tree clauses
, omp_context
*ctx
,
1000 bool base_pointers_restrict
= false)
1003 bool scan_array_reductions
= false;
1005 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1009 switch (OMP_CLAUSE_CODE (c
))
1011 case OMP_CLAUSE_PRIVATE
:
1012 decl
= OMP_CLAUSE_DECL (c
);
1013 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1015 else if (!is_variable_sized (decl
))
1016 install_var_local (decl
, ctx
);
1019 case OMP_CLAUSE_SHARED
:
1020 decl
= OMP_CLAUSE_DECL (c
);
1021 /* Ignore shared directives in teams construct. */
1022 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
1024 /* Global variables don't need to be copied,
1025 the receiver side will use them directly. */
1026 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1027 if (is_global_var (odecl
))
1029 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1032 gcc_assert (is_taskreg_ctx (ctx
));
1033 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1034 || !is_variable_sized (decl
));
1035 /* Global variables don't need to be copied,
1036 the receiver side will use them directly. */
1037 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1039 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1041 use_pointer_for_field (decl
, ctx
);
1044 by_ref
= use_pointer_for_field (decl
, NULL
);
1045 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1046 || TREE_ADDRESSABLE (decl
)
1048 || omp_is_reference (decl
))
1050 by_ref
= use_pointer_for_field (decl
, ctx
);
1051 install_var_field (decl
, by_ref
, 3, ctx
);
1052 install_var_local (decl
, ctx
);
1055 /* We don't need to copy const scalar vars back. */
1056 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1059 case OMP_CLAUSE_REDUCTION
:
1060 decl
= OMP_CLAUSE_DECL (c
);
1061 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1062 && TREE_CODE (decl
) == MEM_REF
)
1064 tree t
= TREE_OPERAND (decl
, 0);
1065 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1066 t
= TREE_OPERAND (t
, 0);
1067 if (TREE_CODE (t
) == INDIRECT_REF
1068 || TREE_CODE (t
) == ADDR_EXPR
)
1069 t
= TREE_OPERAND (t
, 0);
1070 install_var_local (t
, ctx
);
1071 if (is_taskreg_ctx (ctx
)
1072 && !is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1073 && !is_variable_sized (t
))
1075 by_ref
= use_pointer_for_field (t
, ctx
);
1076 install_var_field (t
, by_ref
, 3, ctx
);
1082 case OMP_CLAUSE_LASTPRIVATE
:
1083 /* Let the corresponding firstprivate clause create
1085 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1089 case OMP_CLAUSE_FIRSTPRIVATE
:
1090 case OMP_CLAUSE_LINEAR
:
1091 decl
= OMP_CLAUSE_DECL (c
);
1093 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1094 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1095 && is_gimple_omp_offloaded (ctx
->stmt
))
1097 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1098 install_var_field (decl
, !omp_is_reference (decl
), 3, ctx
);
1099 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1100 install_var_field (decl
, true, 3, ctx
);
1102 install_var_field (decl
, false, 3, ctx
);
1104 if (is_variable_sized (decl
))
1106 if (is_task_ctx (ctx
))
1107 install_var_field (decl
, false, 1, ctx
);
1110 else if (is_taskreg_ctx (ctx
))
1113 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1114 by_ref
= use_pointer_for_field (decl
, NULL
);
1116 if (is_task_ctx (ctx
)
1117 && (global
|| by_ref
|| omp_is_reference (decl
)))
1119 install_var_field (decl
, false, 1, ctx
);
1121 install_var_field (decl
, by_ref
, 2, ctx
);
1124 install_var_field (decl
, by_ref
, 3, ctx
);
1126 install_var_local (decl
, ctx
);
1129 case OMP_CLAUSE_USE_DEVICE_PTR
:
1130 decl
= OMP_CLAUSE_DECL (c
);
1131 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1132 install_var_field (decl
, true, 3, ctx
);
1134 install_var_field (decl
, false, 3, ctx
);
1135 if (DECL_SIZE (decl
)
1136 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1138 tree decl2
= DECL_VALUE_EXPR (decl
);
1139 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1140 decl2
= TREE_OPERAND (decl2
, 0);
1141 gcc_assert (DECL_P (decl2
));
1142 install_var_local (decl2
, ctx
);
1144 install_var_local (decl
, ctx
);
1147 case OMP_CLAUSE_IS_DEVICE_PTR
:
1148 decl
= OMP_CLAUSE_DECL (c
);
1151 case OMP_CLAUSE__LOOPTEMP_
:
1152 gcc_assert (is_taskreg_ctx (ctx
));
1153 decl
= OMP_CLAUSE_DECL (c
);
1154 install_var_field (decl
, false, 3, ctx
);
1155 install_var_local (decl
, ctx
);
1158 case OMP_CLAUSE_COPYPRIVATE
:
1159 case OMP_CLAUSE_COPYIN
:
1160 decl
= OMP_CLAUSE_DECL (c
);
1161 by_ref
= use_pointer_for_field (decl
, NULL
);
1162 install_var_field (decl
, by_ref
, 3, ctx
);
1165 case OMP_CLAUSE_FINAL
:
1167 case OMP_CLAUSE_NUM_THREADS
:
1168 case OMP_CLAUSE_NUM_TEAMS
:
1169 case OMP_CLAUSE_THREAD_LIMIT
:
1170 case OMP_CLAUSE_DEVICE
:
1171 case OMP_CLAUSE_SCHEDULE
:
1172 case OMP_CLAUSE_DIST_SCHEDULE
:
1173 case OMP_CLAUSE_DEPEND
:
1174 case OMP_CLAUSE_PRIORITY
:
1175 case OMP_CLAUSE_GRAINSIZE
:
1176 case OMP_CLAUSE_NUM_TASKS
:
1177 case OMP_CLAUSE_NUM_GANGS
:
1178 case OMP_CLAUSE_NUM_WORKERS
:
1179 case OMP_CLAUSE_VECTOR_LENGTH
:
1181 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1185 case OMP_CLAUSE_FROM
:
1186 case OMP_CLAUSE_MAP
:
1188 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1189 decl
= OMP_CLAUSE_DECL (c
);
1190 /* Global variables with "omp declare target" attribute
1191 don't need to be copied, the receiver side will use them
1192 directly. However, global variables with "omp declare target link"
1193 attribute need to be copied. */
1194 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1196 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1197 && (OMP_CLAUSE_MAP_KIND (c
)
1198 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1199 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1200 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1201 && varpool_node::get_create (decl
)->offloadable
1202 && !lookup_attribute ("omp declare target link",
1203 DECL_ATTRIBUTES (decl
)))
1205 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1206 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1208 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1209 not offloaded; there is nothing to map for those. */
1210 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1211 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1212 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1215 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1216 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1217 || (OMP_CLAUSE_MAP_KIND (c
)
1218 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1220 if (TREE_CODE (decl
) == COMPONENT_REF
1221 || (TREE_CODE (decl
) == INDIRECT_REF
1222 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1223 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1224 == REFERENCE_TYPE
)))
1226 if (DECL_SIZE (decl
)
1227 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1229 tree decl2
= DECL_VALUE_EXPR (decl
);
1230 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1231 decl2
= TREE_OPERAND (decl2
, 0);
1232 gcc_assert (DECL_P (decl2
));
1233 install_var_local (decl2
, ctx
);
1235 install_var_local (decl
, ctx
);
1240 if (DECL_SIZE (decl
)
1241 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1243 tree decl2
= DECL_VALUE_EXPR (decl
);
1244 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1245 decl2
= TREE_OPERAND (decl2
, 0);
1246 gcc_assert (DECL_P (decl2
));
1247 install_var_field (decl2
, true, 3, ctx
);
1248 install_var_local (decl2
, ctx
);
1249 install_var_local (decl
, ctx
);
1253 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1254 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1255 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1256 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1257 install_var_field (decl
, true, 7, ctx
);
1259 install_var_field (decl
, true, 3, ctx
,
1260 base_pointers_restrict
);
1261 if (is_gimple_omp_offloaded (ctx
->stmt
)
1262 && !OMP_CLAUSE_MAP_IN_REDUCTION (c
))
1263 install_var_local (decl
, ctx
);
1268 tree base
= get_base_address (decl
);
1269 tree nc
= OMP_CLAUSE_CHAIN (c
);
1272 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1273 && OMP_CLAUSE_DECL (nc
) == base
1274 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1275 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1277 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1278 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1284 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1285 decl
= OMP_CLAUSE_DECL (c
);
1287 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1288 (splay_tree_key
) decl
));
1290 = build_decl (OMP_CLAUSE_LOCATION (c
),
1291 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1292 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1293 insert_field_into_struct (ctx
->record_type
, field
);
1294 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1295 (splay_tree_value
) field
);
1300 case OMP_CLAUSE__GRIDDIM_
:
1303 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c
), ctx
->outer
);
1304 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c
), ctx
->outer
);
1308 case OMP_CLAUSE_NOWAIT
:
1309 case OMP_CLAUSE_ORDERED
:
1310 case OMP_CLAUSE_COLLAPSE
:
1311 case OMP_CLAUSE_UNTIED
:
1312 case OMP_CLAUSE_MERGEABLE
:
1313 case OMP_CLAUSE_PROC_BIND
:
1314 case OMP_CLAUSE_SAFELEN
:
1315 case OMP_CLAUSE_SIMDLEN
:
1316 case OMP_CLAUSE_THREADS
:
1317 case OMP_CLAUSE_SIMD
:
1318 case OMP_CLAUSE_NOGROUP
:
1319 case OMP_CLAUSE_DEFAULTMAP
:
1320 case OMP_CLAUSE_ASYNC
:
1321 case OMP_CLAUSE_WAIT
:
1322 case OMP_CLAUSE_GANG
:
1323 case OMP_CLAUSE_WORKER
:
1324 case OMP_CLAUSE_VECTOR
:
1325 case OMP_CLAUSE_INDEPENDENT
:
1326 case OMP_CLAUSE_AUTO
:
1327 case OMP_CLAUSE_SEQ
:
1328 case OMP_CLAUSE_TILE
:
1329 case OMP_CLAUSE__SIMT_
:
1330 case OMP_CLAUSE_DEFAULT
:
1333 case OMP_CLAUSE_ALIGNED
:
1334 decl
= OMP_CLAUSE_DECL (c
);
1335 if (is_global_var (decl
)
1336 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1337 install_var_local (decl
, ctx
);
1340 case OMP_CLAUSE__CACHE_
:
1346 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1348 switch (OMP_CLAUSE_CODE (c
))
1350 case OMP_CLAUSE_LASTPRIVATE
:
1351 /* Let the corresponding firstprivate clause create
1353 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1354 scan_array_reductions
= true;
1355 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1359 case OMP_CLAUSE_FIRSTPRIVATE
:
1360 case OMP_CLAUSE_PRIVATE
:
1361 case OMP_CLAUSE_LINEAR
:
1362 case OMP_CLAUSE_IS_DEVICE_PTR
:
1363 decl
= OMP_CLAUSE_DECL (c
);
1364 if (is_variable_sized (decl
))
1366 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1367 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1368 && is_gimple_omp_offloaded (ctx
->stmt
))
1370 tree decl2
= DECL_VALUE_EXPR (decl
);
1371 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1372 decl2
= TREE_OPERAND (decl2
, 0);
1373 gcc_assert (DECL_P (decl2
));
1374 install_var_local (decl2
, ctx
);
1375 fixup_remapped_decl (decl2
, ctx
, false);
1377 install_var_local (decl
, ctx
);
1379 fixup_remapped_decl (decl
, ctx
,
1380 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1381 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1382 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1383 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1384 scan_array_reductions
= true;
1387 case OMP_CLAUSE_REDUCTION
:
1388 decl
= OMP_CLAUSE_DECL (c
);
1389 if (TREE_CODE (decl
) != MEM_REF
)
1391 if (is_variable_sized (decl
))
1392 install_var_local (decl
, ctx
);
1393 fixup_remapped_decl (decl
, ctx
, false);
1395 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1396 scan_array_reductions
= true;
1399 case OMP_CLAUSE_SHARED
:
1400 /* Ignore shared directives in teams construct. */
1401 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
1403 decl
= OMP_CLAUSE_DECL (c
);
1404 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1406 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1408 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1411 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1412 install_var_field (decl
, by_ref
, 11, ctx
);
1415 fixup_remapped_decl (decl
, ctx
, false);
1418 case OMP_CLAUSE_MAP
:
1419 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1421 decl
= OMP_CLAUSE_DECL (c
);
1423 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1424 && (OMP_CLAUSE_MAP_KIND (c
)
1425 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1426 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1427 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1428 && varpool_node::get_create (decl
)->offloadable
)
1432 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1433 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1434 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1435 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1437 tree new_decl
= lookup_decl (decl
, ctx
);
1438 TREE_TYPE (new_decl
)
1439 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1441 else if (DECL_SIZE (decl
)
1442 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1444 tree decl2
= DECL_VALUE_EXPR (decl
);
1445 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1446 decl2
= TREE_OPERAND (decl2
, 0);
1447 gcc_assert (DECL_P (decl2
));
1448 fixup_remapped_decl (decl2
, ctx
, false);
1449 fixup_remapped_decl (decl
, ctx
, true);
1452 fixup_remapped_decl (decl
, ctx
, false);
1456 case OMP_CLAUSE_COPYPRIVATE
:
1457 case OMP_CLAUSE_COPYIN
:
1458 case OMP_CLAUSE_DEFAULT
:
1460 case OMP_CLAUSE_NUM_THREADS
:
1461 case OMP_CLAUSE_NUM_TEAMS
:
1462 case OMP_CLAUSE_THREAD_LIMIT
:
1463 case OMP_CLAUSE_DEVICE
:
1464 case OMP_CLAUSE_SCHEDULE
:
1465 case OMP_CLAUSE_DIST_SCHEDULE
:
1466 case OMP_CLAUSE_NOWAIT
:
1467 case OMP_CLAUSE_ORDERED
:
1468 case OMP_CLAUSE_COLLAPSE
:
1469 case OMP_CLAUSE_UNTIED
:
1470 case OMP_CLAUSE_FINAL
:
1471 case OMP_CLAUSE_MERGEABLE
:
1472 case OMP_CLAUSE_PROC_BIND
:
1473 case OMP_CLAUSE_SAFELEN
:
1474 case OMP_CLAUSE_SIMDLEN
:
1475 case OMP_CLAUSE_ALIGNED
:
1476 case OMP_CLAUSE_DEPEND
:
1477 case OMP_CLAUSE__LOOPTEMP_
:
1479 case OMP_CLAUSE_FROM
:
1480 case OMP_CLAUSE_PRIORITY
:
1481 case OMP_CLAUSE_GRAINSIZE
:
1482 case OMP_CLAUSE_NUM_TASKS
:
1483 case OMP_CLAUSE_THREADS
:
1484 case OMP_CLAUSE_SIMD
:
1485 case OMP_CLAUSE_NOGROUP
:
1486 case OMP_CLAUSE_DEFAULTMAP
:
1487 case OMP_CLAUSE_USE_DEVICE_PTR
:
1488 case OMP_CLAUSE_ASYNC
:
1489 case OMP_CLAUSE_WAIT
:
1490 case OMP_CLAUSE_NUM_GANGS
:
1491 case OMP_CLAUSE_NUM_WORKERS
:
1492 case OMP_CLAUSE_VECTOR_LENGTH
:
1493 case OMP_CLAUSE_GANG
:
1494 case OMP_CLAUSE_WORKER
:
1495 case OMP_CLAUSE_VECTOR
:
1496 case OMP_CLAUSE_INDEPENDENT
:
1497 case OMP_CLAUSE_AUTO
:
1498 case OMP_CLAUSE_SEQ
:
1499 case OMP_CLAUSE_TILE
:
1500 case OMP_CLAUSE__GRIDDIM_
:
1501 case OMP_CLAUSE__SIMT_
:
1504 case OMP_CLAUSE__CACHE_
:
1510 gcc_checking_assert (!scan_array_reductions
1511 || !is_gimple_omp_oacc (ctx
->stmt
));
1512 if (scan_array_reductions
)
1514 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1515 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1516 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1518 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), ctx
);
1519 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
1521 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1522 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1523 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1524 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1525 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1526 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1530 /* Create a new name for omp child function. Returns an identifier. */
1533 create_omp_child_function_name (bool task_copy
)
1535 return clone_function_name (current_function_decl
,
1536 task_copy
? "_omp_cpyfn" : "_omp_fn");
1539 /* Return true if CTX may belong to offloaded code: either if current function
1540 is offloaded, or any enclosing context corresponds to a target region. */
1543 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1545 if (cgraph_node::get (current_function_decl
)->offloadable
)
1547 for (; ctx
; ctx
= ctx
->outer
)
1548 if (is_gimple_omp_offloaded (ctx
->stmt
))
1553 /* Build a decl for the omp child function. It'll not contain a body
1554 yet, just the bare decl. */
1557 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1559 tree decl
, type
, name
, t
;
1561 name
= create_omp_child_function_name (task_copy
);
1563 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1564 ptr_type_node
, NULL_TREE
);
1566 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1568 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1570 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1573 ctx
->cb
.dst_fn
= decl
;
1575 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1577 TREE_STATIC (decl
) = 1;
1578 TREE_USED (decl
) = 1;
1579 DECL_ARTIFICIAL (decl
) = 1;
1580 DECL_IGNORED_P (decl
) = 0;
1581 TREE_PUBLIC (decl
) = 0;
1582 DECL_UNINLINABLE (decl
) = 1;
1583 DECL_EXTERNAL (decl
) = 0;
1584 DECL_CONTEXT (decl
) = NULL_TREE
;
1585 DECL_INITIAL (decl
) = make_node (BLOCK
);
1586 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1587 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1588 /* Remove omp declare simd attribute from the new attributes. */
1589 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1591 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1594 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1595 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1596 *p
= TREE_CHAIN (*p
);
1599 tree chain
= TREE_CHAIN (*p
);
1600 *p
= copy_node (*p
);
1601 p
= &TREE_CHAIN (*p
);
1605 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
1606 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
1607 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
1608 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
1609 DECL_FUNCTION_VERSIONED (decl
)
1610 = DECL_FUNCTION_VERSIONED (current_function_decl
);
1612 if (omp_maybe_offloaded_ctx (ctx
))
1614 cgraph_node::get_create (decl
)->offloadable
= 1;
1615 if (ENABLE_OFFLOADING
)
1616 g
->have_offload
= true;
1619 if (cgraph_node::get_create (decl
)->offloadable
1620 && !lookup_attribute ("omp declare target",
1621 DECL_ATTRIBUTES (current_function_decl
)))
1623 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
1624 ? "omp target entrypoint"
1625 : "omp declare target");
1626 DECL_ATTRIBUTES (decl
)
1627 = tree_cons (get_identifier (target_attr
),
1628 NULL_TREE
, DECL_ATTRIBUTES (decl
));
1631 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1632 RESULT_DECL
, NULL_TREE
, void_type_node
);
1633 DECL_ARTIFICIAL (t
) = 1;
1634 DECL_IGNORED_P (t
) = 1;
1635 DECL_CONTEXT (t
) = decl
;
1636 DECL_RESULT (decl
) = t
;
1638 tree data_name
= get_identifier (".omp_data_i");
1639 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
1641 DECL_ARTIFICIAL (t
) = 1;
1642 DECL_NAMELESS (t
) = 1;
1643 DECL_ARG_TYPE (t
) = ptr_type_node
;
1644 DECL_CONTEXT (t
) = current_function_decl
;
1646 TREE_READONLY (t
) = 1;
1647 DECL_ARGUMENTS (decl
) = t
;
1649 ctx
->receiver_decl
= t
;
1652 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1653 PARM_DECL
, get_identifier (".omp_data_o"),
1655 DECL_ARTIFICIAL (t
) = 1;
1656 DECL_NAMELESS (t
) = 1;
1657 DECL_ARG_TYPE (t
) = ptr_type_node
;
1658 DECL_CONTEXT (t
) = current_function_decl
;
1660 TREE_ADDRESSABLE (t
) = 1;
1661 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1662 DECL_ARGUMENTS (decl
) = t
;
1665 /* Allocate memory for the function structure. The call to
1666 allocate_struct_function clobbers CFUN, so we need to restore
1668 push_struct_function (decl
);
1669 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
1670 init_tree_ssa (cfun
);
1674 /* Callback for walk_gimple_seq. Check if combined parallel
1675 contains gimple_omp_for_combined_into_p OMP_FOR. */
1678 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
1679 bool *handled_ops_p
,
1680 struct walk_stmt_info
*wi
)
1682 gimple
*stmt
= gsi_stmt (*gsi_p
);
1684 *handled_ops_p
= true;
1685 switch (gimple_code (stmt
))
1689 case GIMPLE_OMP_FOR
:
1690 if (gimple_omp_for_combined_into_p (stmt
)
1691 && gimple_omp_for_kind (stmt
)
1692 == *(const enum gf_mask
*) (wi
->info
))
1695 return integer_zero_node
;
1704 /* Add _LOOPTEMP_ clauses on OpenMP parallel or task. */
1707 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
1708 omp_context
*outer_ctx
)
1710 struct walk_stmt_info wi
;
1712 memset (&wi
, 0, sizeof (wi
));
1714 wi
.info
= (void *) &msk
;
1715 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
1716 if (wi
.info
!= (void *) &msk
)
1718 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
1719 struct omp_for_data fd
;
1720 omp_extract_for_data (for_stmt
, &fd
, NULL
);
1721 /* We need two temporaries with fd.loop.v type (istart/iend)
1722 and then (fd.collapse - 1) temporaries with the same
1723 type for count2 ... countN-1 vars if not constant. */
1724 size_t count
= 2, i
;
1725 tree type
= fd
.iter_type
;
1727 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
1729 count
+= fd
.collapse
- 1;
1730 /* If there are lastprivate clauses on the inner
1731 GIMPLE_OMP_FOR, add one more temporaries for the total number
1732 of iterations (product of count1 ... countN-1). */
1733 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
1734 OMP_CLAUSE_LASTPRIVATE
))
1736 else if (msk
== GF_OMP_FOR_KIND_FOR
1737 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1738 OMP_CLAUSE_LASTPRIVATE
))
1741 for (i
= 0; i
< count
; i
++)
1743 tree temp
= create_tmp_var (type
);
1744 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
1745 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1746 OMP_CLAUSE_DECL (c
) = temp
;
1747 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1748 gimple_omp_taskreg_set_clauses (stmt
, c
);
1753 /* Scan an OpenMP parallel directive. */
1756 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1760 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
1762 /* Ignore parallel directives with empty bodies, unless there
1763 are copyin clauses. */
1765 && empty_body_p (gimple_omp_body (stmt
))
1766 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1767 OMP_CLAUSE_COPYIN
) == NULL
)
1769 gsi_replace (gsi
, gimple_build_nop (), false);
1773 if (gimple_omp_parallel_combined_p (stmt
))
1774 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
1776 ctx
= new_omp_context (stmt
, outer_ctx
);
1777 taskreg_contexts
.safe_push (ctx
);
1778 if (taskreg_nesting_level
> 1)
1779 ctx
->is_nested
= true;
1780 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1781 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1782 name
= create_tmp_var_name (".omp_data_s");
1783 name
= build_decl (gimple_location (stmt
),
1784 TYPE_DECL
, name
, ctx
->record_type
);
1785 DECL_ARTIFICIAL (name
) = 1;
1786 DECL_NAMELESS (name
) = 1;
1787 TYPE_NAME (ctx
->record_type
) = name
;
1788 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1789 if (!gimple_omp_parallel_grid_phony (stmt
))
1791 create_omp_child_function (ctx
, false);
1792 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1795 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
1796 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
1798 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
1799 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
1802 /* Scan an OpenMP task directive. */
1805 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1809 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
1811 /* Ignore task directives with empty bodies, unless they have depend
1814 && empty_body_p (gimple_omp_body (stmt
))
1815 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
1817 gsi_replace (gsi
, gimple_build_nop (), false);
1821 if (gimple_omp_task_taskloop_p (stmt
))
1822 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
1824 ctx
= new_omp_context (stmt
, outer_ctx
);
1825 taskreg_contexts
.safe_push (ctx
);
1826 if (taskreg_nesting_level
> 1)
1827 ctx
->is_nested
= true;
1828 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1829 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1830 name
= create_tmp_var_name (".omp_data_s");
1831 name
= build_decl (gimple_location (stmt
),
1832 TYPE_DECL
, name
, ctx
->record_type
);
1833 DECL_ARTIFICIAL (name
) = 1;
1834 DECL_NAMELESS (name
) = 1;
1835 TYPE_NAME (ctx
->record_type
) = name
;
1836 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1837 create_omp_child_function (ctx
, false);
1838 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1840 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
1842 if (ctx
->srecord_type
)
1844 name
= create_tmp_var_name (".omp_data_a");
1845 name
= build_decl (gimple_location (stmt
),
1846 TYPE_DECL
, name
, ctx
->srecord_type
);
1847 DECL_ARTIFICIAL (name
) = 1;
1848 DECL_NAMELESS (name
) = 1;
1849 TYPE_NAME (ctx
->srecord_type
) = name
;
1850 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
1851 create_omp_child_function (ctx
, true);
1854 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
1856 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
1858 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
1859 t
= build_int_cst (long_integer_type_node
, 0);
1860 gimple_omp_task_set_arg_size (stmt
, t
);
1861 t
= build_int_cst (long_integer_type_node
, 1);
1862 gimple_omp_task_set_arg_align (stmt
, t
);
1866 /* Helper function for finish_taskreg_scan, called through walk_tree.
1867 If maybe_lookup_decl_in_outer_context returns non-NULL for some
1868 tree, replace it in the expression. */
1871 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
1875 omp_context
*ctx
= (omp_context
*) data
;
1876 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
1879 if (DECL_HAS_VALUE_EXPR_P (t
))
1880 t
= unshare_expr (DECL_VALUE_EXPR (t
));
1885 else if (IS_TYPE_OR_DECL_P (*tp
))
1890 /* If any decls have been made addressable during scan_omp,
1891 adjust their fields if needed, and layout record types
1892 of parallel/task constructs. */
1895 finish_taskreg_scan (omp_context
*ctx
)
1897 if (ctx
->record_type
== NULL_TREE
)
1900 /* If any task_shared_vars were needed, verify all
1901 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1902 statements if use_pointer_for_field hasn't changed
1903 because of that. If it did, update field types now. */
1904 if (task_shared_vars
)
1908 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
1909 c
; c
= OMP_CLAUSE_CHAIN (c
))
1910 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1911 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1913 tree decl
= OMP_CLAUSE_DECL (c
);
1915 /* Global variables don't need to be copied,
1916 the receiver side will use them directly. */
1917 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1919 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
1920 || !use_pointer_for_field (decl
, ctx
))
1922 tree field
= lookup_field (decl
, ctx
);
1923 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
1924 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
1926 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
1927 TREE_THIS_VOLATILE (field
) = 0;
1928 DECL_USER_ALIGN (field
) = 0;
1929 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
1930 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
1931 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
1932 if (ctx
->srecord_type
)
1934 tree sfield
= lookup_sfield (decl
, ctx
);
1935 TREE_TYPE (sfield
) = TREE_TYPE (field
);
1936 TREE_THIS_VOLATILE (sfield
) = 0;
1937 DECL_USER_ALIGN (sfield
) = 0;
1938 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
1939 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
1940 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
1945 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
1947 layout_type (ctx
->record_type
);
1948 fixup_child_record_type (ctx
);
1952 location_t loc
= gimple_location (ctx
->stmt
);
1953 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
1954 /* Move VLA fields to the end. */
1955 p
= &TYPE_FIELDS (ctx
->record_type
);
1957 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
1958 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
1961 *p
= TREE_CHAIN (*p
);
1962 TREE_CHAIN (*q
) = NULL_TREE
;
1963 q
= &TREE_CHAIN (*q
);
1966 p
= &DECL_CHAIN (*p
);
1968 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
1970 /* Move fields corresponding to first and second _looptemp_
1971 clause first. There are filled by GOMP_taskloop
1972 and thus need to be in specific positions. */
1973 tree c1
= gimple_omp_task_clauses (ctx
->stmt
);
1974 c1
= omp_find_clause (c1
, OMP_CLAUSE__LOOPTEMP_
);
1975 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
1976 OMP_CLAUSE__LOOPTEMP_
);
1977 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
1978 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
1979 p
= &TYPE_FIELDS (ctx
->record_type
);
1981 if (*p
== f1
|| *p
== f2
)
1982 *p
= DECL_CHAIN (*p
);
1984 p
= &DECL_CHAIN (*p
);
1985 DECL_CHAIN (f1
) = f2
;
1986 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
1987 TYPE_FIELDS (ctx
->record_type
) = f1
;
1988 if (ctx
->srecord_type
)
1990 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
1991 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
1992 p
= &TYPE_FIELDS (ctx
->srecord_type
);
1994 if (*p
== f1
|| *p
== f2
)
1995 *p
= DECL_CHAIN (*p
);
1997 p
= &DECL_CHAIN (*p
);
1998 DECL_CHAIN (f1
) = f2
;
1999 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2000 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2003 layout_type (ctx
->record_type
);
2004 fixup_child_record_type (ctx
);
2005 if (ctx
->srecord_type
)
2006 layout_type (ctx
->srecord_type
);
2007 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2008 TYPE_SIZE_UNIT (ctx
->record_type
));
2009 if (TREE_CODE (t
) != INTEGER_CST
)
2011 t
= unshare_expr (t
);
2012 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2014 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2015 t
= build_int_cst (long_integer_type_node
,
2016 TYPE_ALIGN_UNIT (ctx
->record_type
));
2017 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2021 /* Find the enclosing offload context. */
2023 static omp_context
*
2024 enclosing_target_ctx (omp_context
*ctx
)
2026 for (; ctx
; ctx
= ctx
->outer
)
2027 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2033 /* Return true if ctx is part of an oacc kernels region. */
2036 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2038 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2040 gimple
*stmt
= ctx
->stmt
;
2041 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2042 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2049 /* Check the parallelism clauses inside a kernels regions.
2050 Until kernels handling moves to use the same loop indirection
2051 scheme as parallel, we need to do this checking early. */
2054 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2056 bool checking
= true;
2057 unsigned outer_mask
= 0;
2058 unsigned this_mask
= 0;
2059 bool has_seq
= false, has_auto
= false;
2062 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2066 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2068 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2071 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2073 switch (OMP_CLAUSE_CODE (c
))
2075 case OMP_CLAUSE_GANG
:
2076 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2078 case OMP_CLAUSE_WORKER
:
2079 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2081 case OMP_CLAUSE_VECTOR
:
2082 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2084 case OMP_CLAUSE_SEQ
:
2087 case OMP_CLAUSE_AUTO
:
2097 if (has_seq
&& (this_mask
|| has_auto
))
2098 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2099 " OpenACC loop specifiers");
2100 else if (has_auto
&& this_mask
)
2101 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2102 " OpenACC loop specifiers");
2104 if (this_mask
& outer_mask
)
2105 error_at (gimple_location (stmt
), "inner loop uses same"
2106 " OpenACC parallelism as containing loop");
2109 return outer_mask
| this_mask
;
2112 /* Scan a GIMPLE_OMP_FOR. */
2114 static omp_context
*
2115 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2119 tree clauses
= gimple_omp_for_clauses (stmt
);
2121 ctx
= new_omp_context (stmt
, outer_ctx
);
2123 if (is_gimple_omp_oacc (stmt
))
2125 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2127 if (!tgt
|| is_oacc_parallel (tgt
))
2128 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2130 char const *check
= NULL
;
2132 switch (OMP_CLAUSE_CODE (c
))
2134 case OMP_CLAUSE_GANG
:
2138 case OMP_CLAUSE_WORKER
:
2142 case OMP_CLAUSE_VECTOR
:
2150 if (check
&& OMP_CLAUSE_OPERAND (c
, 0))
2151 error_at (gimple_location (stmt
),
2152 "argument not permitted on %qs clause in"
2153 " OpenACC %<parallel%>", check
);
2156 if (tgt
&& is_oacc_kernels (tgt
))
2158 /* Strip out reductions, as they are not handled yet. */
2159 tree
*prev_ptr
= &clauses
;
2161 while (tree probe
= *prev_ptr
)
2163 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2165 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2166 *prev_ptr
= *next_ptr
;
2168 prev_ptr
= next_ptr
;
2171 gimple_omp_for_set_clauses (stmt
, clauses
);
2172 check_oacc_kernel_gwv (stmt
, ctx
);
2176 scan_sharing_clauses (clauses
, ctx
);
2178 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2179 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2181 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2182 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2183 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2184 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2186 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2190 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2193 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2194 omp_context
*outer_ctx
)
2196 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2197 gsi_replace (gsi
, bind
, false);
2198 gimple_seq seq
= NULL
;
2199 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2200 tree cond
= create_tmp_var_raw (integer_type_node
);
2201 DECL_CONTEXT (cond
) = current_function_decl
;
2202 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2203 gimple_bind_set_vars (bind
, cond
);
2204 gimple_call_set_lhs (g
, cond
);
2205 gimple_seq_add_stmt (&seq
, g
);
2206 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2207 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2208 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2209 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2210 gimple_seq_add_stmt (&seq
, g
);
2211 g
= gimple_build_label (lab1
);
2212 gimple_seq_add_stmt (&seq
, g
);
2213 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2214 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2215 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2216 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2217 gimple_omp_for_set_clauses (new_stmt
, clause
);
2218 gimple_seq_add_stmt (&seq
, new_stmt
);
2219 g
= gimple_build_goto (lab3
);
2220 gimple_seq_add_stmt (&seq
, g
);
2221 g
= gimple_build_label (lab2
);
2222 gimple_seq_add_stmt (&seq
, g
);
2223 gimple_seq_add_stmt (&seq
, stmt
);
2224 g
= gimple_build_label (lab3
);
2225 gimple_seq_add_stmt (&seq
, g
);
2226 gimple_bind_set_body (bind
, seq
);
2228 scan_omp_for (new_stmt
, outer_ctx
);
2229 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2232 /* Scan an OpenMP sections directive. */
2235 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2239 ctx
= new_omp_context (stmt
, outer_ctx
);
2240 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2241 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2244 /* Scan an OpenMP single directive. */
2247 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2252 ctx
= new_omp_context (stmt
, outer_ctx
);
2253 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2254 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2255 name
= create_tmp_var_name (".omp_copy_s");
2256 name
= build_decl (gimple_location (stmt
),
2257 TYPE_DECL
, name
, ctx
->record_type
);
2258 TYPE_NAME (ctx
->record_type
) = name
;
2260 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2261 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2263 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2264 ctx
->record_type
= NULL
;
2266 layout_type (ctx
->record_type
);
2269 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2270 used in the corresponding offloaded function are restrict. */
2273 omp_target_base_pointers_restrict_p (tree clauses
)
2275 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2277 if (flag_openacc
== 0)
2280 /* I. Basic example:
2284 unsigned int a[2], b[2];
2286 #pragma acc kernels \
2295 After gimplification, we have:
2297 #pragma omp target oacc_kernels \
2298 map(force_from:a [len: 8]) \
2299 map(force_from:b [len: 8])
2305 Because both mappings have the force prefix, we know that they will be
2306 allocated when calling the corresponding offloaded function, which means we
2307 can mark the base pointers for a and b in the offloaded function as
2311 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2313 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
2316 switch (OMP_CLAUSE_MAP_KIND (c
))
2318 case GOMP_MAP_FORCE_ALLOC
:
2319 case GOMP_MAP_FORCE_TO
:
2320 case GOMP_MAP_FORCE_FROM
:
2321 case GOMP_MAP_FORCE_TOFROM
:
2331 /* Scan a GIMPLE_OMP_TARGET. */
2334 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
2338 bool offloaded
= is_gimple_omp_offloaded (stmt
);
2339 tree clauses
= gimple_omp_target_clauses (stmt
);
2341 ctx
= new_omp_context (stmt
, outer_ctx
);
2342 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2343 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2344 name
= create_tmp_var_name (".omp_data_t");
2345 name
= build_decl (gimple_location (stmt
),
2346 TYPE_DECL
, name
, ctx
->record_type
);
2347 DECL_ARTIFICIAL (name
) = 1;
2348 DECL_NAMELESS (name
) = 1;
2349 TYPE_NAME (ctx
->record_type
) = name
;
2350 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2352 bool base_pointers_restrict
= false;
2355 create_omp_child_function (ctx
, false);
2356 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2358 base_pointers_restrict
= omp_target_base_pointers_restrict_p (clauses
);
2359 if (base_pointers_restrict
2360 && dump_file
&& (dump_flags
& TDF_DETAILS
))
2362 "Base pointers in offloaded function are restrict\n");
2365 scan_sharing_clauses (clauses
, ctx
, base_pointers_restrict
);
2366 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2368 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2369 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2372 TYPE_FIELDS (ctx
->record_type
)
2373 = nreverse (TYPE_FIELDS (ctx
->record_type
));
2376 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
2377 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
2379 field
= DECL_CHAIN (field
))
2380 gcc_assert (DECL_ALIGN (field
) == align
);
2382 layout_type (ctx
->record_type
);
2384 fixup_child_record_type (ctx
);
2388 /* Scan an OpenMP teams directive. */
2391 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
2393 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
2394 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2395 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2398 /* Check nesting restrictions. */
2400 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
2404 if (ctx
&& gimple_code (ctx
->stmt
) == GIMPLE_OMP_GRID_BODY
)
2405 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2406 the original copy of its contents. */
2409 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2410 inside an OpenACC CTX. */
2411 if (!(is_gimple_omp (stmt
)
2412 && is_gimple_omp_oacc (stmt
))
2413 /* Except for atomic codes that we share with OpenMP. */
2414 && !(gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2415 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2417 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
2419 error_at (gimple_location (stmt
),
2420 "non-OpenACC construct inside of OpenACC routine");
2424 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
2425 if (is_gimple_omp (octx
->stmt
)
2426 && is_gimple_omp_oacc (octx
->stmt
))
2428 error_at (gimple_location (stmt
),
2429 "non-OpenACC construct inside of OpenACC region");
2436 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
2437 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
2440 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
2442 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2443 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2445 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
2446 && (ctx
->outer
== NULL
2447 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
2448 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
2449 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
2450 != GF_OMP_FOR_KIND_FOR
)
2451 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
2453 error_at (gimple_location (stmt
),
2454 "%<ordered simd threads%> must be closely "
2455 "nested inside of %<for simd%> region");
2461 error_at (gimple_location (stmt
),
2462 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2463 " may not be nested inside %<simd%> region");
2466 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2468 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
2469 || ((gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
)
2470 && (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
)))
2471 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
2473 error_at (gimple_location (stmt
),
2474 "only %<distribute%> or %<parallel%> regions are "
2475 "allowed to be strictly nested inside %<teams%> "
2481 switch (gimple_code (stmt
))
2483 case GIMPLE_OMP_FOR
:
2484 if (gimple_omp_for_kind (stmt
) & GF_OMP_FOR_SIMD
)
2486 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
2488 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
2490 error_at (gimple_location (stmt
),
2491 "%<distribute%> region must be strictly nested "
2492 "inside %<teams%> construct");
2497 /* We split taskloop into task and nested taskloop in it. */
2498 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
2500 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
2505 switch (gimple_code (ctx
->stmt
))
2507 case GIMPLE_OMP_FOR
:
2508 ok
= (gimple_omp_for_kind (ctx
->stmt
)
2509 == GF_OMP_FOR_KIND_OACC_LOOP
);
2512 case GIMPLE_OMP_TARGET
:
2513 switch (gimple_omp_target_kind (ctx
->stmt
))
2515 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
2516 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
2527 else if (oacc_get_fn_attrib (current_function_decl
))
2531 error_at (gimple_location (stmt
),
2532 "OpenACC loop directive must be associated with"
2533 " an OpenACC compute region");
2539 if (is_gimple_call (stmt
)
2540 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2541 == BUILT_IN_GOMP_CANCEL
2542 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2543 == BUILT_IN_GOMP_CANCELLATION_POINT
))
2545 const char *bad
= NULL
;
2546 const char *kind
= NULL
;
2547 const char *construct
2548 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2549 == BUILT_IN_GOMP_CANCEL
)
2550 ? "#pragma omp cancel"
2551 : "#pragma omp cancellation point";
2554 error_at (gimple_location (stmt
), "orphaned %qs construct",
2558 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
2559 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
2563 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
2564 bad
= "#pragma omp parallel";
2565 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2566 == BUILT_IN_GOMP_CANCEL
2567 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2568 ctx
->cancellable
= true;
2572 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2573 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
2574 bad
= "#pragma omp for";
2575 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2576 == BUILT_IN_GOMP_CANCEL
2577 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2579 ctx
->cancellable
= true;
2580 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2582 warning_at (gimple_location (stmt
), 0,
2583 "%<#pragma omp cancel for%> inside "
2584 "%<nowait%> for construct");
2585 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2586 OMP_CLAUSE_ORDERED
))
2587 warning_at (gimple_location (stmt
), 0,
2588 "%<#pragma omp cancel for%> inside "
2589 "%<ordered%> for construct");
2594 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
2595 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
2596 bad
= "#pragma omp sections";
2597 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2598 == BUILT_IN_GOMP_CANCEL
2599 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2601 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
2603 ctx
->cancellable
= true;
2604 if (omp_find_clause (gimple_omp_sections_clauses
2607 warning_at (gimple_location (stmt
), 0,
2608 "%<#pragma omp cancel sections%> inside "
2609 "%<nowait%> sections construct");
2613 gcc_assert (ctx
->outer
2614 && gimple_code (ctx
->outer
->stmt
)
2615 == GIMPLE_OMP_SECTIONS
);
2616 ctx
->outer
->cancellable
= true;
2617 if (omp_find_clause (gimple_omp_sections_clauses
2620 warning_at (gimple_location (stmt
), 0,
2621 "%<#pragma omp cancel sections%> inside "
2622 "%<nowait%> sections construct");
2628 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TASK
)
2629 bad
= "#pragma omp task";
2632 for (omp_context
*octx
= ctx
->outer
;
2633 octx
; octx
= octx
->outer
)
2635 switch (gimple_code (octx
->stmt
))
2637 case GIMPLE_OMP_TASKGROUP
:
2639 case GIMPLE_OMP_TARGET
:
2640 if (gimple_omp_target_kind (octx
->stmt
)
2641 != GF_OMP_TARGET_KIND_REGION
)
2644 case GIMPLE_OMP_PARALLEL
:
2645 case GIMPLE_OMP_TEAMS
:
2646 error_at (gimple_location (stmt
),
2647 "%<%s taskgroup%> construct not closely "
2648 "nested inside of %<taskgroup%> region",
2656 ctx
->cancellable
= true;
2661 error_at (gimple_location (stmt
), "invalid arguments");
2666 error_at (gimple_location (stmt
),
2667 "%<%s %s%> construct not closely nested inside of %qs",
2668 construct
, kind
, bad
);
2673 case GIMPLE_OMP_SECTIONS
:
2674 case GIMPLE_OMP_SINGLE
:
2675 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2676 switch (gimple_code (ctx
->stmt
))
2678 case GIMPLE_OMP_FOR
:
2679 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
2680 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
2683 case GIMPLE_OMP_SECTIONS
:
2684 case GIMPLE_OMP_SINGLE
:
2685 case GIMPLE_OMP_ORDERED
:
2686 case GIMPLE_OMP_MASTER
:
2687 case GIMPLE_OMP_TASK
:
2688 case GIMPLE_OMP_CRITICAL
:
2689 if (is_gimple_call (stmt
))
2691 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2692 != BUILT_IN_GOMP_BARRIER
)
2694 error_at (gimple_location (stmt
),
2695 "barrier region may not be closely nested inside "
2696 "of work-sharing, %<critical%>, %<ordered%>, "
2697 "%<master%>, explicit %<task%> or %<taskloop%> "
2701 error_at (gimple_location (stmt
),
2702 "work-sharing region may not be closely nested inside "
2703 "of work-sharing, %<critical%>, %<ordered%>, "
2704 "%<master%>, explicit %<task%> or %<taskloop%> region");
2706 case GIMPLE_OMP_PARALLEL
:
2707 case GIMPLE_OMP_TEAMS
:
2709 case GIMPLE_OMP_TARGET
:
2710 if (gimple_omp_target_kind (ctx
->stmt
)
2711 == GF_OMP_TARGET_KIND_REGION
)
2718 case GIMPLE_OMP_MASTER
:
2719 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2720 switch (gimple_code (ctx
->stmt
))
2722 case GIMPLE_OMP_FOR
:
2723 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
2724 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
2727 case GIMPLE_OMP_SECTIONS
:
2728 case GIMPLE_OMP_SINGLE
:
2729 case GIMPLE_OMP_TASK
:
2730 error_at (gimple_location (stmt
),
2731 "%<master%> region may not be closely nested inside "
2732 "of work-sharing, explicit %<task%> or %<taskloop%> "
2735 case GIMPLE_OMP_PARALLEL
:
2736 case GIMPLE_OMP_TEAMS
:
2738 case GIMPLE_OMP_TARGET
:
2739 if (gimple_omp_target_kind (ctx
->stmt
)
2740 == GF_OMP_TARGET_KIND_REGION
)
2747 case GIMPLE_OMP_TASK
:
2748 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2749 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
2750 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
2751 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
2753 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
2754 error_at (OMP_CLAUSE_LOCATION (c
),
2755 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2756 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
2760 case GIMPLE_OMP_ORDERED
:
2761 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2762 c
; c
= OMP_CLAUSE_CHAIN (c
))
2764 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
2766 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
2767 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
2770 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
2771 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
2772 || kind
== OMP_CLAUSE_DEPEND_SINK
)
2775 /* Look for containing ordered(N) loop. */
2777 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2779 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2780 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
2782 error_at (OMP_CLAUSE_LOCATION (c
),
2783 "%<ordered%> construct with %<depend%> clause "
2784 "must be closely nested inside an %<ordered%> "
2788 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
2790 error_at (OMP_CLAUSE_LOCATION (c
),
2791 "%<ordered%> construct with %<depend%> clause "
2792 "must be closely nested inside a loop with "
2793 "%<ordered%> clause with a parameter");
2799 error_at (OMP_CLAUSE_LOCATION (c
),
2800 "invalid depend kind in omp %<ordered%> %<depend%>");
2804 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2805 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2807 /* ordered simd must be closely nested inside of simd region,
2808 and simd region must not encounter constructs other than
2809 ordered simd, therefore ordered simd may be either orphaned,
2810 or ctx->stmt must be simd. The latter case is handled already
2814 error_at (gimple_location (stmt
),
2815 "%<ordered%> %<simd%> must be closely nested inside "
2820 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2821 switch (gimple_code (ctx
->stmt
))
2823 case GIMPLE_OMP_CRITICAL
:
2824 case GIMPLE_OMP_TASK
:
2825 case GIMPLE_OMP_ORDERED
:
2826 ordered_in_taskloop
:
2827 error_at (gimple_location (stmt
),
2828 "%<ordered%> region may not be closely nested inside "
2829 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2830 "%<taskloop%> region");
2832 case GIMPLE_OMP_FOR
:
2833 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
2834 goto ordered_in_taskloop
;
2835 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2836 OMP_CLAUSE_ORDERED
) == NULL
)
2838 error_at (gimple_location (stmt
),
2839 "%<ordered%> region must be closely nested inside "
2840 "a loop region with an %<ordered%> clause");
2844 case GIMPLE_OMP_TARGET
:
2845 if (gimple_omp_target_kind (ctx
->stmt
)
2846 != GF_OMP_TARGET_KIND_REGION
)
2849 case GIMPLE_OMP_PARALLEL
:
2850 case GIMPLE_OMP_TEAMS
:
2851 error_at (gimple_location (stmt
),
2852 "%<ordered%> region must be closely nested inside "
2853 "a loop region with an %<ordered%> clause");
2859 case GIMPLE_OMP_CRITICAL
:
2862 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
2863 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2864 if (gomp_critical
*other_crit
2865 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
2866 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
2868 error_at (gimple_location (stmt
),
2869 "%<critical%> region may not be nested inside "
2870 "a %<critical%> region with the same name");
2875 case GIMPLE_OMP_TEAMS
:
2877 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
2878 || gimple_omp_target_kind (ctx
->stmt
) != GF_OMP_TARGET_KIND_REGION
)
2880 error_at (gimple_location (stmt
),
2881 "%<teams%> construct not closely nested inside of "
2882 "%<target%> construct");
2886 case GIMPLE_OMP_TARGET
:
2887 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2888 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
2889 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
2890 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
2892 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
2893 error_at (OMP_CLAUSE_LOCATION (c
),
2894 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2895 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
2898 if (is_gimple_omp_offloaded (stmt
)
2899 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
2901 error_at (gimple_location (stmt
),
2902 "OpenACC region inside of OpenACC routine, nested "
2903 "parallelism not supported yet");
2906 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2908 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
2910 if (is_gimple_omp (stmt
)
2911 && is_gimple_omp_oacc (stmt
)
2912 && is_gimple_omp (ctx
->stmt
))
2914 error_at (gimple_location (stmt
),
2915 "OpenACC construct inside of non-OpenACC region");
2921 const char *stmt_name
, *ctx_stmt_name
;
2922 switch (gimple_omp_target_kind (stmt
))
2924 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
2925 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
2926 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
2927 case GF_OMP_TARGET_KIND_ENTER_DATA
:
2928 stmt_name
= "target enter data"; break;
2929 case GF_OMP_TARGET_KIND_EXIT_DATA
:
2930 stmt_name
= "target exit data"; break;
2931 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
2932 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
2933 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
2934 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
2935 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
2936 stmt_name
= "enter/exit data"; break;
2937 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
2939 default: gcc_unreachable ();
2941 switch (gimple_omp_target_kind (ctx
->stmt
))
2943 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
2944 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
2945 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
2946 ctx_stmt_name
= "parallel"; break;
2947 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
2948 ctx_stmt_name
= "kernels"; break;
2949 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
2950 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
2951 ctx_stmt_name
= "host_data"; break;
2952 default: gcc_unreachable ();
2955 /* OpenACC/OpenMP mismatch? */
2956 if (is_gimple_omp_oacc (stmt
)
2957 != is_gimple_omp_oacc (ctx
->stmt
))
2959 error_at (gimple_location (stmt
),
2960 "%s %qs construct inside of %s %qs region",
2961 (is_gimple_omp_oacc (stmt
)
2962 ? "OpenACC" : "OpenMP"), stmt_name
,
2963 (is_gimple_omp_oacc (ctx
->stmt
)
2964 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
2967 if (is_gimple_omp_offloaded (ctx
->stmt
))
2969 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2970 if (is_gimple_omp_oacc (ctx
->stmt
))
2972 error_at (gimple_location (stmt
),
2973 "%qs construct inside of %qs region",
2974 stmt_name
, ctx_stmt_name
);
2979 warning_at (gimple_location (stmt
), 0,
2980 "%qs construct inside of %qs region",
2981 stmt_name
, ctx_stmt_name
);
2993 /* Helper function scan_omp.
2995 Callback for walk_tree or operators in walk_gimple_stmt used to
2996 scan for OMP directives in TP. */
2999 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3001 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3002 omp_context
*ctx
= (omp_context
*) wi
->info
;
3005 switch (TREE_CODE (t
))
3013 tree repl
= remap_decl (t
, &ctx
->cb
);
3014 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3020 if (ctx
&& TYPE_P (t
))
3021 *tp
= remap_type (t
, &ctx
->cb
);
3022 else if (!DECL_P (t
))
3027 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3028 if (tem
!= TREE_TYPE (t
))
3030 if (TREE_CODE (t
) == INTEGER_CST
)
3031 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3033 TREE_TYPE (t
) = tem
;
3043 /* Return true if FNDECL is a setjmp or a longjmp. */
3046 setjmp_or_longjmp_p (const_tree fndecl
)
3048 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
3049 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_SETJMP
3050 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_LONGJMP
))
3053 tree declname
= DECL_NAME (fndecl
);
3056 const char *name
= IDENTIFIER_POINTER (declname
);
3057 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
3061 /* Helper function for scan_omp.
3063 Callback for walk_gimple_stmt used to scan for OMP directives in
3064 the current statement in GSI. */
3067 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3068 struct walk_stmt_info
*wi
)
3070 gimple
*stmt
= gsi_stmt (*gsi
);
3071 omp_context
*ctx
= (omp_context
*) wi
->info
;
3073 if (gimple_has_location (stmt
))
3074 input_location
= gimple_location (stmt
);
3076 /* Check the nesting restrictions. */
3077 bool remove
= false;
3078 if (is_gimple_omp (stmt
))
3079 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3080 else if (is_gimple_call (stmt
))
3082 tree fndecl
= gimple_call_fndecl (stmt
);
3085 if (setjmp_or_longjmp_p (fndecl
)
3087 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3088 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
3091 error_at (gimple_location (stmt
),
3092 "setjmp/longjmp inside simd construct");
3094 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3095 switch (DECL_FUNCTION_CODE (fndecl
))
3097 case BUILT_IN_GOMP_BARRIER
:
3098 case BUILT_IN_GOMP_CANCEL
:
3099 case BUILT_IN_GOMP_CANCELLATION_POINT
:
3100 case BUILT_IN_GOMP_TASKYIELD
:
3101 case BUILT_IN_GOMP_TASKWAIT
:
3102 case BUILT_IN_GOMP_TASKGROUP_START
:
3103 case BUILT_IN_GOMP_TASKGROUP_END
:
3104 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3113 stmt
= gimple_build_nop ();
3114 gsi_replace (gsi
, stmt
, false);
3117 *handled_ops_p
= true;
3119 switch (gimple_code (stmt
))
3121 case GIMPLE_OMP_PARALLEL
:
3122 taskreg_nesting_level
++;
3123 scan_omp_parallel (gsi
, ctx
);
3124 taskreg_nesting_level
--;
3127 case GIMPLE_OMP_TASK
:
3128 taskreg_nesting_level
++;
3129 scan_omp_task (gsi
, ctx
);
3130 taskreg_nesting_level
--;
3133 case GIMPLE_OMP_FOR
:
3134 if (((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3135 & GF_OMP_FOR_KIND_MASK
) == GF_OMP_FOR_KIND_SIMD
)
3136 && omp_maybe_offloaded_ctx (ctx
)
3137 && omp_max_simt_vf ())
3138 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3140 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
3143 case GIMPLE_OMP_SECTIONS
:
3144 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
3147 case GIMPLE_OMP_SINGLE
:
3148 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
3151 case GIMPLE_OMP_SECTION
:
3152 case GIMPLE_OMP_MASTER
:
3153 case GIMPLE_OMP_TASKGROUP
:
3154 case GIMPLE_OMP_ORDERED
:
3155 case GIMPLE_OMP_CRITICAL
:
3156 case GIMPLE_OMP_GRID_BODY
:
3157 ctx
= new_omp_context (stmt
, ctx
);
3158 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3161 case GIMPLE_OMP_TARGET
:
3162 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3165 case GIMPLE_OMP_TEAMS
:
3166 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3173 *handled_ops_p
= false;
3175 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
3177 var
= DECL_CHAIN (var
))
3178 insert_decl_map (&ctx
->cb
, var
, var
);
3182 *handled_ops_p
= false;
3190 /* Scan all the statements starting at the current statement. CTX
3191 contains context information about the OMP directives and
3192 clauses found during the scan. */
3195 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
3197 location_t saved_location
;
3198 struct walk_stmt_info wi
;
3200 memset (&wi
, 0, sizeof (wi
));
3202 wi
.want_locations
= true;
3204 saved_location
= input_location
;
3205 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
3206 input_location
= saved_location
;
3209 /* Re-gimplification and code generation routines. */
3211 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3212 of BIND if in a method. */
3215 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
3217 if (DECL_ARGUMENTS (current_function_decl
)
3218 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
3219 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
3222 tree vars
= gimple_bind_vars (bind
);
3223 for (tree
*pvar
= &vars
; *pvar
; )
3224 if (omp_member_access_dummy_var (*pvar
))
3225 *pvar
= DECL_CHAIN (*pvar
);
3227 pvar
= &DECL_CHAIN (*pvar
);
3228 gimple_bind_set_vars (bind
, vars
);
3232 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3233 block and its subblocks. */
3236 remove_member_access_dummy_vars (tree block
)
3238 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
3239 if (omp_member_access_dummy_var (*pvar
))
3240 *pvar
= DECL_CHAIN (*pvar
);
3242 pvar
= &DECL_CHAIN (*pvar
);
3244 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
3245 remove_member_access_dummy_vars (block
);
3248 /* If a context was created for STMT when it was scanned, return it. */
3250 static omp_context
*
3251 maybe_lookup_ctx (gimple
*stmt
)
3254 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
3255 return n
? (omp_context
*) n
->value
: NULL
;
3259 /* Find the mapping for DECL in CTX or the immediately enclosing
3260 context that has a mapping for DECL.
3262 If CTX is a nested parallel directive, we may have to use the decl
3263 mappings created in CTX's parent context. Suppose that we have the
3264 following parallel nesting (variable UIDs showed for clarity):
3267 #omp parallel shared(iD.1562) -> outer parallel
3268 iD.1562 = iD.1562 + 1;
3270 #omp parallel shared (iD.1562) -> inner parallel
3271 iD.1562 = iD.1562 - 1;
3273 Each parallel structure will create a distinct .omp_data_s structure
3274 for copying iD.1562 in/out of the directive:
3276 outer parallel .omp_data_s.1.i -> iD.1562
3277 inner parallel .omp_data_s.2.i -> iD.1562
3279 A shared variable mapping will produce a copy-out operation before
3280 the parallel directive and a copy-in operation after it. So, in
3281 this case we would have:
3284 .omp_data_o.1.i = iD.1562;
3285 #omp parallel shared(iD.1562) -> outer parallel
3286 .omp_data_i.1 = &.omp_data_o.1
3287 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3289 .omp_data_o.2.i = iD.1562; -> **
3290 #omp parallel shared(iD.1562) -> inner parallel
3291 .omp_data_i.2 = &.omp_data_o.2
3292 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3295 ** This is a problem. The symbol iD.1562 cannot be referenced
3296 inside the body of the outer parallel region. But since we are
3297 emitting this copy operation while expanding the inner parallel
3298 directive, we need to access the CTX structure of the outer
3299 parallel directive to get the correct mapping:
3301 .omp_data_o.2.i = .omp_data_i.1->i
3303 Since there may be other workshare or parallel directives enclosing
3304 the parallel directive, it may be necessary to walk up the context
3305 parent chain. This is not a problem in general because nested
3306 parallelism happens only rarely. */
3309 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3314 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3315 t
= maybe_lookup_decl (decl
, up
);
3317 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
3319 return t
? t
: decl
;
3323 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3324 in outer contexts. */
3327 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3332 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3333 t
= maybe_lookup_decl (decl
, up
);
3335 return t
? t
: decl
;
3339 /* Construct the initialization value for reduction operation OP. */
3342 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
3351 case TRUTH_ORIF_EXPR
:
3352 case TRUTH_XOR_EXPR
:
3354 return build_zero_cst (type
);
3357 case TRUTH_AND_EXPR
:
3358 case TRUTH_ANDIF_EXPR
:
3360 return fold_convert_loc (loc
, type
, integer_one_node
);
3363 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
3366 if (SCALAR_FLOAT_TYPE_P (type
))
3368 REAL_VALUE_TYPE max
, min
;
3369 if (HONOR_INFINITIES (type
))
3372 real_arithmetic (&min
, NEGATE_EXPR
, &max
, NULL
);
3375 real_maxval (&min
, 1, TYPE_MODE (type
));
3376 return build_real (type
, min
);
3378 else if (POINTER_TYPE_P (type
))
3381 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3382 return wide_int_to_tree (type
, min
);
3386 gcc_assert (INTEGRAL_TYPE_P (type
));
3387 return TYPE_MIN_VALUE (type
);
3391 if (SCALAR_FLOAT_TYPE_P (type
))
3393 REAL_VALUE_TYPE max
;
3394 if (HONOR_INFINITIES (type
))
3397 real_maxval (&max
, 0, TYPE_MODE (type
));
3398 return build_real (type
, max
);
3400 else if (POINTER_TYPE_P (type
))
3403 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3404 return wide_int_to_tree (type
, max
);
3408 gcc_assert (INTEGRAL_TYPE_P (type
));
3409 return TYPE_MAX_VALUE (type
);
3417 /* Construct the initialization value for reduction CLAUSE. */
3420 omp_reduction_init (tree clause
, tree type
)
3422 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
3423 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
3426 /* Return alignment to be assumed for var in CLAUSE, which should be
3427 OMP_CLAUSE_ALIGNED. */
3430 omp_clause_aligned_alignment (tree clause
)
3432 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
3433 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
3435 /* Otherwise return implementation defined alignment. */
3436 unsigned int al
= 1;
3437 opt_scalar_mode mode_iter
;
3438 auto_vector_sizes sizes
;
3439 targetm
.vectorize
.autovectorize_vector_sizes (&sizes
);
3441 for (unsigned int i
= 0; i
< sizes
.length (); ++i
)
3442 vs
= ordered_max (vs
, sizes
[i
]);
3443 static enum mode_class classes
[]
3444 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
3445 for (int i
= 0; i
< 4; i
+= 2)
3446 /* The for loop above dictates that we only walk through scalar classes. */
3447 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
3449 scalar_mode mode
= mode_iter
.require ();
3450 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
3451 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
3453 while (maybe_ne (vs
, 0U)
3454 && known_lt (GET_MODE_SIZE (vmode
), vs
)
3455 && GET_MODE_2XWIDER_MODE (vmode
).exists ())
3456 vmode
= GET_MODE_2XWIDER_MODE (vmode
).require ();
3458 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
3459 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
3461 poly_uint64 nelts
= exact_div (GET_MODE_SIZE (vmode
),
3462 GET_MODE_SIZE (mode
));
3463 type
= build_vector_type (type
, nelts
);
3464 if (TYPE_MODE (type
) != vmode
)
3466 if (TYPE_ALIGN_UNIT (type
) > al
)
3467 al
= TYPE_ALIGN_UNIT (type
);
3469 return build_int_cst (integer_type_node
, al
);
3473 /* This structure is part of the interface between lower_rec_simd_input_clauses
3474 and lower_rec_input_clauses. */
3476 struct omplow_simd_context
{
3477 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3480 vec
<tree
, va_heap
> simt_eargs
;
3481 gimple_seq simt_dlist
;
3482 poly_uint64_pod max_vf
;
3486 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3490 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
3491 omplow_simd_context
*sctx
, tree
&ivar
, tree
&lvar
)
3493 if (known_eq (sctx
->max_vf
, 0U))
3495 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
3496 if (maybe_gt (sctx
->max_vf
, 1U))
3498 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3499 OMP_CLAUSE_SAFELEN
);
3502 poly_uint64 safe_len
;
3503 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
3504 || maybe_lt (safe_len
, 1U))
3507 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
3510 if (maybe_gt (sctx
->max_vf
, 1U))
3512 sctx
->idx
= create_tmp_var (unsigned_type_node
);
3513 sctx
->lane
= create_tmp_var (unsigned_type_node
);
3516 if (known_eq (sctx
->max_vf
, 1U))
3521 if (is_gimple_reg (new_var
))
3523 ivar
= lvar
= new_var
;
3526 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
3527 ivar
= lvar
= create_tmp_var (type
);
3528 TREE_ADDRESSABLE (ivar
) = 1;
3529 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
3530 NULL
, DECL_ATTRIBUTES (ivar
));
3531 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
3532 tree clobber
= build_constructor (type
, NULL
);
3533 TREE_THIS_VOLATILE (clobber
) = 1;
3534 gimple
*g
= gimple_build_assign (ivar
, clobber
);
3535 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
3539 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
3540 tree avar
= create_tmp_var_raw (atype
);
3541 if (TREE_ADDRESSABLE (new_var
))
3542 TREE_ADDRESSABLE (avar
) = 1;
3543 DECL_ATTRIBUTES (avar
)
3544 = tree_cons (get_identifier ("omp simd array"), NULL
,
3545 DECL_ATTRIBUTES (avar
));
3546 gimple_add_tmp_var (avar
);
3547 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->idx
,
3548 NULL_TREE
, NULL_TREE
);
3549 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
3550 NULL_TREE
, NULL_TREE
);
3552 if (DECL_P (new_var
))
3554 SET_DECL_VALUE_EXPR (new_var
, lvar
);
3555 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
3560 /* Helper function of lower_rec_input_clauses. For a reference
3561 in simd reduction, add an underlying variable it will reference. */
3564 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
3566 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
3567 if (TREE_CONSTANT (z
))
3569 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
3570 get_name (new_vard
));
3571 gimple_add_tmp_var (z
);
3572 TREE_ADDRESSABLE (z
) = 1;
3573 z
= build_fold_addr_expr_loc (loc
, z
);
3574 gimplify_assign (new_vard
, z
, ilist
);
3578 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3579 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3580 private variables. Initialization statements go in ILIST, while calls
3581 to destructors go in DLIST. */
3584 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
3585 omp_context
*ctx
, struct omp_for_data
*fd
)
3587 tree c
, dtor
, copyin_seq
, x
, ptr
;
3588 bool copyin_by_ref
= false;
3589 bool lastprivate_firstprivate
= false;
3590 bool reduction_omp_orig_ref
= false;
3592 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3593 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
);
3594 omplow_simd_context sctx
= omplow_simd_context ();
3595 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
3596 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
3597 gimple_seq llist
[3] = { };
3600 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
3602 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3603 with data sharing clauses referencing variable sized vars. That
3604 is unnecessarily hard to support and very unlikely to result in
3605 vectorized code anyway. */
3607 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3608 switch (OMP_CLAUSE_CODE (c
))
3610 case OMP_CLAUSE_LINEAR
:
3611 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
3614 case OMP_CLAUSE_PRIVATE
:
3615 case OMP_CLAUSE_FIRSTPRIVATE
:
3616 case OMP_CLAUSE_LASTPRIVATE
:
3617 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
3620 case OMP_CLAUSE_REDUCTION
:
3621 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
3622 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
3629 /* Add a placeholder for simduid. */
3630 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
3631 sctx
.simt_eargs
.safe_push (NULL_TREE
);
3633 /* Do all the fixed sized types in the first pass, and the variable sized
3634 types in the second pass. This makes sure that the scalar arguments to
3635 the variable sized types are processed before we use them in the
3636 variable sized operations. */
3637 for (pass
= 0; pass
< 2; ++pass
)
3639 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3641 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
3644 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
3648 case OMP_CLAUSE_PRIVATE
:
3649 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
3652 case OMP_CLAUSE_SHARED
:
3653 /* Ignore shared directives in teams construct. */
3654 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
3656 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
3658 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
3659 || is_global_var (OMP_CLAUSE_DECL (c
)));
3662 case OMP_CLAUSE_FIRSTPRIVATE
:
3663 case OMP_CLAUSE_COPYIN
:
3665 case OMP_CLAUSE_LINEAR
:
3666 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
3667 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
3668 lastprivate_firstprivate
= true;
3670 case OMP_CLAUSE_REDUCTION
:
3671 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
3672 reduction_omp_orig_ref
= true;
3674 case OMP_CLAUSE__LOOPTEMP_
:
3675 /* Handle _looptemp_ clauses only on parallel/task. */
3679 case OMP_CLAUSE_LASTPRIVATE
:
3680 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
3682 lastprivate_firstprivate
= true;
3683 if (pass
!= 0 || is_taskloop_ctx (ctx
))
3686 /* Even without corresponding firstprivate, if
3687 decl is Fortran allocatable, it needs outer var
3690 && lang_hooks
.decls
.omp_private_outer_ref
3691 (OMP_CLAUSE_DECL (c
)))
3692 lastprivate_firstprivate
= true;
3694 case OMP_CLAUSE_ALIGNED
:
3697 var
= OMP_CLAUSE_DECL (c
);
3698 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
3699 && !is_global_var (var
))
3701 new_var
= maybe_lookup_decl (var
, ctx
);
3702 if (new_var
== NULL_TREE
)
3703 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
3704 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
3705 tree alarg
= omp_clause_aligned_alignment (c
);
3706 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
3707 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
3708 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
3709 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
3710 gimplify_and_add (x
, ilist
);
3712 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
3713 && is_global_var (var
))
3715 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
3716 new_var
= lookup_decl (var
, ctx
);
3717 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
3718 t
= build_fold_addr_expr_loc (clause_loc
, t
);
3719 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
3720 tree alarg
= omp_clause_aligned_alignment (c
);
3721 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
3722 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
3723 t
= fold_convert_loc (clause_loc
, ptype
, t
);
3724 x
= create_tmp_var (ptype
);
3725 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
3726 gimplify_and_add (t
, ilist
);
3727 t
= build_simple_mem_ref_loc (clause_loc
, x
);
3728 SET_DECL_VALUE_EXPR (new_var
, t
);
3729 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
3736 new_var
= var
= OMP_CLAUSE_DECL (c
);
3737 if (c_kind
== OMP_CLAUSE_REDUCTION
&& TREE_CODE (var
) == MEM_REF
)
3739 var
= TREE_OPERAND (var
, 0);
3740 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
3741 var
= TREE_OPERAND (var
, 0);
3742 if (TREE_CODE (var
) == INDIRECT_REF
3743 || TREE_CODE (var
) == ADDR_EXPR
)
3744 var
= TREE_OPERAND (var
, 0);
3745 if (is_variable_sized (var
))
3747 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
3748 var
= DECL_VALUE_EXPR (var
);
3749 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
3750 var
= TREE_OPERAND (var
, 0);
3751 gcc_assert (DECL_P (var
));
3755 if (c_kind
!= OMP_CLAUSE_COPYIN
)
3756 new_var
= lookup_decl (var
, ctx
);
3758 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
3763 /* C/C++ array section reductions. */
3764 else if (c_kind
== OMP_CLAUSE_REDUCTION
3765 && var
!= OMP_CLAUSE_DECL (c
))
3770 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
3771 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
3772 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
3774 tree b
= TREE_OPERAND (orig_var
, 1);
3775 b
= maybe_lookup_decl (b
, ctx
);
3778 b
= TREE_OPERAND (orig_var
, 1);
3779 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
3781 if (integer_zerop (bias
))
3785 bias
= fold_convert_loc (clause_loc
,
3786 TREE_TYPE (b
), bias
);
3787 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
3788 TREE_TYPE (b
), b
, bias
);
3790 orig_var
= TREE_OPERAND (orig_var
, 0);
3792 if (TREE_CODE (orig_var
) == INDIRECT_REF
3793 || TREE_CODE (orig_var
) == ADDR_EXPR
)
3794 orig_var
= TREE_OPERAND (orig_var
, 0);
3795 tree d
= OMP_CLAUSE_DECL (c
);
3796 tree type
= TREE_TYPE (d
);
3797 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
3798 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
3799 const char *name
= get_name (orig_var
);
3800 if (TREE_CONSTANT (v
))
3802 x
= create_tmp_var_raw (type
, name
);
3803 gimple_add_tmp_var (x
);
3804 TREE_ADDRESSABLE (x
) = 1;
3805 x
= build_fold_addr_expr_loc (clause_loc
, x
);
3810 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
3811 tree t
= maybe_lookup_decl (v
, ctx
);
3815 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
3816 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
3817 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
3819 build_int_cst (TREE_TYPE (v
), 1));
3820 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
3822 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
3823 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
3824 x
= build_call_expr_loc (clause_loc
, atmp
, 2, t
, al
);
3827 tree ptype
= build_pointer_type (TREE_TYPE (type
));
3828 x
= fold_convert_loc (clause_loc
, ptype
, x
);
3829 tree y
= create_tmp_var (ptype
, name
);
3830 gimplify_assign (y
, x
, ilist
);
3834 if (!integer_zerop (bias
))
3836 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
3838 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
3840 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
3841 pointer_sized_int_node
, yb
, bias
);
3842 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
3843 yb
= create_tmp_var (ptype
, name
);
3844 gimplify_assign (yb
, x
, ilist
);
3848 d
= TREE_OPERAND (d
, 0);
3849 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
3850 d
= TREE_OPERAND (d
, 0);
3851 if (TREE_CODE (d
) == ADDR_EXPR
)
3853 if (orig_var
!= var
)
3855 gcc_assert (is_variable_sized (orig_var
));
3856 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
3858 gimplify_assign (new_var
, x
, ilist
);
3859 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
3860 tree t
= build_fold_indirect_ref (new_var
);
3861 DECL_IGNORED_P (new_var
) = 0;
3862 TREE_THIS_NOTRAP (t
);
3863 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
3864 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
3868 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
3869 build_int_cst (ptype
, 0));
3870 SET_DECL_VALUE_EXPR (new_var
, x
);
3871 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
3876 gcc_assert (orig_var
== var
);
3877 if (TREE_CODE (d
) == INDIRECT_REF
)
3879 x
= create_tmp_var (ptype
, name
);
3880 TREE_ADDRESSABLE (x
) = 1;
3881 gimplify_assign (x
, yb
, ilist
);
3882 x
= build_fold_addr_expr_loc (clause_loc
, x
);
3884 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
3885 gimplify_assign (new_var
, x
, ilist
);
3887 tree y1
= create_tmp_var (ptype
, NULL
);
3888 gimplify_assign (y1
, y
, ilist
);
3889 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
3890 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
3891 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
3892 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
3894 y2
= create_tmp_var (ptype
, NULL
);
3895 gimplify_assign (y2
, y
, ilist
);
3896 tree ref
= build_outer_var_ref (var
, ctx
);
3897 /* For ref build_outer_var_ref already performs this. */
3898 if (TREE_CODE (d
) == INDIRECT_REF
)
3899 gcc_assert (omp_is_reference (var
));
3900 else if (TREE_CODE (d
) == ADDR_EXPR
)
3901 ref
= build_fold_addr_expr (ref
);
3902 else if (omp_is_reference (var
))
3903 ref
= build_fold_addr_expr (ref
);
3904 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
3905 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
3906 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
3908 y3
= create_tmp_var (ptype
, NULL
);
3909 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
3913 y4
= create_tmp_var (ptype
, NULL
);
3914 gimplify_assign (y4
, ref
, dlist
);
3917 tree i
= create_tmp_var (TREE_TYPE (v
), NULL
);
3918 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
3919 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
3920 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
3921 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
3924 i2
= create_tmp_var (TREE_TYPE (v
), NULL
);
3925 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
3926 body2
= create_artificial_label (UNKNOWN_LOCATION
);
3927 end2
= create_artificial_label (UNKNOWN_LOCATION
);
3928 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
3930 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
3932 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
3933 tree decl_placeholder
3934 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
3935 SET_DECL_VALUE_EXPR (decl_placeholder
,
3936 build_simple_mem_ref (y1
));
3937 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
3938 SET_DECL_VALUE_EXPR (placeholder
,
3939 y3
? build_simple_mem_ref (y3
)
3941 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
3942 x
= lang_hooks
.decls
.omp_clause_default_ctor
3943 (c
, build_simple_mem_ref (y1
),
3944 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
3946 gimplify_and_add (x
, ilist
);
3947 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
3949 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
3950 lower_omp (&tseq
, ctx
);
3951 gimple_seq_add_seq (ilist
, tseq
);
3953 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
3956 SET_DECL_VALUE_EXPR (decl_placeholder
,
3957 build_simple_mem_ref (y2
));
3958 SET_DECL_VALUE_EXPR (placeholder
,
3959 build_simple_mem_ref (y4
));
3960 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
3961 lower_omp (&tseq
, ctx
);
3962 gimple_seq_add_seq (dlist
, tseq
);
3963 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
3965 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
3966 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
3967 x
= lang_hooks
.decls
.omp_clause_dtor
3968 (c
, build_simple_mem_ref (y2
));
3971 gimple_seq tseq
= NULL
;
3973 gimplify_stmt (&dtor
, &tseq
);
3974 gimple_seq_add_seq (dlist
, tseq
);
3979 x
= omp_reduction_init (c
, TREE_TYPE (type
));
3980 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
3982 /* reduction(-:var) sums up the partial results, so it
3983 acts identically to reduction(+:var). */
3984 if (code
== MINUS_EXPR
)
3987 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
3990 x
= build2 (code
, TREE_TYPE (type
),
3991 build_simple_mem_ref (y4
),
3992 build_simple_mem_ref (y2
));
3993 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
3997 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
3998 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
3999 gimple_seq_add_stmt (ilist
, g
);
4002 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
4003 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4004 gimple_seq_add_stmt (ilist
, g
);
4006 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
4007 build_int_cst (TREE_TYPE (i
), 1));
4008 gimple_seq_add_stmt (ilist
, g
);
4009 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
4010 gimple_seq_add_stmt (ilist
, g
);
4011 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
4014 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
4015 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4016 gimple_seq_add_stmt (dlist
, g
);
4019 g
= gimple_build_assign
4020 (y4
, POINTER_PLUS_EXPR
, y4
,
4021 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4022 gimple_seq_add_stmt (dlist
, g
);
4024 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
4025 build_int_cst (TREE_TYPE (i2
), 1));
4026 gimple_seq_add_stmt (dlist
, g
);
4027 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
4028 gimple_seq_add_stmt (dlist
, g
);
4029 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
4033 else if (is_variable_sized (var
))
4035 /* For variable sized types, we need to allocate the
4036 actual storage here. Call alloca and store the
4037 result in the pointer decl that we created elsewhere. */
4041 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
4046 ptr
= DECL_VALUE_EXPR (new_var
);
4047 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
4048 ptr
= TREE_OPERAND (ptr
, 0);
4049 gcc_assert (DECL_P (ptr
));
4050 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
4052 /* void *tmp = __builtin_alloca */
4053 atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4054 stmt
= gimple_build_call (atmp
, 2, x
,
4055 size_int (DECL_ALIGN (var
)));
4056 tmp
= create_tmp_var_raw (ptr_type_node
);
4057 gimple_add_tmp_var (tmp
);
4058 gimple_call_set_lhs (stmt
, tmp
);
4060 gimple_seq_add_stmt (ilist
, stmt
);
4062 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
4063 gimplify_assign (ptr
, x
, ilist
);
4066 else if (omp_is_reference (var
))
4068 /* For references that are being privatized for Fortran,
4069 allocate new backing storage for the new pointer
4070 variable. This allows us to avoid changing all the
4071 code that expects a pointer to something that expects
4072 a direct variable. */
4076 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
4077 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
4079 x
= build_receiver_ref (var
, false, ctx
);
4080 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4082 else if (TREE_CONSTANT (x
))
4084 /* For reduction in SIMD loop, defer adding the
4085 initialization of the reference, because if we decide
4086 to use SIMD array for it, the initilization could cause
4088 if (c_kind
== OMP_CLAUSE_REDUCTION
&& is_simd
)
4092 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
4094 gimple_add_tmp_var (x
);
4095 TREE_ADDRESSABLE (x
) = 1;
4096 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4102 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4103 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
4104 tree al
= size_int (TYPE_ALIGN (rtype
));
4105 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
4110 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4111 gimplify_assign (new_var
, x
, ilist
);
4114 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
4116 else if (c_kind
== OMP_CLAUSE_REDUCTION
4117 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4125 switch (OMP_CLAUSE_CODE (c
))
4127 case OMP_CLAUSE_SHARED
:
4128 /* Ignore shared directives in teams construct. */
4129 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
4131 /* Shared global vars are just accessed directly. */
4132 if (is_global_var (new_var
))
4134 /* For taskloop firstprivate/lastprivate, represented
4135 as firstprivate and shared clause on the task, new_var
4136 is the firstprivate var. */
4137 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
4139 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4140 needs to be delayed until after fixup_child_record_type so
4141 that we get the correct type during the dereference. */
4142 by_ref
= use_pointer_for_field (var
, ctx
);
4143 x
= build_receiver_ref (var
, by_ref
, ctx
);
4144 SET_DECL_VALUE_EXPR (new_var
, x
);
4145 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4147 /* ??? If VAR is not passed by reference, and the variable
4148 hasn't been initialized yet, then we'll get a warning for
4149 the store into the omp_data_s structure. Ideally, we'd be
4150 able to notice this and not store anything at all, but
4151 we're generating code too early. Suppress the warning. */
4153 TREE_NO_WARNING (var
) = 1;
4156 case OMP_CLAUSE_LASTPRIVATE
:
4157 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4161 case OMP_CLAUSE_PRIVATE
:
4162 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
4163 x
= build_outer_var_ref (var
, ctx
);
4164 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
4166 if (is_task_ctx (ctx
))
4167 x
= build_receiver_ref (var
, false, ctx
);
4169 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
4175 nx
= lang_hooks
.decls
.omp_clause_default_ctor
4176 (c
, unshare_expr (new_var
), x
);
4179 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
4180 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
4181 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
4182 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
4186 x
= lang_hooks
.decls
.omp_clause_default_ctor
4187 (c
, unshare_expr (ivar
), x
);
4189 gimplify_and_add (x
, &llist
[0]);
4192 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
4195 gimple_seq tseq
= NULL
;
4198 gimplify_stmt (&dtor
, &tseq
);
4199 gimple_seq_add_seq (&llist
[1], tseq
);
4206 gimplify_and_add (nx
, ilist
);
4210 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
4213 gimple_seq tseq
= NULL
;
4216 gimplify_stmt (&dtor
, &tseq
);
4217 gimple_seq_add_seq (dlist
, tseq
);
4221 case OMP_CLAUSE_LINEAR
:
4222 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
4223 goto do_firstprivate
;
4224 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4227 x
= build_outer_var_ref (var
, ctx
);
4230 case OMP_CLAUSE_FIRSTPRIVATE
:
4231 if (is_task_ctx (ctx
))
4233 if (omp_is_reference (var
) || is_variable_sized (var
))
4235 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
4237 || use_pointer_for_field (var
, NULL
))
4239 x
= build_receiver_ref (var
, false, ctx
);
4240 SET_DECL_VALUE_EXPR (new_var
, x
);
4241 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4246 x
= build_outer_var_ref (var
, ctx
);
4249 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
4250 && gimple_omp_for_combined_into_p (ctx
->stmt
))
4252 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
4253 tree stept
= TREE_TYPE (t
);
4254 tree ct
= omp_find_clause (clauses
,
4255 OMP_CLAUSE__LOOPTEMP_
);
4257 tree l
= OMP_CLAUSE_DECL (ct
);
4258 tree n1
= fd
->loop
.n1
;
4259 tree step
= fd
->loop
.step
;
4260 tree itype
= TREE_TYPE (l
);
4261 if (POINTER_TYPE_P (itype
))
4262 itype
= signed_type_for (itype
);
4263 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
4264 if (TYPE_UNSIGNED (itype
)
4265 && fd
->loop
.cond_code
== GT_EXPR
)
4266 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
4267 fold_build1 (NEGATE_EXPR
, itype
, l
),
4268 fold_build1 (NEGATE_EXPR
,
4271 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
4272 t
= fold_build2 (MULT_EXPR
, stept
,
4273 fold_convert (stept
, l
), t
);
4275 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4277 x
= lang_hooks
.decls
.omp_clause_linear_ctor
4279 gimplify_and_add (x
, ilist
);
4283 if (POINTER_TYPE_P (TREE_TYPE (x
)))
4284 x
= fold_build2 (POINTER_PLUS_EXPR
,
4285 TREE_TYPE (x
), x
, t
);
4287 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
4290 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
4291 || TREE_ADDRESSABLE (new_var
))
4292 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
4295 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
4297 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
4298 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
4299 gimplify_and_add (x
, ilist
);
4300 gimple_stmt_iterator gsi
4301 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
4303 = gimple_build_assign (unshare_expr (lvar
), iv
);
4304 gsi_insert_before_without_update (&gsi
, g
,
4306 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
4307 enum tree_code code
= PLUS_EXPR
;
4308 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
4309 code
= POINTER_PLUS_EXPR
;
4310 g
= gimple_build_assign (iv
, code
, iv
, t
);
4311 gsi_insert_before_without_update (&gsi
, g
,
4315 x
= lang_hooks
.decls
.omp_clause_copy_ctor
4316 (c
, unshare_expr (ivar
), x
);
4317 gimplify_and_add (x
, &llist
[0]);
4318 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
4321 gimple_seq tseq
= NULL
;
4324 gimplify_stmt (&dtor
, &tseq
);
4325 gimple_seq_add_seq (&llist
[1], tseq
);
4330 x
= lang_hooks
.decls
.omp_clause_copy_ctor
4331 (c
, unshare_expr (new_var
), x
);
4332 gimplify_and_add (x
, ilist
);
4335 case OMP_CLAUSE__LOOPTEMP_
:
4336 gcc_assert (is_taskreg_ctx (ctx
));
4337 x
= build_outer_var_ref (var
, ctx
);
4338 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4339 gimplify_and_add (x
, ilist
);
4342 case OMP_CLAUSE_COPYIN
:
4343 by_ref
= use_pointer_for_field (var
, NULL
);
4344 x
= build_receiver_ref (var
, by_ref
, ctx
);
4345 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
4346 append_to_statement_list (x
, ©in_seq
);
4347 copyin_by_ref
|= by_ref
;
4350 case OMP_CLAUSE_REDUCTION
:
4351 /* OpenACC reductions are initialized using the
4352 GOACC_REDUCTION internal function. */
4353 if (is_gimple_omp_oacc (ctx
->stmt
))
4355 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4357 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
4359 x
= build_outer_var_ref (var
, ctx
);
4361 if (omp_is_reference (var
)
4362 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
4364 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4365 SET_DECL_VALUE_EXPR (placeholder
, x
);
4366 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
4367 tree new_vard
= new_var
;
4368 if (omp_is_reference (var
))
4370 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
4371 new_vard
= TREE_OPERAND (new_var
, 0);
4372 gcc_assert (DECL_P (new_vard
));
4375 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
4378 if (new_vard
== new_var
)
4380 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
4381 SET_DECL_VALUE_EXPR (new_var
, ivar
);
4385 SET_DECL_VALUE_EXPR (new_vard
,
4386 build_fold_addr_expr (ivar
));
4387 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
4389 x
= lang_hooks
.decls
.omp_clause_default_ctor
4390 (c
, unshare_expr (ivar
),
4391 build_outer_var_ref (var
, ctx
));
4393 gimplify_and_add (x
, &llist
[0]);
4394 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4396 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4397 lower_omp (&tseq
, ctx
);
4398 gimple_seq_add_seq (&llist
[0], tseq
);
4400 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4401 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4402 lower_omp (&tseq
, ctx
);
4403 gimple_seq_add_seq (&llist
[1], tseq
);
4404 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4405 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4406 if (new_vard
== new_var
)
4407 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4409 SET_DECL_VALUE_EXPR (new_vard
,
4410 build_fold_addr_expr (lvar
));
4411 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
4416 gimplify_stmt (&dtor
, &tseq
);
4417 gimple_seq_add_seq (&llist
[1], tseq
);
4421 /* If this is a reference to constant size reduction var
4422 with placeholder, we haven't emitted the initializer
4423 for it because it is undesirable if SIMD arrays are used.
4424 But if they aren't used, we need to emit the deferred
4425 initialization now. */
4426 else if (omp_is_reference (var
) && is_simd
)
4427 handle_simd_reference (clause_loc
, new_vard
, ilist
);
4428 x
= lang_hooks
.decls
.omp_clause_default_ctor
4429 (c
, unshare_expr (new_var
),
4430 build_outer_var_ref (var
, ctx
));
4432 gimplify_and_add (x
, ilist
);
4433 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4435 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4436 lower_omp (&tseq
, ctx
);
4437 gimple_seq_add_seq (ilist
, tseq
);
4439 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4442 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4443 lower_omp (&tseq
, ctx
);
4444 gimple_seq_add_seq (dlist
, tseq
);
4445 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4447 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4452 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
4453 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
4454 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
4456 /* reduction(-:var) sums up the partial results, so it
4457 acts identically to reduction(+:var). */
4458 if (code
== MINUS_EXPR
)
4461 tree new_vard
= new_var
;
4462 if (is_simd
&& omp_is_reference (var
))
4464 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
4465 new_vard
= TREE_OPERAND (new_var
, 0);
4466 gcc_assert (DECL_P (new_vard
));
4469 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
4472 tree ref
= build_outer_var_ref (var
, ctx
);
4474 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
4479 simt_lane
= create_tmp_var (unsigned_type_node
);
4480 x
= build_call_expr_internal_loc
4481 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
4482 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
4483 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
4484 gimplify_assign (ivar
, x
, &llist
[2]);
4486 x
= build2 (code
, TREE_TYPE (ref
), ref
, ivar
);
4487 ref
= build_outer_var_ref (var
, ctx
);
4488 gimplify_assign (ref
, x
, &llist
[1]);
4490 if (new_vard
!= new_var
)
4492 SET_DECL_VALUE_EXPR (new_vard
,
4493 build_fold_addr_expr (lvar
));
4494 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
4499 if (omp_is_reference (var
) && is_simd
)
4500 handle_simd_reference (clause_loc
, new_vard
, ilist
);
4501 gimplify_assign (new_var
, x
, ilist
);
4504 tree ref
= build_outer_var_ref (var
, ctx
);
4506 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
4507 ref
= build_outer_var_ref (var
, ctx
);
4508 gimplify_assign (ref
, x
, dlist
);
4520 if (known_eq (sctx
.max_vf
, 1U))
4521 sctx
.is_simt
= false;
4523 if (sctx
.lane
|| sctx
.is_simt
)
4525 uid
= create_tmp_var (ptr_type_node
, "simduid");
4526 /* Don't want uninit warnings on simduid, it is always uninitialized,
4527 but we use it not for the value, but for the DECL_UID only. */
4528 TREE_NO_WARNING (uid
) = 1;
4529 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
4530 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
4531 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
4532 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
4534 /* Emit calls denoting privatized variables and initializing a pointer to
4535 structure that holds private variables as fields after ompdevlow pass. */
4538 sctx
.simt_eargs
[0] = uid
;
4540 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
4541 gimple_call_set_lhs (g
, uid
);
4542 gimple_seq_add_stmt (ilist
, g
);
4543 sctx
.simt_eargs
.release ();
4545 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
4546 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
4547 gimple_call_set_lhs (g
, simtrec
);
4548 gimple_seq_add_stmt (ilist
, g
);
4553 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 1, uid
);
4554 gimple_call_set_lhs (g
, sctx
.lane
);
4555 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
4556 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
4557 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
4558 build_int_cst (unsigned_type_node
, 0));
4559 gimple_seq_add_stmt (ilist
, g
);
4560 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4563 tree simt_vf
= create_tmp_var (unsigned_type_node
);
4564 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
4565 gimple_call_set_lhs (g
, simt_vf
);
4566 gimple_seq_add_stmt (dlist
, g
);
4568 tree t
= build_int_cst (unsigned_type_node
, 1);
4569 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
4570 gimple_seq_add_stmt (dlist
, g
);
4572 t
= build_int_cst (unsigned_type_node
, 0);
4573 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
4574 gimple_seq_add_stmt (dlist
, g
);
4576 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
4577 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
4578 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4579 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
4580 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
4582 gimple_seq_add_seq (dlist
, llist
[2]);
4584 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
4585 gimple_seq_add_stmt (dlist
, g
);
4587 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
4588 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
4589 gimple_seq_add_stmt (dlist
, g
);
4591 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
4593 for (int i
= 0; i
< 2; i
++)
4596 tree vf
= create_tmp_var (unsigned_type_node
);
4597 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
4598 gimple_call_set_lhs (g
, vf
);
4599 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
4600 gimple_seq_add_stmt (seq
, g
);
4601 tree t
= build_int_cst (unsigned_type_node
, 0);
4602 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
4603 gimple_seq_add_stmt (seq
, g
);
4604 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
4605 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
4606 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4607 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
4608 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
4609 gimple_seq_add_seq (seq
, llist
[i
]);
4610 t
= build_int_cst (unsigned_type_node
, 1);
4611 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
4612 gimple_seq_add_stmt (seq
, g
);
4613 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
4614 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
4615 gimple_seq_add_stmt (seq
, g
);
4616 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
4621 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
4623 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
4624 gimple_seq_add_stmt (dlist
, g
);
4627 /* The copyin sequence is not to be executed by the main thread, since
4628 that would result in self-copies. Perhaps not visible to scalars,
4629 but it certainly is to C++ operator=. */
4632 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
4634 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
4635 build_int_cst (TREE_TYPE (x
), 0));
4636 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
4637 gimplify_and_add (x
, ilist
);
4640 /* If any copyin variable is passed by reference, we must ensure the
4641 master thread doesn't modify it before it is copied over in all
4642 threads. Similarly for variables in both firstprivate and
4643 lastprivate clauses we need to ensure the lastprivate copying
4644 happens after firstprivate copying in all threads. And similarly
4645 for UDRs if initializer expression refers to omp_orig. */
4646 if (copyin_by_ref
|| lastprivate_firstprivate
|| reduction_omp_orig_ref
)
4648 /* Don't add any barrier for #pragma omp simd or
4649 #pragma omp distribute. */
4650 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
4651 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
)
4652 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
4655 /* If max_vf is non-zero, then we can use only a vectorization factor
4656 up to the max_vf we chose. So stick it into the safelen clause. */
4657 if (maybe_ne (sctx
.max_vf
, 0U))
4659 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4660 OMP_CLAUSE_SAFELEN
);
4661 poly_uint64 safe_len
;
4663 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4664 && maybe_gt (safe_len
, sctx
.max_vf
)))
4666 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
4667 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
4669 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
4670 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
4676 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4677 both parallel and workshare constructs. PREDICATE may be NULL if it's
4681 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*stmt_list
,
4684 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
4685 bool par_clauses
= false;
4686 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
4688 /* Early exit if there are no lastprivate or linear clauses. */
4689 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
4690 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
4691 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
4692 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
4694 if (clauses
== NULL
)
4696 /* If this was a workshare clause, see if it had been combined
4697 with its parallel. In that case, look for the clauses on the
4698 parallel statement itself. */
4699 if (is_parallel_ctx (ctx
))
4703 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
4706 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
4707 OMP_CLAUSE_LASTPRIVATE
);
4708 if (clauses
== NULL
)
4713 bool maybe_simt
= false;
4714 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4715 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
4717 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
4718 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
4720 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
4726 tree label_true
, arm1
, arm2
;
4727 enum tree_code pred_code
= TREE_CODE (predicate
);
4729 label
= create_artificial_label (UNKNOWN_LOCATION
);
4730 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
4731 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
4733 arm1
= TREE_OPERAND (predicate
, 0);
4734 arm2
= TREE_OPERAND (predicate
, 1);
4735 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
4736 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
4741 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
4742 arm2
= boolean_false_node
;
4743 pred_code
= NE_EXPR
;
4747 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
4748 c
= fold_convert (integer_type_node
, c
);
4749 simtcond
= create_tmp_var (integer_type_node
);
4750 gimplify_assign (simtcond
, c
, stmt_list
);
4751 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
4753 c
= create_tmp_var (integer_type_node
);
4754 gimple_call_set_lhs (g
, c
);
4755 gimple_seq_add_stmt (stmt_list
, g
);
4756 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
4760 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
4761 gimple_seq_add_stmt (stmt_list
, stmt
);
4762 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
4765 for (c
= clauses
; c
;)
4768 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4770 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4771 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
4772 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
4774 var
= OMP_CLAUSE_DECL (c
);
4775 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4776 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
4777 && is_taskloop_ctx (ctx
))
4779 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
4780 new_var
= lookup_decl (var
, ctx
->outer
);
4784 new_var
= lookup_decl (var
, ctx
);
4785 /* Avoid uninitialized warnings for lastprivate and
4786 for linear iterators. */
4788 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4789 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
4790 TREE_NO_WARNING (new_var
) = 1;
4793 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
4795 tree val
= DECL_VALUE_EXPR (new_var
);
4796 if (TREE_CODE (val
) == ARRAY_REF
4797 && VAR_P (TREE_OPERAND (val
, 0))
4798 && lookup_attribute ("omp simd array",
4799 DECL_ATTRIBUTES (TREE_OPERAND (val
,
4802 if (lastlane
== NULL
)
4804 lastlane
= create_tmp_var (unsigned_type_node
);
4806 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
4808 TREE_OPERAND (val
, 1));
4809 gimple_call_set_lhs (g
, lastlane
);
4810 gimple_seq_add_stmt (stmt_list
, g
);
4812 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
4813 TREE_OPERAND (val
, 0), lastlane
,
4814 NULL_TREE
, NULL_TREE
);
4817 else if (maybe_simt
)
4819 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
4820 ? DECL_VALUE_EXPR (new_var
)
4822 if (simtlast
== NULL
)
4824 simtlast
= create_tmp_var (unsigned_type_node
);
4825 gcall
*g
= gimple_build_call_internal
4826 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
4827 gimple_call_set_lhs (g
, simtlast
);
4828 gimple_seq_add_stmt (stmt_list
, g
);
4830 x
= build_call_expr_internal_loc
4831 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
4832 TREE_TYPE (val
), 2, val
, simtlast
);
4833 new_var
= unshare_expr (new_var
);
4834 gimplify_assign (new_var
, x
, stmt_list
);
4835 new_var
= unshare_expr (new_var
);
4838 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4839 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
4841 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
4842 gimple_seq_add_seq (stmt_list
,
4843 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
4844 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
4846 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
4847 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
4849 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
4850 gimple_seq_add_seq (stmt_list
,
4851 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
4852 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
4856 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4857 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c
))
4859 gcc_checking_assert (is_taskloop_ctx (ctx
));
4860 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
4862 if (is_global_var (ovar
))
4866 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
4867 if (omp_is_reference (var
))
4868 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
4869 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
4870 gimplify_and_add (x
, stmt_list
);
4872 c
= OMP_CLAUSE_CHAIN (c
);
4873 if (c
== NULL
&& !par_clauses
)
4875 /* If this was a workshare clause, see if it had been combined
4876 with its parallel. In that case, continue looking for the
4877 clauses also on the parallel statement itself. */
4878 if (is_parallel_ctx (ctx
))
4882 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
4885 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
4886 OMP_CLAUSE_LASTPRIVATE
);
4892 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
4895 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4896 (which might be a placeholder). INNER is true if this is an inner
4897 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4898 join markers. Generate the before-loop forking sequence in
4899 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4900 general form of these sequences is
4902 GOACC_REDUCTION_SETUP
4904 GOACC_REDUCTION_INIT
4906 GOACC_REDUCTION_FINI
4908 GOACC_REDUCTION_TEARDOWN. */
4911 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
4912 gcall
*fork
, gcall
*join
, gimple_seq
*fork_seq
,
4913 gimple_seq
*join_seq
, omp_context
*ctx
)
4915 gimple_seq before_fork
= NULL
;
4916 gimple_seq after_fork
= NULL
;
4917 gimple_seq before_join
= NULL
;
4918 gimple_seq after_join
= NULL
;
4919 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
4920 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
4921 unsigned offset
= 0;
4923 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4924 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
4926 tree orig
= OMP_CLAUSE_DECL (c
);
4927 tree var
= maybe_lookup_decl (orig
, ctx
);
4928 tree ref_to_res
= NULL_TREE
;
4929 tree incoming
, outgoing
, v1
, v2
, v3
;
4930 bool is_private
= false;
4932 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
4933 if (rcode
== MINUS_EXPR
)
4935 else if (rcode
== TRUTH_ANDIF_EXPR
)
4936 rcode
= BIT_AND_EXPR
;
4937 else if (rcode
== TRUTH_ORIF_EXPR
)
4938 rcode
= BIT_IOR_EXPR
;
4939 tree op
= build_int_cst (unsigned_type_node
, rcode
);
4944 incoming
= outgoing
= var
;
4948 /* See if an outer construct also reduces this variable. */
4949 omp_context
*outer
= ctx
;
4951 while (omp_context
*probe
= outer
->outer
)
4953 enum gimple_code type
= gimple_code (probe
->stmt
);
4958 case GIMPLE_OMP_FOR
:
4959 cls
= gimple_omp_for_clauses (probe
->stmt
);
4962 case GIMPLE_OMP_TARGET
:
4963 if (gimple_omp_target_kind (probe
->stmt
)
4964 != GF_OMP_TARGET_KIND_OACC_PARALLEL
)
4967 cls
= gimple_omp_target_clauses (probe
->stmt
);
4975 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
4976 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
4977 && orig
== OMP_CLAUSE_DECL (cls
))
4979 incoming
= outgoing
= lookup_decl (orig
, probe
);
4980 goto has_outer_reduction
;
4982 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
4983 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
4984 && orig
== OMP_CLAUSE_DECL (cls
))
4992 /* This is the outermost construct with this reduction,
4993 see if there's a mapping for it. */
4994 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
4995 && maybe_lookup_field (orig
, outer
) && !is_private
)
4997 ref_to_res
= build_receiver_ref (orig
, false, outer
);
4998 if (omp_is_reference (orig
))
4999 ref_to_res
= build_simple_mem_ref (ref_to_res
);
5001 tree type
= TREE_TYPE (var
);
5002 if (POINTER_TYPE_P (type
))
5003 type
= TREE_TYPE (type
);
5006 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
5010 /* Try to look at enclosing contexts for reduction var,
5011 use original if no mapping found. */
5013 omp_context
*c
= ctx
->outer
;
5016 t
= maybe_lookup_decl (orig
, c
);
5019 incoming
= outgoing
= (t
? t
: orig
);
5022 has_outer_reduction
:;
5026 ref_to_res
= integer_zero_node
;
5028 if (omp_is_reference (orig
))
5030 tree type
= TREE_TYPE (var
);
5031 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
5035 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
5036 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
5039 v1
= create_tmp_var (type
, id
);
5040 v2
= create_tmp_var (type
, id
);
5041 v3
= create_tmp_var (type
, id
);
5043 gimplify_assign (v1
, var
, fork_seq
);
5044 gimplify_assign (v2
, var
, fork_seq
);
5045 gimplify_assign (v3
, var
, fork_seq
);
5047 var
= build_simple_mem_ref (var
);
5048 v1
= build_simple_mem_ref (v1
);
5049 v2
= build_simple_mem_ref (v2
);
5050 v3
= build_simple_mem_ref (v3
);
5051 outgoing
= build_simple_mem_ref (outgoing
);
5053 if (!TREE_CONSTANT (incoming
))
5054 incoming
= build_simple_mem_ref (incoming
);
5059 /* Determine position in reduction buffer, which may be used
5060 by target. The parser has ensured that this is not a
5061 variable-sized type. */
5062 fixed_size_mode mode
5063 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
5064 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
5065 offset
= (offset
+ align
- 1) & ~(align
- 1);
5066 tree off
= build_int_cst (sizetype
, offset
);
5067 offset
+= GET_MODE_SIZE (mode
);
5071 init_code
= build_int_cst (integer_type_node
,
5072 IFN_GOACC_REDUCTION_INIT
);
5073 fini_code
= build_int_cst (integer_type_node
,
5074 IFN_GOACC_REDUCTION_FINI
);
5075 setup_code
= build_int_cst (integer_type_node
,
5076 IFN_GOACC_REDUCTION_SETUP
);
5077 teardown_code
= build_int_cst (integer_type_node
,
5078 IFN_GOACC_REDUCTION_TEARDOWN
);
5082 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
5083 TREE_TYPE (var
), 6, setup_code
,
5084 unshare_expr (ref_to_res
),
5085 incoming
, level
, op
, off
);
5087 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
5088 TREE_TYPE (var
), 6, init_code
,
5089 unshare_expr (ref_to_res
),
5090 v1
, level
, op
, off
);
5092 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
5093 TREE_TYPE (var
), 6, fini_code
,
5094 unshare_expr (ref_to_res
),
5095 v2
, level
, op
, off
);
5097 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
5098 TREE_TYPE (var
), 6, teardown_code
,
5099 ref_to_res
, v3
, level
, op
, off
);
5101 gimplify_assign (v1
, setup_call
, &before_fork
);
5102 gimplify_assign (v2
, init_call
, &after_fork
);
5103 gimplify_assign (v3
, fini_call
, &before_join
);
5104 gimplify_assign (outgoing
, teardown_call
, &after_join
);
5107 /* Now stitch things together. */
5108 gimple_seq_add_seq (fork_seq
, before_fork
);
5110 gimple_seq_add_stmt (fork_seq
, fork
);
5111 gimple_seq_add_seq (fork_seq
, after_fork
);
5113 gimple_seq_add_seq (join_seq
, before_join
);
5115 gimple_seq_add_stmt (join_seq
, join
);
5116 gimple_seq_add_seq (join_seq
, after_join
);
5119 /* Generate code to implement the REDUCTION clauses. */
5122 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
, omp_context
*ctx
)
5124 gimple_seq sub_seq
= NULL
;
5129 /* OpenACC loop reductions are handled elsewhere. */
5130 if (is_gimple_omp_oacc (ctx
->stmt
))
5133 /* SIMD reductions are handled in lower_rec_input_clauses. */
5134 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
5135 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
5138 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5139 update in that case, otherwise use a lock. */
5140 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
5141 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
5143 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
5144 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
5146 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5156 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5158 tree var
, ref
, new_var
, orig_var
;
5159 enum tree_code code
;
5160 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5162 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5165 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
5166 orig_var
= var
= OMP_CLAUSE_DECL (c
);
5167 if (TREE_CODE (var
) == MEM_REF
)
5169 var
= TREE_OPERAND (var
, 0);
5170 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
5171 var
= TREE_OPERAND (var
, 0);
5172 if (TREE_CODE (var
) == ADDR_EXPR
)
5173 var
= TREE_OPERAND (var
, 0);
5176 /* If this is a pointer or referenced based array
5177 section, the var could be private in the outer
5178 context e.g. on orphaned loop construct. Pretend this
5179 is private variable's outer reference. */
5180 ccode
= OMP_CLAUSE_PRIVATE
;
5181 if (TREE_CODE (var
) == INDIRECT_REF
)
5182 var
= TREE_OPERAND (var
, 0);
5185 if (is_variable_sized (var
))
5187 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
5188 var
= DECL_VALUE_EXPR (var
);
5189 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
5190 var
= TREE_OPERAND (var
, 0);
5191 gcc_assert (DECL_P (var
));
5194 new_var
= lookup_decl (var
, ctx
);
5195 if (var
== OMP_CLAUSE_DECL (c
) && omp_is_reference (var
))
5196 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5197 ref
= build_outer_var_ref (var
, ctx
, ccode
);
5198 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5200 /* reduction(-:var) sums up the partial results, so it acts
5201 identically to reduction(+:var). */
5202 if (code
== MINUS_EXPR
)
5207 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
5209 addr
= save_expr (addr
);
5210 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
5211 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (ref
), ref
, new_var
);
5212 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
5213 gimplify_and_add (x
, stmt_seqp
);
5216 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
5218 tree d
= OMP_CLAUSE_DECL (c
);
5219 tree type
= TREE_TYPE (d
);
5220 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
5221 tree i
= create_tmp_var (TREE_TYPE (v
), NULL
);
5222 tree ptype
= build_pointer_type (TREE_TYPE (type
));
5223 tree bias
= TREE_OPERAND (d
, 1);
5224 d
= TREE_OPERAND (d
, 0);
5225 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
5227 tree b
= TREE_OPERAND (d
, 1);
5228 b
= maybe_lookup_decl (b
, ctx
);
5231 b
= TREE_OPERAND (d
, 1);
5232 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
5234 if (integer_zerop (bias
))
5238 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
5239 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5240 TREE_TYPE (b
), b
, bias
);
5242 d
= TREE_OPERAND (d
, 0);
5244 /* For ref build_outer_var_ref already performs this, so
5245 only new_var needs a dereference. */
5246 if (TREE_CODE (d
) == INDIRECT_REF
)
5248 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5249 gcc_assert (omp_is_reference (var
) && var
== orig_var
);
5251 else if (TREE_CODE (d
) == ADDR_EXPR
)
5253 if (orig_var
== var
)
5255 new_var
= build_fold_addr_expr (new_var
);
5256 ref
= build_fold_addr_expr (ref
);
5261 gcc_assert (orig_var
== var
);
5262 if (omp_is_reference (var
))
5263 ref
= build_fold_addr_expr (ref
);
5267 tree t
= maybe_lookup_decl (v
, ctx
);
5271 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5272 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
5274 if (!integer_zerop (bias
))
5276 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
5277 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
5278 TREE_TYPE (new_var
), new_var
,
5279 unshare_expr (bias
));
5280 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
5281 TREE_TYPE (ref
), ref
, bias
);
5283 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
5284 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
5285 tree m
= create_tmp_var (ptype
, NULL
);
5286 gimplify_assign (m
, new_var
, stmt_seqp
);
5288 m
= create_tmp_var (ptype
, NULL
);
5289 gimplify_assign (m
, ref
, stmt_seqp
);
5291 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
5292 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5293 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5294 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
5295 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5296 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
5297 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5299 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5300 tree decl_placeholder
5301 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
5302 SET_DECL_VALUE_EXPR (placeholder
, out
);
5303 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5304 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
5305 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
5306 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
5307 gimple_seq_add_seq (&sub_seq
,
5308 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
5309 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5310 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
5311 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
5315 x
= build2 (code
, TREE_TYPE (out
), out
, priv
);
5316 out
= unshare_expr (out
);
5317 gimplify_assign (out
, x
, &sub_seq
);
5319 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
5320 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5321 gimple_seq_add_stmt (&sub_seq
, g
);
5322 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
5323 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5324 gimple_seq_add_stmt (&sub_seq
, g
);
5325 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
5326 build_int_cst (TREE_TYPE (i
), 1));
5327 gimple_seq_add_stmt (&sub_seq
, g
);
5328 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
5329 gimple_seq_add_stmt (&sub_seq
, g
);
5330 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
5332 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5334 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5336 if (omp_is_reference (var
)
5337 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
5339 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
5340 SET_DECL_VALUE_EXPR (placeholder
, ref
);
5341 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5342 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
5343 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
5344 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5345 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
5349 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
5350 ref
= build_outer_var_ref (var
, ctx
);
5351 gimplify_assign (ref
, x
, &sub_seq
);
5355 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
5357 gimple_seq_add_stmt (stmt_seqp
, stmt
);
5359 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
5361 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
5363 gimple_seq_add_stmt (stmt_seqp
, stmt
);
5367 /* Generate code to implement the COPYPRIVATE clauses. */
5370 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
5375 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5377 tree var
, new_var
, ref
, x
;
5379 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5381 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
5384 var
= OMP_CLAUSE_DECL (c
);
5385 by_ref
= use_pointer_for_field (var
, NULL
);
5387 ref
= build_sender_ref (var
, ctx
);
5388 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
5391 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
5392 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
5394 gimplify_assign (ref
, x
, slist
);
5396 ref
= build_receiver_ref (var
, false, ctx
);
5399 ref
= fold_convert_loc (clause_loc
,
5400 build_pointer_type (TREE_TYPE (new_var
)),
5402 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
5404 if (omp_is_reference (var
))
5406 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
5407 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
5408 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5410 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
5411 gimplify_and_add (x
, rlist
);
5416 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5417 and REDUCTION from the sender (aka parent) side. */
5420 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
5424 int ignored_looptemp
= 0;
5425 bool is_taskloop
= false;
5427 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5428 by GOMP_taskloop. */
5429 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
5431 ignored_looptemp
= 2;
5435 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5437 tree val
, ref
, x
, var
;
5438 bool by_ref
, do_in
= false, do_out
= false;
5439 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5441 switch (OMP_CLAUSE_CODE (c
))
5443 case OMP_CLAUSE_PRIVATE
:
5444 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5447 case OMP_CLAUSE_FIRSTPRIVATE
:
5448 case OMP_CLAUSE_COPYIN
:
5449 case OMP_CLAUSE_LASTPRIVATE
:
5450 case OMP_CLAUSE_REDUCTION
:
5452 case OMP_CLAUSE_SHARED
:
5453 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5456 case OMP_CLAUSE__LOOPTEMP_
:
5457 if (ignored_looptemp
)
5467 val
= OMP_CLAUSE_DECL (c
);
5468 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5469 && TREE_CODE (val
) == MEM_REF
)
5471 val
= TREE_OPERAND (val
, 0);
5472 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
5473 val
= TREE_OPERAND (val
, 0);
5474 if (TREE_CODE (val
) == INDIRECT_REF
5475 || TREE_CODE (val
) == ADDR_EXPR
)
5476 val
= TREE_OPERAND (val
, 0);
5477 if (is_variable_sized (val
))
5481 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5482 outer taskloop region. */
5483 omp_context
*ctx_for_o
= ctx
;
5485 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
5486 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5487 ctx_for_o
= ctx
->outer
;
5489 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
5491 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
5492 && is_global_var (var
))
5495 t
= omp_member_access_dummy_var (var
);
5498 var
= DECL_VALUE_EXPR (var
);
5499 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
5501 var
= unshare_and_remap (var
, t
, o
);
5503 var
= unshare_expr (var
);
5506 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
5508 /* Handle taskloop firstprivate/lastprivate, where the
5509 lastprivate on GIMPLE_OMP_TASK is represented as
5510 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5511 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
5512 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
5513 if (use_pointer_for_field (val
, ctx
))
5514 var
= build_fold_addr_expr (var
);
5515 gimplify_assign (x
, var
, ilist
);
5516 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
5520 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
5521 || val
== OMP_CLAUSE_DECL (c
))
5522 && is_variable_sized (val
))
5524 by_ref
= use_pointer_for_field (val
, NULL
);
5526 switch (OMP_CLAUSE_CODE (c
))
5528 case OMP_CLAUSE_FIRSTPRIVATE
:
5529 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
5531 && is_task_ctx (ctx
))
5532 TREE_NO_WARNING (var
) = 1;
5536 case OMP_CLAUSE_PRIVATE
:
5537 case OMP_CLAUSE_COPYIN
:
5538 case OMP_CLAUSE__LOOPTEMP_
:
5542 case OMP_CLAUSE_LASTPRIVATE
:
5543 if (by_ref
|| omp_is_reference (val
))
5545 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5552 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
5557 case OMP_CLAUSE_REDUCTION
:
5559 if (val
== OMP_CLAUSE_DECL (c
))
5560 do_out
= !(by_ref
|| omp_is_reference (val
));
5562 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
5571 ref
= build_sender_ref (val
, ctx
);
5572 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
5573 gimplify_assign (ref
, x
, ilist
);
5574 if (is_task_ctx (ctx
))
5575 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
5580 ref
= build_sender_ref (val
, ctx
);
5581 gimplify_assign (var
, ref
, olist
);
5586 /* Generate code to implement SHARED from the sender (aka parent)
5587 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5588 list things that got automatically shared. */
5591 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
5593 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
5595 if (ctx
->record_type
== NULL
)
5598 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
5599 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
5601 ovar
= DECL_ABSTRACT_ORIGIN (f
);
5602 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
5605 nvar
= maybe_lookup_decl (ovar
, ctx
);
5606 if (!nvar
|| !DECL_HAS_VALUE_EXPR_P (nvar
))
5609 /* If CTX is a nested parallel directive. Find the immediately
5610 enclosing parallel or workshare construct that contains a
5611 mapping for OVAR. */
5612 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
5614 t
= omp_member_access_dummy_var (var
);
5617 var
= DECL_VALUE_EXPR (var
);
5618 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
5620 var
= unshare_and_remap (var
, t
, o
);
5622 var
= unshare_expr (var
);
5625 if (use_pointer_for_field (ovar
, ctx
))
5627 x
= build_sender_ref (ovar
, ctx
);
5628 var
= build_fold_addr_expr (var
);
5629 gimplify_assign (x
, var
, ilist
);
5633 x
= build_sender_ref (ovar
, ctx
);
5634 gimplify_assign (x
, var
, ilist
);
5636 if (!TREE_READONLY (var
)
5637 /* We don't need to receive a new reference to a result
5638 or parm decl. In fact we may not store to it as we will
5639 invalidate any pending RSO and generate wrong gimple
5641 && !((TREE_CODE (var
) == RESULT_DECL
5642 || TREE_CODE (var
) == PARM_DECL
)
5643 && DECL_BY_REFERENCE (var
)))
5645 x
= build_sender_ref (ovar
, ctx
);
5646 gimplify_assign (var
, x
, olist
);
5652 /* Emit an OpenACC head marker call, encapulating the partitioning and
5653 other information that must be processed by the target compiler.
5654 Return the maximum number of dimensions the associated loop might
5655 be partitioned over. */
5658 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
5659 gimple_seq
*seq
, omp_context
*ctx
)
5661 unsigned levels
= 0;
5663 tree gang_static
= NULL_TREE
;
5664 auto_vec
<tree
, 5> args
;
5666 args
.quick_push (build_int_cst
5667 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
5668 args
.quick_push (ddvar
);
5669 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5671 switch (OMP_CLAUSE_CODE (c
))
5673 case OMP_CLAUSE_GANG
:
5674 tag
|= OLF_DIM_GANG
;
5675 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
5676 /* static:* is represented by -1, and we can ignore it, as
5677 scheduling is always static. */
5678 if (gang_static
&& integer_minus_onep (gang_static
))
5679 gang_static
= NULL_TREE
;
5683 case OMP_CLAUSE_WORKER
:
5684 tag
|= OLF_DIM_WORKER
;
5688 case OMP_CLAUSE_VECTOR
:
5689 tag
|= OLF_DIM_VECTOR
;
5693 case OMP_CLAUSE_SEQ
:
5697 case OMP_CLAUSE_AUTO
:
5701 case OMP_CLAUSE_INDEPENDENT
:
5702 tag
|= OLF_INDEPENDENT
;
5705 case OMP_CLAUSE_TILE
:
5716 if (DECL_P (gang_static
))
5717 gang_static
= build_outer_var_ref (gang_static
, ctx
);
5718 tag
|= OLF_GANG_STATIC
;
5721 /* In a parallel region, loops are implicitly INDEPENDENT. */
5722 omp_context
*tgt
= enclosing_target_ctx (ctx
);
5723 if (!tgt
|| is_oacc_parallel (tgt
))
5724 tag
|= OLF_INDEPENDENT
;
5727 /* Tiling could use all 3 levels. */
5731 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5732 Ensure at least one level, or 2 for possible auto
5734 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
5735 << OLF_DIM_BASE
) | OLF_SEQ
));
5737 if (levels
< 1u + maybe_auto
)
5738 levels
= 1u + maybe_auto
;
5741 args
.quick_push (build_int_cst (integer_type_node
, levels
));
5742 args
.quick_push (build_int_cst (integer_type_node
, tag
));
5744 args
.quick_push (gang_static
);
5746 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
5747 gimple_set_location (call
, loc
);
5748 gimple_set_lhs (call
, ddvar
);
5749 gimple_seq_add_stmt (seq
, call
);
5754 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
5755 partitioning level of the enclosed region. */
5758 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
5759 tree tofollow
, gimple_seq
*seq
)
5761 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
5762 : IFN_UNIQUE_OACC_TAIL_MARK
);
5763 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
5764 int nargs
= 2 + (tofollow
!= NULL_TREE
);
5765 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
5766 marker
, ddvar
, tofollow
);
5767 gimple_set_location (call
, loc
);
5768 gimple_set_lhs (call
, ddvar
);
5769 gimple_seq_add_stmt (seq
, call
);
5772 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5773 the loop clauses, from which we extract reductions. Initialize
5777 lower_oacc_head_tail (location_t loc
, tree clauses
,
5778 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
5781 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
5782 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
5784 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
5785 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
5786 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
5789 for (unsigned done
= 1; count
; count
--, done
++)
5791 gimple_seq fork_seq
= NULL
;
5792 gimple_seq join_seq
= NULL
;
5794 tree place
= build_int_cst (integer_type_node
, -1);
5795 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
5796 fork_kind
, ddvar
, place
);
5797 gimple_set_location (fork
, loc
);
5798 gimple_set_lhs (fork
, ddvar
);
5800 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
5801 join_kind
, ddvar
, place
);
5802 gimple_set_location (join
, loc
);
5803 gimple_set_lhs (join
, ddvar
);
5805 /* Mark the beginning of this level sequence. */
5807 lower_oacc_loop_marker (loc
, ddvar
, true,
5808 build_int_cst (integer_type_node
, count
),
5810 lower_oacc_loop_marker (loc
, ddvar
, false,
5811 build_int_cst (integer_type_node
, done
),
5814 lower_oacc_reductions (loc
, clauses
, place
, inner
,
5815 fork
, join
, &fork_seq
, &join_seq
, ctx
);
5817 /* Append this level to head. */
5818 gimple_seq_add_seq (head
, fork_seq
);
5819 /* Prepend it to tail. */
5820 gimple_seq_add_seq (&join_seq
, *tail
);
5826 /* Mark the end of the sequence. */
5827 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
5828 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
5831 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5832 catch handler and return it. This prevents programs from violating the
5833 structured block semantics with throws. */
5836 maybe_catch_exception (gimple_seq body
)
5841 if (!flag_exceptions
)
5844 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
5845 decl
= lang_hooks
.eh_protect_cleanup_actions ();
5847 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
5849 g
= gimple_build_eh_must_not_throw (decl
);
5850 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
5853 return gimple_seq_alloc_with_stmt (g
);
5857 /* Routines to lower OMP directives into OMP-GIMPLE. */
5859 /* If ctx is a worksharing context inside of a cancellable parallel
5860 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5861 and conditional branch to parallel's cancel_label to handle
5862 cancellation in the implicit barrier. */
5865 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple_seq
*body
)
5867 gimple
*omp_return
= gimple_seq_last_stmt (*body
);
5868 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
5869 if (gimple_omp_return_nowait_p (omp_return
))
5872 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_PARALLEL
5873 && ctx
->outer
->cancellable
)
5875 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
5876 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
5877 tree lhs
= create_tmp_var (c_bool_type
);
5878 gimple_omp_return_set_lhs (omp_return
, lhs
);
5879 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
5880 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
5881 fold_convert (c_bool_type
,
5882 boolean_false_node
),
5883 ctx
->outer
->cancel_label
, fallthru_label
);
5884 gimple_seq_add_stmt (body
, g
);
5885 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
5889 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5890 CTX is the enclosing OMP context for the current statement. */
5893 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
5895 tree block
, control
;
5896 gimple_stmt_iterator tgsi
;
5897 gomp_sections
*stmt
;
5899 gbind
*new_stmt
, *bind
;
5900 gimple_seq ilist
, dlist
, olist
, new_body
;
5902 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
5904 push_gimplify_context ();
5908 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
5909 &ilist
, &dlist
, ctx
, NULL
);
5911 new_body
= gimple_omp_body (stmt
);
5912 gimple_omp_set_body (stmt
, NULL
);
5913 tgsi
= gsi_start (new_body
);
5914 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
5919 sec_start
= gsi_stmt (tgsi
);
5920 sctx
= maybe_lookup_ctx (sec_start
);
5923 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
5924 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
5925 GSI_CONTINUE_LINKING
);
5926 gimple_omp_set_body (sec_start
, NULL
);
5928 if (gsi_one_before_end_p (tgsi
))
5930 gimple_seq l
= NULL
;
5931 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
5933 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
5934 gimple_omp_section_set_last (sec_start
);
5937 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
5938 GSI_CONTINUE_LINKING
);
5941 block
= make_node (BLOCK
);
5942 bind
= gimple_build_bind (NULL
, new_body
, block
);
5945 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
, ctx
);
5947 block
= make_node (BLOCK
);
5948 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
5949 gsi_replace (gsi_p
, new_stmt
, true);
5951 pop_gimplify_context (new_stmt
);
5952 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
5953 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
5954 if (BLOCK_VARS (block
))
5955 TREE_USED (block
) = 1;
5958 gimple_seq_add_seq (&new_body
, ilist
);
5959 gimple_seq_add_stmt (&new_body
, stmt
);
5960 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
5961 gimple_seq_add_stmt (&new_body
, bind
);
5963 control
= create_tmp_var (unsigned_type_node
, ".section");
5964 t
= gimple_build_omp_continue (control
, control
);
5965 gimple_omp_sections_set_control (stmt
, control
);
5966 gimple_seq_add_stmt (&new_body
, t
);
5968 gimple_seq_add_seq (&new_body
, olist
);
5969 if (ctx
->cancellable
)
5970 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
5971 gimple_seq_add_seq (&new_body
, dlist
);
5973 new_body
= maybe_catch_exception (new_body
);
5975 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
5976 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
5977 t
= gimple_build_omp_return (nowait
);
5978 gimple_seq_add_stmt (&new_body
, t
);
5979 maybe_add_implicit_barrier_cancel (ctx
, &new_body
);
5981 gimple_bind_set_body (new_stmt
, new_body
);
5985 /* A subroutine of lower_omp_single. Expand the simple form of
5986 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5988 if (GOMP_single_start ())
5990 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5992 FIXME. It may be better to delay expanding the logic of this until
5993 pass_expand_omp. The expanded logic may make the job more difficult
5994 to a synchronization analysis pass. */
5997 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
5999 location_t loc
= gimple_location (single_stmt
);
6000 tree tlabel
= create_artificial_label (loc
);
6001 tree flabel
= create_artificial_label (loc
);
6002 gimple
*call
, *cond
;
6005 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
6006 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
6007 call
= gimple_build_call (decl
, 0);
6008 gimple_call_set_lhs (call
, lhs
);
6009 gimple_seq_add_stmt (pre_p
, call
);
6011 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
6012 fold_convert_loc (loc
, TREE_TYPE (lhs
),
6015 gimple_seq_add_stmt (pre_p
, cond
);
6016 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
6017 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
6018 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
6022 /* A subroutine of lower_omp_single. Expand the simple form of
6023 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6025 #pragma omp single copyprivate (a, b, c)
6027 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6030 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6036 GOMP_single_copy_end (©out);
6047 FIXME. It may be better to delay expanding the logic of this until
6048 pass_expand_omp. The expanded logic may make the job more difficult
6049 to a synchronization analysis pass. */
6052 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
6055 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
6056 gimple_seq copyin_seq
;
6057 location_t loc
= gimple_location (single_stmt
);
6059 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
6061 ptr_type
= build_pointer_type (ctx
->record_type
);
6062 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
6064 l0
= create_artificial_label (loc
);
6065 l1
= create_artificial_label (loc
);
6066 l2
= create_artificial_label (loc
);
6068 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
6069 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
6070 t
= fold_convert_loc (loc
, ptr_type
, t
);
6071 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
6073 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
6074 build_int_cst (ptr_type
, 0));
6075 t
= build3 (COND_EXPR
, void_type_node
, t
,
6076 build_and_jump (&l0
), build_and_jump (&l1
));
6077 gimplify_and_add (t
, pre_p
);
6079 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
6081 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
6084 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
6087 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
6088 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
6089 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
6090 gimplify_and_add (t
, pre_p
);
6092 t
= build_and_jump (&l2
);
6093 gimplify_and_add (t
, pre_p
);
6095 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
6097 gimple_seq_add_seq (pre_p
, copyin_seq
);
6099 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
6103 /* Expand code for an OpenMP single directive. */
6106 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6109 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
6111 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
6113 push_gimplify_context ();
6115 block
= make_node (BLOCK
);
6116 bind
= gimple_build_bind (NULL
, NULL
, block
);
6117 gsi_replace (gsi_p
, bind
, true);
6120 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
6121 &bind_body
, &dlist
, ctx
, NULL
);
6122 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
6124 gimple_seq_add_stmt (&bind_body
, single_stmt
);
6126 if (ctx
->record_type
)
6127 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
6129 lower_omp_single_simple (single_stmt
, &bind_body
);
6131 gimple_omp_set_body (single_stmt
, NULL
);
6133 gimple_seq_add_seq (&bind_body
, dlist
);
6135 bind_body
= maybe_catch_exception (bind_body
);
6137 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
6138 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
6139 gimple
*g
= gimple_build_omp_return (nowait
);
6140 gimple_seq_add_stmt (&bind_body_tail
, g
);
6141 maybe_add_implicit_barrier_cancel (ctx
, &bind_body_tail
);
6142 if (ctx
->record_type
)
6144 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
6145 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
6146 TREE_THIS_VOLATILE (clobber
) = 1;
6147 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
6148 clobber
), GSI_SAME_STMT
);
6150 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
6151 gimple_bind_set_body (bind
, bind_body
);
6153 pop_gimplify_context (bind
);
6155 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6156 BLOCK_VARS (block
) = ctx
->block_vars
;
6157 if (BLOCK_VARS (block
))
6158 TREE_USED (block
) = 1;
6162 /* Expand code for an OpenMP master directive. */
6165 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6167 tree block
, lab
= NULL
, x
, bfn_decl
;
6168 gimple
*stmt
= gsi_stmt (*gsi_p
);
6170 location_t loc
= gimple_location (stmt
);
6173 push_gimplify_context ();
6175 block
= make_node (BLOCK
);
6176 bind
= gimple_build_bind (NULL
, NULL
, block
);
6177 gsi_replace (gsi_p
, bind
, true);
6178 gimple_bind_add_stmt (bind
, stmt
);
6180 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
6181 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
6182 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, integer_zero_node
);
6183 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
6185 gimplify_and_add (x
, &tseq
);
6186 gimple_bind_add_seq (bind
, tseq
);
6188 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6189 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
6190 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
6191 gimple_omp_set_body (stmt
, NULL
);
6193 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
6195 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
6197 pop_gimplify_context (bind
);
6199 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6200 BLOCK_VARS (block
) = ctx
->block_vars
;
6204 /* Expand code for an OpenMP taskgroup directive. */
6207 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6209 gimple
*stmt
= gsi_stmt (*gsi_p
);
6212 tree block
= make_node (BLOCK
);
6214 bind
= gimple_build_bind (NULL
, NULL
, block
);
6215 gsi_replace (gsi_p
, bind
, true);
6216 gimple_bind_add_stmt (bind
, stmt
);
6218 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
6220 gimple_bind_add_stmt (bind
, x
);
6222 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6223 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
6224 gimple_omp_set_body (stmt
, NULL
);
6226 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
6228 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6229 BLOCK_VARS (block
) = ctx
->block_vars
;
6233 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6236 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
6239 struct omp_for_data fd
;
6240 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
6243 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
6244 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
6245 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
6249 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
6250 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
6251 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
6252 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
6254 /* Merge depend clauses from multiple adjacent
6255 #pragma omp ordered depend(sink:...) constructs
6256 into one #pragma omp ordered depend(sink:...), so that
6257 we can optimize them together. */
6258 gimple_stmt_iterator gsi
= *gsi_p
;
6260 while (!gsi_end_p (gsi
))
6262 gimple
*stmt
= gsi_stmt (gsi
);
6263 if (is_gimple_debug (stmt
)
6264 || gimple_code (stmt
) == GIMPLE_NOP
)
6269 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
6271 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
6272 c
= gimple_omp_ordered_clauses (ord_stmt2
);
6274 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
6275 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
6278 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
6280 gsi_remove (&gsi
, true);
6284 /* Canonicalize sink dependence clauses into one folded clause if
6287 The basic algorithm is to create a sink vector whose first
6288 element is the GCD of all the first elements, and whose remaining
6289 elements are the minimum of the subsequent columns.
6291 We ignore dependence vectors whose first element is zero because
6292 such dependencies are known to be executed by the same thread.
6294 We take into account the direction of the loop, so a minimum
6295 becomes a maximum if the loop is iterating forwards. We also
6296 ignore sink clauses where the loop direction is unknown, or where
6297 the offsets are clearly invalid because they are not a multiple
6298 of the loop increment.
6302 #pragma omp for ordered(2)
6303 for (i=0; i < N; ++i)
6304 for (j=0; j < M; ++j)
6306 #pragma omp ordered \
6307 depend(sink:i-8,j-2) \
6308 depend(sink:i,j-1) \ // Completely ignored because i+0.
6309 depend(sink:i-4,j-3) \
6310 depend(sink:i-6,j-4)
6311 #pragma omp ordered depend(source)
6316 depend(sink:-gcd(8,4,6),-min(2,3,4))
6321 /* FIXME: Computing GCD's where the first element is zero is
6322 non-trivial in the presence of collapsed loops. Do this later. */
6323 if (fd
.collapse
> 1)
6326 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
6328 /* wide_int is not a POD so it must be default-constructed. */
6329 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
6330 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
6332 tree folded_dep
= NULL_TREE
;
6333 /* TRUE if the first dimension's offset is negative. */
6334 bool neg_offset_p
= false;
6336 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
6338 while ((c
= *list_p
) != NULL
)
6340 bool remove
= false;
6342 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
6343 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
6344 goto next_ordered_clause
;
6347 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
6348 vec
&& TREE_CODE (vec
) == TREE_LIST
;
6349 vec
= TREE_CHAIN (vec
), ++i
)
6351 gcc_assert (i
< len
);
6353 /* omp_extract_for_data has canonicalized the condition. */
6354 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
6355 || fd
.loops
[i
].cond_code
== GT_EXPR
);
6356 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
6357 bool maybe_lexically_later
= true;
6359 /* While the committee makes up its mind, bail if we have any
6360 non-constant steps. */
6361 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
6362 goto lower_omp_ordered_ret
;
6364 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
6365 if (POINTER_TYPE_P (itype
))
6367 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
6368 TYPE_PRECISION (itype
),
6371 /* Ignore invalid offsets that are not multiples of the step. */
6372 if (!wi::multiple_of_p (wi::abs (offset
),
6373 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
6376 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
6377 "ignoring sink clause with offset that is not "
6378 "a multiple of the loop step");
6380 goto next_ordered_clause
;
6383 /* Calculate the first dimension. The first dimension of
6384 the folded dependency vector is the GCD of the first
6385 elements, while ignoring any first elements whose offset
6389 /* Ignore dependence vectors whose first dimension is 0. */
6393 goto next_ordered_clause
;
6397 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
6399 error_at (OMP_CLAUSE_LOCATION (c
),
6400 "first offset must be in opposite direction "
6401 "of loop iterations");
6402 goto lower_omp_ordered_ret
;
6406 neg_offset_p
= forward
;
6407 /* Initialize the first time around. */
6408 if (folded_dep
== NULL_TREE
)
6411 folded_deps
[0] = offset
;
6414 folded_deps
[0] = wi::gcd (folded_deps
[0],
6418 /* Calculate minimum for the remaining dimensions. */
6421 folded_deps
[len
+ i
- 1] = offset
;
6422 if (folded_dep
== c
)
6423 folded_deps
[i
] = offset
;
6424 else if (maybe_lexically_later
6425 && !wi::eq_p (folded_deps
[i
], offset
))
6427 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
6431 for (j
= 1; j
<= i
; j
++)
6432 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
6435 maybe_lexically_later
= false;
6439 gcc_assert (i
== len
);
6443 next_ordered_clause
:
6445 *list_p
= OMP_CLAUSE_CHAIN (c
);
6447 list_p
= &OMP_CLAUSE_CHAIN (c
);
6453 folded_deps
[0] = -folded_deps
[0];
6455 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
6456 if (POINTER_TYPE_P (itype
))
6459 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
6460 = wide_int_to_tree (itype
, folded_deps
[0]);
6461 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
6462 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
6465 lower_omp_ordered_ret
:
6467 /* Ordered without clauses is #pragma omp threads, while we want
6468 a nop instead if we remove all clauses. */
6469 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
6470 gsi_replace (gsi_p
, gimple_build_nop (), true);
6474 /* Expand code for an OpenMP ordered directive. */
6477 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6480 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
6481 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
6484 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
6486 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6489 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
6490 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
6491 OMP_CLAUSE_THREADS
);
6493 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
6496 /* FIXME: This is needs to be moved to the expansion to verify various
6497 conditions only testable on cfg with dominators computed, and also
6498 all the depend clauses to be merged still might need to be available
6499 for the runtime checks. */
6501 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
6505 push_gimplify_context ();
6507 block
= make_node (BLOCK
);
6508 bind
= gimple_build_bind (NULL
, NULL
, block
);
6509 gsi_replace (gsi_p
, bind
, true);
6510 gimple_bind_add_stmt (bind
, stmt
);
6514 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
6515 build_int_cst (NULL_TREE
, threads
));
6516 cfun
->has_simduid_loops
= true;
6519 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
6521 gimple_bind_add_stmt (bind
, x
);
6523 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
6526 counter
= create_tmp_var (integer_type_node
);
6527 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
6528 gimple_call_set_lhs (g
, counter
);
6529 gimple_bind_add_stmt (bind
, g
);
6531 body
= create_artificial_label (UNKNOWN_LOCATION
);
6532 test
= create_artificial_label (UNKNOWN_LOCATION
);
6533 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
6535 tree simt_pred
= create_tmp_var (integer_type_node
);
6536 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
6537 gimple_call_set_lhs (g
, simt_pred
);
6538 gimple_bind_add_stmt (bind
, g
);
6540 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
6541 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
6542 gimple_bind_add_stmt (bind
, g
);
6544 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
6546 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6547 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
6548 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
6549 gimple_omp_set_body (stmt
, NULL
);
6553 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
6554 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
6555 gimple_bind_add_stmt (bind
, g
);
6557 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
6558 tree nonneg
= create_tmp_var (integer_type_node
);
6559 gimple_seq tseq
= NULL
;
6560 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
6561 gimple_bind_add_seq (bind
, tseq
);
6563 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
6564 gimple_call_set_lhs (g
, nonneg
);
6565 gimple_bind_add_stmt (bind
, g
);
6567 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6568 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
6569 gimple_bind_add_stmt (bind
, g
);
6571 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
6574 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
6575 build_int_cst (NULL_TREE
, threads
));
6577 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
6579 gimple_bind_add_stmt (bind
, x
);
6581 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
6583 pop_gimplify_context (bind
);
6585 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6586 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
6590 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6591 substitution of a couple of function calls. But in the NAMED case,
6592 requires that languages coordinate a symbol name. It is therefore
6593 best put here in common code. */
6595 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
6598 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6601 tree name
, lock
, unlock
;
6602 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
6604 location_t loc
= gimple_location (stmt
);
6607 name
= gimple_omp_critical_name (stmt
);
6612 if (!critical_name_mutexes
)
6613 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
6615 tree
*n
= critical_name_mutexes
->get (name
);
6620 decl
= create_tmp_var_raw (ptr_type_node
);
6622 new_str
= ACONCAT ((".gomp_critical_user_",
6623 IDENTIFIER_POINTER (name
), NULL
));
6624 DECL_NAME (decl
) = get_identifier (new_str
);
6625 TREE_PUBLIC (decl
) = 1;
6626 TREE_STATIC (decl
) = 1;
6627 DECL_COMMON (decl
) = 1;
6628 DECL_ARTIFICIAL (decl
) = 1;
6629 DECL_IGNORED_P (decl
) = 1;
6631 varpool_node::finalize_decl (decl
);
6633 critical_name_mutexes
->put (name
, decl
);
6638 /* If '#pragma omp critical' is inside offloaded region or
6639 inside function marked as offloadable, the symbol must be
6640 marked as offloadable too. */
6642 if (cgraph_node::get (current_function_decl
)->offloadable
)
6643 varpool_node::get_create (decl
)->offloadable
= 1;
6645 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
6646 if (is_gimple_omp_offloaded (octx
->stmt
))
6648 varpool_node::get_create (decl
)->offloadable
= 1;
6652 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
6653 lock
= build_call_expr_loc (loc
, lock
, 1,
6654 build_fold_addr_expr_loc (loc
, decl
));
6656 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
6657 unlock
= build_call_expr_loc (loc
, unlock
, 1,
6658 build_fold_addr_expr_loc (loc
, decl
));
6662 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
6663 lock
= build_call_expr_loc (loc
, lock
, 0);
6665 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
6666 unlock
= build_call_expr_loc (loc
, unlock
, 0);
6669 push_gimplify_context ();
6671 block
= make_node (BLOCK
);
6672 bind
= gimple_build_bind (NULL
, NULL
, block
);
6673 gsi_replace (gsi_p
, bind
, true);
6674 gimple_bind_add_stmt (bind
, stmt
);
6676 tbody
= gimple_bind_body (bind
);
6677 gimplify_and_add (lock
, &tbody
);
6678 gimple_bind_set_body (bind
, tbody
);
6680 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6681 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
6682 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
6683 gimple_omp_set_body (stmt
, NULL
);
6685 tbody
= gimple_bind_body (bind
);
6686 gimplify_and_add (unlock
, &tbody
);
6687 gimple_bind_set_body (bind
, tbody
);
6689 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
6691 pop_gimplify_context (bind
);
6692 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6693 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
6696 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6697 for a lastprivate clause. Given a loop control predicate of (V
6698 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6699 is appended to *DLIST, iterator initialization is appended to
6703 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
6704 gimple_seq
*dlist
, struct omp_context
*ctx
)
6706 tree clauses
, cond
, vinit
;
6707 enum tree_code cond_code
;
6710 cond_code
= fd
->loop
.cond_code
;
6711 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
6713 /* When possible, use a strict equality expression. This can let VRP
6714 type optimizations deduce the value and remove a copy. */
6715 if (tree_fits_shwi_p (fd
->loop
.step
))
6717 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
6718 if (step
== 1 || step
== -1)
6719 cond_code
= EQ_EXPR
;
6722 if (gimple_omp_for_kind (fd
->for_stmt
) == GF_OMP_FOR_KIND_GRID_LOOP
6723 || gimple_omp_for_grid_phony (fd
->for_stmt
))
6724 cond
= omp_grid_lastprivate_predicate (fd
);
6727 tree n2
= fd
->loop
.n2
;
6728 if (fd
->collapse
> 1
6729 && TREE_CODE (n2
) != INTEGER_CST
6730 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
6732 struct omp_context
*taskreg_ctx
= NULL
;
6733 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
6735 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
6736 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
6737 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
6739 if (gimple_omp_for_combined_into_p (gfor
))
6741 gcc_assert (ctx
->outer
->outer
6742 && is_parallel_ctx (ctx
->outer
->outer
));
6743 taskreg_ctx
= ctx
->outer
->outer
;
6747 struct omp_for_data outer_fd
;
6748 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
6749 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
6752 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
6753 taskreg_ctx
= ctx
->outer
->outer
;
6755 else if (is_taskreg_ctx (ctx
->outer
))
6756 taskreg_ctx
= ctx
->outer
;
6760 tree taskreg_clauses
6761 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
6762 tree innerc
= omp_find_clause (taskreg_clauses
,
6763 OMP_CLAUSE__LOOPTEMP_
);
6764 gcc_assert (innerc
);
6765 for (i
= 0; i
< fd
->collapse
; i
++)
6767 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
6768 OMP_CLAUSE__LOOPTEMP_
);
6769 gcc_assert (innerc
);
6771 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
6772 OMP_CLAUSE__LOOPTEMP_
);
6774 n2
= fold_convert (TREE_TYPE (n2
),
6775 lookup_decl (OMP_CLAUSE_DECL (innerc
),
6779 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
6782 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
6784 lower_lastprivate_clauses (clauses
, cond
, &stmts
, ctx
);
6785 if (!gimple_seq_empty_p (stmts
))
6787 gimple_seq_add_seq (&stmts
, *dlist
);
6790 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6791 vinit
= fd
->loop
.n1
;
6792 if (cond_code
== EQ_EXPR
6793 && tree_fits_shwi_p (fd
->loop
.n2
)
6794 && ! integer_zerop (fd
->loop
.n2
))
6795 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
6797 vinit
= unshare_expr (vinit
);
6799 /* Initialize the iterator variable, so that threads that don't execute
6800 any iterations don't execute the lastprivate clauses by accident. */
6801 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
6806 /* Lower code for an OMP loop directive. */
6809 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6812 struct omp_for_data fd
, *fdp
= NULL
;
6813 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
6815 gimple_seq omp_for_body
, body
, dlist
;
6816 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
6819 push_gimplify_context ();
6821 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
6823 block
= make_node (BLOCK
);
6824 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
6825 /* Replace at gsi right away, so that 'stmt' is no member
6826 of a sequence anymore as we're going to add to a different
6828 gsi_replace (gsi_p
, new_stmt
, true);
6830 /* Move declaration of temporaries in the loop body before we make
6832 omp_for_body
= gimple_omp_body (stmt
);
6833 if (!gimple_seq_empty_p (omp_for_body
)
6834 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
6837 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
6838 tree vars
= gimple_bind_vars (inner_bind
);
6839 gimple_bind_append_vars (new_stmt
, vars
);
6840 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6841 keep them on the inner_bind and it's block. */
6842 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
6843 if (gimple_bind_block (inner_bind
))
6844 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
6847 if (gimple_omp_for_combined_into_p (stmt
))
6849 omp_extract_for_data (stmt
, &fd
, NULL
);
6852 /* We need two temporaries with fd.loop.v type (istart/iend)
6853 and then (fd.collapse - 1) temporaries with the same
6854 type for count2 ... countN-1 vars if not constant. */
6856 tree type
= fd
.iter_type
;
6858 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
6859 count
+= fd
.collapse
- 1;
6861 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
6862 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
6863 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
6868 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
6869 OMP_CLAUSE__LOOPTEMP_
);
6871 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
6872 OMP_CLAUSE__LOOPTEMP_
);
6873 for (i
= 0; i
< count
; i
++)
6878 gcc_assert (outerc
);
6879 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
6880 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
6881 OMP_CLAUSE__LOOPTEMP_
);
6885 /* If there are 2 adjacent SIMD stmts, one with _simt_
6886 clause, another without, make sure they have the same
6887 decls in _looptemp_ clauses, because the outer stmt
6888 they are combined into will look up just one inner_stmt. */
6890 temp
= OMP_CLAUSE_DECL (simtc
);
6892 temp
= create_tmp_var (type
);
6893 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
6895 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
6896 OMP_CLAUSE_DECL (*pc
) = temp
;
6897 pc
= &OMP_CLAUSE_CHAIN (*pc
);
6899 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
6900 OMP_CLAUSE__LOOPTEMP_
);
6905 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6908 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
6910 gimple_seq_add_seq (&body
, gimple_omp_for_pre_body (stmt
));
6912 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6914 /* Lower the header expressions. At this point, we can assume that
6915 the header is of the form:
6917 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6919 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6920 using the .omp_data_s mapping, if needed. */
6921 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
6923 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
6924 if (!is_gimple_min_invariant (*rhs_p
))
6925 *rhs_p
= get_formal_tmp_var (*rhs_p
, &body
);
6926 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
6927 recompute_tree_invariant_for_addr_expr (*rhs_p
);
6929 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
6930 if (!is_gimple_min_invariant (*rhs_p
))
6931 *rhs_p
= get_formal_tmp_var (*rhs_p
, &body
);
6932 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
6933 recompute_tree_invariant_for_addr_expr (*rhs_p
);
6935 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
6936 if (!is_gimple_min_invariant (*rhs_p
))
6937 *rhs_p
= get_formal_tmp_var (*rhs_p
, &body
);
6940 /* Once lowered, extract the bounds and clauses. */
6941 omp_extract_for_data (stmt
, &fd
, NULL
);
6943 if (is_gimple_omp_oacc (ctx
->stmt
)
6944 && !ctx_in_oacc_kernels_region (ctx
))
6945 lower_oacc_head_tail (gimple_location (stmt
),
6946 gimple_omp_for_clauses (stmt
),
6947 &oacc_head
, &oacc_tail
, ctx
);
6949 /* Add OpenACC partitioning and reduction markers just before the loop. */
6951 gimple_seq_add_seq (&body
, oacc_head
);
6953 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, ctx
);
6955 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
6956 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
6957 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6958 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
6960 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6961 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
6962 OMP_CLAUSE_LINEAR_STEP (c
)
6963 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
6967 bool phony_loop
= (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
6968 && gimple_omp_for_grid_phony (stmt
));
6970 gimple_seq_add_stmt (&body
, stmt
);
6971 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
6974 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
6977 /* After the loop, add exit clauses. */
6978 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, ctx
);
6980 if (ctx
->cancellable
)
6981 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
6983 gimple_seq_add_seq (&body
, dlist
);
6985 body
= maybe_catch_exception (body
);
6989 /* Region exit marker goes at the end of the loop body. */
6990 gimple_seq_add_stmt (&body
, gimple_build_omp_return (fd
.have_nowait
));
6991 maybe_add_implicit_barrier_cancel (ctx
, &body
);
6994 /* Add OpenACC joining and reduction markers just after the loop. */
6996 gimple_seq_add_seq (&body
, oacc_tail
);
6998 pop_gimplify_context (new_stmt
);
7000 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
7001 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
7002 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
7003 if (BLOCK_VARS (block
))
7004 TREE_USED (block
) = 1;
7006 gimple_bind_set_body (new_stmt
, body
);
7007 gimple_omp_set_body (stmt
, NULL
);
7008 gimple_omp_for_set_pre_body (stmt
, NULL
);
7011 /* Callback for walk_stmts. Check if the current statement only contains
7012 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
7015 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
7016 bool *handled_ops_p
,
7017 struct walk_stmt_info
*wi
)
7019 int *info
= (int *) wi
->info
;
7020 gimple
*stmt
= gsi_stmt (*gsi_p
);
7022 *handled_ops_p
= true;
7023 switch (gimple_code (stmt
))
7029 case GIMPLE_OMP_FOR
:
7030 case GIMPLE_OMP_SECTIONS
:
7031 *info
= *info
== 0 ? 1 : -1;
7040 struct omp_taskcopy_context
7042 /* This field must be at the beginning, as we do "inheritance": Some
7043 callback functions for tree-inline.c (e.g., omp_copy_decl)
7044 receive a copy_body_data pointer that is up-casted to an
7045 omp_context pointer. */
7051 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
7053 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
7055 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
7056 return create_tmp_var (TREE_TYPE (var
));
7062 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
7064 tree name
, new_fields
= NULL
, type
, f
;
7066 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
7067 name
= DECL_NAME (TYPE_NAME (orig_type
));
7068 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
7069 TYPE_DECL
, name
, type
);
7070 TYPE_NAME (type
) = name
;
7072 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
7074 tree new_f
= copy_node (f
);
7075 DECL_CONTEXT (new_f
) = type
;
7076 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
7077 TREE_CHAIN (new_f
) = new_fields
;
7078 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
7079 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
7080 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
7083 tcctx
->cb
.decl_map
->put (f
, new_f
);
7085 TYPE_FIELDS (type
) = nreverse (new_fields
);
7090 /* Create task copyfn. */
7093 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
7095 struct function
*child_cfun
;
7096 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
7097 tree record_type
, srecord_type
, bind
, list
;
7098 bool record_needs_remap
= false, srecord_needs_remap
= false;
7100 struct omp_taskcopy_context tcctx
;
7101 location_t loc
= gimple_location (task_stmt
);
7103 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
7104 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
7105 gcc_assert (child_cfun
->cfg
== NULL
);
7106 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
7108 /* Reset DECL_CONTEXT on function arguments. */
7109 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
7110 DECL_CONTEXT (t
) = child_fn
;
7112 /* Populate the function. */
7113 push_gimplify_context ();
7114 push_cfun (child_cfun
);
7116 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
7117 TREE_SIDE_EFFECTS (bind
) = 1;
7119 DECL_SAVED_TREE (child_fn
) = bind
;
7120 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
7122 /* Remap src and dst argument types if needed. */
7123 record_type
= ctx
->record_type
;
7124 srecord_type
= ctx
->srecord_type
;
7125 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
7126 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
7128 record_needs_remap
= true;
7131 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
7132 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
7134 srecord_needs_remap
= true;
7138 if (record_needs_remap
|| srecord_needs_remap
)
7140 memset (&tcctx
, '\0', sizeof (tcctx
));
7141 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
7142 tcctx
.cb
.dst_fn
= child_fn
;
7143 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
7144 gcc_checking_assert (tcctx
.cb
.src_node
);
7145 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
7146 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
7147 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
7148 tcctx
.cb
.eh_lp_nr
= 0;
7149 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
7150 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
7153 if (record_needs_remap
)
7154 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
7155 if (srecord_needs_remap
)
7156 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
7159 tcctx
.cb
.decl_map
= NULL
;
7161 arg
= DECL_ARGUMENTS (child_fn
);
7162 TREE_TYPE (arg
) = build_pointer_type (record_type
);
7163 sarg
= DECL_CHAIN (arg
);
7164 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
7166 /* First pass: initialize temporaries used in record_type and srecord_type
7167 sizes and field offsets. */
7168 if (tcctx
.cb
.decl_map
)
7169 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
7170 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7174 decl
= OMP_CLAUSE_DECL (c
);
7175 p
= tcctx
.cb
.decl_map
->get (decl
);
7178 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
7179 sf
= (tree
) n
->value
;
7180 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7181 src
= build_simple_mem_ref_loc (loc
, sarg
);
7182 src
= omp_build_component_ref (src
, sf
);
7183 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
7184 append_to_statement_list (t
, &list
);
7187 /* Second pass: copy shared var pointers and copy construct non-VLA
7188 firstprivate vars. */
7189 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
7190 switch (OMP_CLAUSE_CODE (c
))
7193 case OMP_CLAUSE_SHARED
:
7194 decl
= OMP_CLAUSE_DECL (c
);
7195 key
= (splay_tree_key
) decl
;
7196 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7197 key
= (splay_tree_key
) &DECL_UID (decl
);
7198 n
= splay_tree_lookup (ctx
->field_map
, key
);
7201 f
= (tree
) n
->value
;
7202 if (tcctx
.cb
.decl_map
)
7203 f
= *tcctx
.cb
.decl_map
->get (f
);
7204 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
7205 sf
= (tree
) n
->value
;
7206 if (tcctx
.cb
.decl_map
)
7207 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7208 src
= build_simple_mem_ref_loc (loc
, sarg
);
7209 src
= omp_build_component_ref (src
, sf
);
7210 dst
= build_simple_mem_ref_loc (loc
, arg
);
7211 dst
= omp_build_component_ref (dst
, f
);
7212 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
7213 append_to_statement_list (t
, &list
);
7215 case OMP_CLAUSE_FIRSTPRIVATE
:
7216 decl
= OMP_CLAUSE_DECL (c
);
7217 if (is_variable_sized (decl
))
7219 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
7222 f
= (tree
) n
->value
;
7223 if (tcctx
.cb
.decl_map
)
7224 f
= *tcctx
.cb
.decl_map
->get (f
);
7225 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
7228 sf
= (tree
) n
->value
;
7229 if (tcctx
.cb
.decl_map
)
7230 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7231 src
= build_simple_mem_ref_loc (loc
, sarg
);
7232 src
= omp_build_component_ref (src
, sf
);
7233 if (use_pointer_for_field (decl
, NULL
) || omp_is_reference (decl
))
7234 src
= build_simple_mem_ref_loc (loc
, src
);
7238 dst
= build_simple_mem_ref_loc (loc
, arg
);
7239 dst
= omp_build_component_ref (dst
, f
);
7240 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
7241 append_to_statement_list (t
, &list
);
7243 case OMP_CLAUSE_PRIVATE
:
7244 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
7246 decl
= OMP_CLAUSE_DECL (c
);
7247 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
7248 f
= (tree
) n
->value
;
7249 if (tcctx
.cb
.decl_map
)
7250 f
= *tcctx
.cb
.decl_map
->get (f
);
7251 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
7254 sf
= (tree
) n
->value
;
7255 if (tcctx
.cb
.decl_map
)
7256 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7257 src
= build_simple_mem_ref_loc (loc
, sarg
);
7258 src
= omp_build_component_ref (src
, sf
);
7259 if (use_pointer_for_field (decl
, NULL
))
7260 src
= build_simple_mem_ref_loc (loc
, src
);
7264 dst
= build_simple_mem_ref_loc (loc
, arg
);
7265 dst
= omp_build_component_ref (dst
, f
);
7266 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
7267 append_to_statement_list (t
, &list
);
7273 /* Last pass: handle VLA firstprivates. */
7274 if (tcctx
.cb
.decl_map
)
7275 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
7276 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7280 decl
= OMP_CLAUSE_DECL (c
);
7281 if (!is_variable_sized (decl
))
7283 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
7286 f
= (tree
) n
->value
;
7287 f
= *tcctx
.cb
.decl_map
->get (f
);
7288 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
7289 ind
= DECL_VALUE_EXPR (decl
);
7290 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
7291 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
7292 n
= splay_tree_lookup (ctx
->sfield_map
,
7293 (splay_tree_key
) TREE_OPERAND (ind
, 0));
7294 sf
= (tree
) n
->value
;
7295 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7296 src
= build_simple_mem_ref_loc (loc
, sarg
);
7297 src
= omp_build_component_ref (src
, sf
);
7298 src
= build_simple_mem_ref_loc (loc
, src
);
7299 dst
= build_simple_mem_ref_loc (loc
, arg
);
7300 dst
= omp_build_component_ref (dst
, f
);
7301 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
7302 append_to_statement_list (t
, &list
);
7303 n
= splay_tree_lookup (ctx
->field_map
,
7304 (splay_tree_key
) TREE_OPERAND (ind
, 0));
7305 df
= (tree
) n
->value
;
7306 df
= *tcctx
.cb
.decl_map
->get (df
);
7307 ptr
= build_simple_mem_ref_loc (loc
, arg
);
7308 ptr
= omp_build_component_ref (ptr
, df
);
7309 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
7310 build_fold_addr_expr_loc (loc
, dst
));
7311 append_to_statement_list (t
, &list
);
7314 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
7315 append_to_statement_list (t
, &list
);
7317 if (tcctx
.cb
.decl_map
)
7318 delete tcctx
.cb
.decl_map
;
7319 pop_gimplify_context (NULL
);
7320 BIND_EXPR_BODY (bind
) = list
;
7325 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
7329 size_t n_in
= 0, n_out
= 0, idx
= 2, i
;
7331 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
7332 gcc_assert (clauses
);
7333 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7334 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
7335 switch (OMP_CLAUSE_DEPEND_KIND (c
))
7337 case OMP_CLAUSE_DEPEND_IN
:
7340 case OMP_CLAUSE_DEPEND_OUT
:
7341 case OMP_CLAUSE_DEPEND_INOUT
:
7344 case OMP_CLAUSE_DEPEND_SOURCE
:
7345 case OMP_CLAUSE_DEPEND_SINK
:
7350 tree type
= build_array_type_nelts (ptr_type_node
, n_in
+ n_out
+ 2);
7351 tree array
= create_tmp_var (type
);
7352 TREE_ADDRESSABLE (array
) = 1;
7353 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
7355 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, n_in
+ n_out
));
7356 gimple_seq_add_stmt (iseq
, g
);
7357 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
7359 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, n_out
));
7360 gimple_seq_add_stmt (iseq
, g
);
7361 for (i
= 0; i
< 2; i
++)
7363 if ((i
? n_in
: n_out
) == 0)
7365 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7366 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
7367 && ((OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_IN
) ^ i
))
7369 tree t
= OMP_CLAUSE_DECL (c
);
7370 t
= fold_convert (ptr_type_node
, t
);
7371 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
7372 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
7373 NULL_TREE
, NULL_TREE
);
7374 g
= gimple_build_assign (r
, t
);
7375 gimple_seq_add_stmt (iseq
, g
);
7378 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
7379 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
7380 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
7382 tree clobber
= build_constructor (type
, NULL
);
7383 TREE_THIS_VOLATILE (clobber
) = 1;
7384 g
= gimple_build_assign (array
, clobber
);
7385 gimple_seq_add_stmt (oseq
, g
);
7388 /* Lower the OpenMP parallel or task directive in the current statement
7389 in GSI_P. CTX holds context information for the directive. */
7392 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7396 gimple
*stmt
= gsi_stmt (*gsi_p
);
7397 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
7398 gimple_seq par_body
, olist
, ilist
, par_olist
, par_rlist
, par_ilist
, new_body
;
7399 location_t loc
= gimple_location (stmt
);
7401 clauses
= gimple_omp_taskreg_clauses (stmt
);
7403 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
7404 par_body
= gimple_bind_body (par_bind
);
7405 child_fn
= ctx
->cb
.dst_fn
;
7406 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
7407 && !gimple_omp_parallel_combined_p (stmt
))
7409 struct walk_stmt_info wi
;
7412 memset (&wi
, 0, sizeof (wi
));
7415 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
7417 gimple_omp_parallel_set_combined_p (stmt
, true);
7419 gimple_seq dep_ilist
= NULL
;
7420 gimple_seq dep_olist
= NULL
;
7421 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
7422 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
7424 push_gimplify_context ();
7425 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
7426 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
7427 &dep_ilist
, &dep_olist
);
7430 if (ctx
->srecord_type
)
7431 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
7433 push_gimplify_context ();
7438 bool phony_construct
= gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
7439 && gimple_omp_parallel_grid_phony (as_a
<gomp_parallel
*> (stmt
));
7440 if (phony_construct
&& ctx
->record_type
)
7442 gcc_checking_assert (!ctx
->receiver_decl
);
7443 ctx
->receiver_decl
= create_tmp_var
7444 (build_reference_type (ctx
->record_type
), ".omp_rec");
7446 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
7447 lower_omp (&par_body
, ctx
);
7448 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
)
7449 lower_reduction_clauses (clauses
, &par_rlist
, ctx
);
7451 /* Declare all the variables created by mapping and the variables
7452 declared in the scope of the parallel body. */
7453 record_vars_into (ctx
->block_vars
, child_fn
);
7454 maybe_remove_omp_member_access_dummy_vars (par_bind
);
7455 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
7457 if (ctx
->record_type
)
7460 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
7461 : ctx
->record_type
, ".omp_data_o");
7462 DECL_NAMELESS (ctx
->sender_decl
) = 1;
7463 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
7464 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
7469 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
7470 lower_send_shared_vars (&ilist
, &olist
, ctx
);
7472 if (ctx
->record_type
)
7474 tree clobber
= build_constructor (TREE_TYPE (ctx
->sender_decl
), NULL
);
7475 TREE_THIS_VOLATILE (clobber
) = 1;
7476 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
7480 /* Once all the expansions are done, sequence all the different
7481 fragments inside gimple_omp_body. */
7485 if (ctx
->record_type
)
7487 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
7488 /* fixup_child_record_type might have changed receiver_decl's type. */
7489 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
7490 gimple_seq_add_stmt (&new_body
,
7491 gimple_build_assign (ctx
->receiver_decl
, t
));
7494 gimple_seq_add_seq (&new_body
, par_ilist
);
7495 gimple_seq_add_seq (&new_body
, par_body
);
7496 gimple_seq_add_seq (&new_body
, par_rlist
);
7497 if (ctx
->cancellable
)
7498 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
7499 gimple_seq_add_seq (&new_body
, par_olist
);
7500 new_body
= maybe_catch_exception (new_body
);
7501 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
7502 gimple_seq_add_stmt (&new_body
,
7503 gimple_build_omp_continue (integer_zero_node
,
7504 integer_zero_node
));
7505 if (!phony_construct
)
7507 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
7508 gimple_omp_set_body (stmt
, new_body
);
7511 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
7512 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
7513 gimple_bind_add_seq (bind
, ilist
);
7514 if (!phony_construct
)
7515 gimple_bind_add_stmt (bind
, stmt
);
7517 gimple_bind_add_seq (bind
, new_body
);
7518 gimple_bind_add_seq (bind
, olist
);
7520 pop_gimplify_context (NULL
);
7524 gimple_bind_add_seq (dep_bind
, dep_ilist
);
7525 gimple_bind_add_stmt (dep_bind
, bind
);
7526 gimple_bind_add_seq (dep_bind
, dep_olist
);
7527 pop_gimplify_context (dep_bind
);
7531 /* Lower the GIMPLE_OMP_TARGET in the current statement
7532 in GSI_P. CTX holds context information for the directive. */
7535 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7538 tree child_fn
, t
, c
;
7539 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
7540 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
7541 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
7542 location_t loc
= gimple_location (stmt
);
7543 bool offloaded
, data_region
;
7544 unsigned int map_cnt
= 0;
7546 offloaded
= is_gimple_omp_offloaded (stmt
);
7547 switch (gimple_omp_target_kind (stmt
))
7549 case GF_OMP_TARGET_KIND_REGION
:
7550 case GF_OMP_TARGET_KIND_UPDATE
:
7551 case GF_OMP_TARGET_KIND_ENTER_DATA
:
7552 case GF_OMP_TARGET_KIND_EXIT_DATA
:
7553 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
7554 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
7555 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
7556 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
7557 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
7558 data_region
= false;
7560 case GF_OMP_TARGET_KIND_DATA
:
7561 case GF_OMP_TARGET_KIND_OACC_DATA
:
7562 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
7569 clauses
= gimple_omp_target_clauses (stmt
);
7571 gimple_seq dep_ilist
= NULL
;
7572 gimple_seq dep_olist
= NULL
;
7573 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
7575 push_gimplify_context ();
7576 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
7577 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
7578 &dep_ilist
, &dep_olist
);
7585 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
7586 tgt_body
= gimple_bind_body (tgt_bind
);
7588 else if (data_region
)
7589 tgt_body
= gimple_omp_body (stmt
);
7590 child_fn
= ctx
->cb
.dst_fn
;
7592 push_gimplify_context ();
7595 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7596 switch (OMP_CLAUSE_CODE (c
))
7602 case OMP_CLAUSE_MAP
:
7604 /* First check what we're prepared to handle in the following. */
7605 switch (OMP_CLAUSE_MAP_KIND (c
))
7607 case GOMP_MAP_ALLOC
:
7610 case GOMP_MAP_TOFROM
:
7611 case GOMP_MAP_POINTER
:
7612 case GOMP_MAP_TO_PSET
:
7613 case GOMP_MAP_DELETE
:
7614 case GOMP_MAP_RELEASE
:
7615 case GOMP_MAP_ALWAYS_TO
:
7616 case GOMP_MAP_ALWAYS_FROM
:
7617 case GOMP_MAP_ALWAYS_TOFROM
:
7618 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
7619 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
7620 case GOMP_MAP_STRUCT
:
7621 case GOMP_MAP_ALWAYS_POINTER
:
7623 case GOMP_MAP_FORCE_ALLOC
:
7624 case GOMP_MAP_FORCE_TO
:
7625 case GOMP_MAP_FORCE_FROM
:
7626 case GOMP_MAP_FORCE_TOFROM
:
7627 case GOMP_MAP_FORCE_PRESENT
:
7628 case GOMP_MAP_FORCE_DEVICEPTR
:
7629 case GOMP_MAP_DEVICE_RESIDENT
:
7631 gcc_assert (is_gimple_omp_oacc (stmt
));
7639 case OMP_CLAUSE_FROM
:
7641 var
= OMP_CLAUSE_DECL (c
);
7644 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
7645 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
7646 && (OMP_CLAUSE_MAP_KIND (c
)
7647 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
7653 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
7655 tree var2
= DECL_VALUE_EXPR (var
);
7656 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
7657 var2
= TREE_OPERAND (var2
, 0);
7658 gcc_assert (DECL_P (var2
));
7663 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7664 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7665 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
7667 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
7669 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
7670 && varpool_node::get_create (var
)->offloadable
)
7673 tree type
= build_pointer_type (TREE_TYPE (var
));
7674 tree new_var
= lookup_decl (var
, ctx
);
7675 x
= create_tmp_var_raw (type
, get_name (new_var
));
7676 gimple_add_tmp_var (x
);
7677 x
= build_simple_mem_ref (x
);
7678 SET_DECL_VALUE_EXPR (new_var
, x
);
7679 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7684 if (!maybe_lookup_field (var
, ctx
))
7687 /* Don't remap oacc parallel reduction variables, because the
7688 intermediate result must be local to each gang. */
7689 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7690 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
7692 x
= build_receiver_ref (var
, true, ctx
);
7693 tree new_var
= lookup_decl (var
, ctx
);
7695 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7696 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
7697 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
7698 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
7699 x
= build_simple_mem_ref (x
);
7700 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7702 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
7703 if (omp_is_reference (new_var
))
7705 /* Create a local object to hold the instance
7707 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
7708 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
7709 tree inst
= create_tmp_var (type
, id
);
7710 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
7711 x
= build_fold_addr_expr (inst
);
7713 gimplify_assign (new_var
, x
, &fplist
);
7715 else if (DECL_P (new_var
))
7717 SET_DECL_VALUE_EXPR (new_var
, x
);
7718 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7726 case OMP_CLAUSE_FIRSTPRIVATE
:
7727 if (is_oacc_parallel (ctx
))
7728 goto oacc_firstprivate
;
7730 var
= OMP_CLAUSE_DECL (c
);
7731 if (!omp_is_reference (var
)
7732 && !is_gimple_reg_type (TREE_TYPE (var
)))
7734 tree new_var
= lookup_decl (var
, ctx
);
7735 if (is_variable_sized (var
))
7737 tree pvar
= DECL_VALUE_EXPR (var
);
7738 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
7739 pvar
= TREE_OPERAND (pvar
, 0);
7740 gcc_assert (DECL_P (pvar
));
7741 tree new_pvar
= lookup_decl (pvar
, ctx
);
7742 x
= build_fold_indirect_ref (new_pvar
);
7743 TREE_THIS_NOTRAP (x
) = 1;
7746 x
= build_receiver_ref (var
, true, ctx
);
7747 SET_DECL_VALUE_EXPR (new_var
, x
);
7748 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7752 case OMP_CLAUSE_PRIVATE
:
7753 if (is_gimple_omp_oacc (ctx
->stmt
))
7755 var
= OMP_CLAUSE_DECL (c
);
7756 if (is_variable_sized (var
))
7758 tree new_var
= lookup_decl (var
, ctx
);
7759 tree pvar
= DECL_VALUE_EXPR (var
);
7760 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
7761 pvar
= TREE_OPERAND (pvar
, 0);
7762 gcc_assert (DECL_P (pvar
));
7763 tree new_pvar
= lookup_decl (pvar
, ctx
);
7764 x
= build_fold_indirect_ref (new_pvar
);
7765 TREE_THIS_NOTRAP (x
) = 1;
7766 SET_DECL_VALUE_EXPR (new_var
, x
);
7767 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7771 case OMP_CLAUSE_USE_DEVICE_PTR
:
7772 case OMP_CLAUSE_IS_DEVICE_PTR
:
7773 var
= OMP_CLAUSE_DECL (c
);
7775 if (is_variable_sized (var
))
7777 tree new_var
= lookup_decl (var
, ctx
);
7778 tree pvar
= DECL_VALUE_EXPR (var
);
7779 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
7780 pvar
= TREE_OPERAND (pvar
, 0);
7781 gcc_assert (DECL_P (pvar
));
7782 tree new_pvar
= lookup_decl (pvar
, ctx
);
7783 x
= build_fold_indirect_ref (new_pvar
);
7784 TREE_THIS_NOTRAP (x
) = 1;
7785 SET_DECL_VALUE_EXPR (new_var
, x
);
7786 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7788 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
7790 tree new_var
= lookup_decl (var
, ctx
);
7791 tree type
= build_pointer_type (TREE_TYPE (var
));
7792 x
= create_tmp_var_raw (type
, get_name (new_var
));
7793 gimple_add_tmp_var (x
);
7794 x
= build_simple_mem_ref (x
);
7795 SET_DECL_VALUE_EXPR (new_var
, x
);
7796 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7800 tree new_var
= lookup_decl (var
, ctx
);
7801 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
7802 gimple_add_tmp_var (x
);
7803 SET_DECL_VALUE_EXPR (new_var
, x
);
7804 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7811 target_nesting_level
++;
7812 lower_omp (&tgt_body
, ctx
);
7813 target_nesting_level
--;
7815 else if (data_region
)
7816 lower_omp (&tgt_body
, ctx
);
7820 /* Declare all the variables created by mapping and the variables
7821 declared in the scope of the target body. */
7822 record_vars_into (ctx
->block_vars
, child_fn
);
7823 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
7824 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
7829 if (ctx
->record_type
)
7832 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
7833 DECL_NAMELESS (ctx
->sender_decl
) = 1;
7834 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
7835 t
= make_tree_vec (3);
7836 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
7838 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
7840 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
7841 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
7842 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
7843 tree tkind_type
= short_unsigned_type_node
;
7844 int talign_shift
= 8;
7846 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
7848 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
7849 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
7850 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
7851 gimple_omp_target_set_data_arg (stmt
, t
);
7853 vec
<constructor_elt
, va_gc
> *vsize
;
7854 vec
<constructor_elt
, va_gc
> *vkind
;
7855 vec_alloc (vsize
, map_cnt
);
7856 vec_alloc (vkind
, map_cnt
);
7857 unsigned int map_idx
= 0;
7859 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7860 switch (OMP_CLAUSE_CODE (c
))
7862 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
7863 unsigned int talign
;
7868 case OMP_CLAUSE_MAP
:
7870 case OMP_CLAUSE_FROM
:
7871 oacc_firstprivate_map
:
7873 ovar
= OMP_CLAUSE_DECL (c
);
7874 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7875 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7876 || (OMP_CLAUSE_MAP_KIND (c
)
7877 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
7881 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7882 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
7884 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
7885 == get_base_address (ovar
));
7886 nc
= OMP_CLAUSE_CHAIN (c
);
7887 ovar
= OMP_CLAUSE_DECL (nc
);
7891 tree x
= build_sender_ref (ovar
, ctx
);
7893 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
7894 gimplify_assign (x
, v
, &ilist
);
7900 if (DECL_SIZE (ovar
)
7901 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
7903 tree ovar2
= DECL_VALUE_EXPR (ovar
);
7904 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
7905 ovar2
= TREE_OPERAND (ovar2
, 0);
7906 gcc_assert (DECL_P (ovar2
));
7909 if (!maybe_lookup_field (ovar
, ctx
))
7913 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
7914 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
7915 talign
= DECL_ALIGN_UNIT (ovar
);
7918 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
7919 x
= build_sender_ref (ovar
, ctx
);
7921 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7922 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
7923 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
7924 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
7926 gcc_assert (offloaded
);
7928 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
7929 mark_addressable (avar
);
7930 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
7931 talign
= DECL_ALIGN_UNIT (avar
);
7932 avar
= build_fold_addr_expr (avar
);
7933 gimplify_assign (x
, avar
, &ilist
);
7935 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7937 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
7938 if (!omp_is_reference (var
))
7940 if (is_gimple_reg (var
)
7941 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
7942 TREE_NO_WARNING (var
) = 1;
7943 var
= build_fold_addr_expr (var
);
7946 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
7947 gimplify_assign (x
, var
, &ilist
);
7949 else if (is_gimple_reg (var
))
7951 gcc_assert (offloaded
);
7952 tree avar
= create_tmp_var (TREE_TYPE (var
));
7953 mark_addressable (avar
);
7954 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
7955 if (GOMP_MAP_COPY_TO_P (map_kind
)
7956 || map_kind
== GOMP_MAP_POINTER
7957 || map_kind
== GOMP_MAP_TO_PSET
7958 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
7960 /* If we need to initialize a temporary
7961 with VAR because it is not addressable, and
7962 the variable hasn't been initialized yet, then
7963 we'll get a warning for the store to avar.
7964 Don't warn in that case, the mapping might
7966 TREE_NO_WARNING (var
) = 1;
7967 gimplify_assign (avar
, var
, &ilist
);
7969 avar
= build_fold_addr_expr (avar
);
7970 gimplify_assign (x
, avar
, &ilist
);
7971 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
7972 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
7973 && !TYPE_READONLY (TREE_TYPE (var
)))
7975 x
= unshare_expr (x
);
7976 x
= build_simple_mem_ref (x
);
7977 gimplify_assign (var
, x
, &olist
);
7982 var
= build_fold_addr_expr (var
);
7983 gimplify_assign (x
, var
, &ilist
);
7987 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7989 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
7990 s
= TREE_TYPE (ovar
);
7991 if (TREE_CODE (s
) == REFERENCE_TYPE
)
7993 s
= TYPE_SIZE_UNIT (s
);
7996 s
= OMP_CLAUSE_SIZE (c
);
7998 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
7999 s
= fold_convert (size_type_node
, s
);
8000 purpose
= size_int (map_idx
++);
8001 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
8002 if (TREE_CODE (s
) != INTEGER_CST
)
8003 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
8005 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
8006 switch (OMP_CLAUSE_CODE (c
))
8008 case OMP_CLAUSE_MAP
:
8009 tkind
= OMP_CLAUSE_MAP_KIND (c
);
8011 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
8014 case GOMP_MAP_ALLOC
:
8017 case GOMP_MAP_TOFROM
:
8018 case GOMP_MAP_ALWAYS_TO
:
8019 case GOMP_MAP_ALWAYS_FROM
:
8020 case GOMP_MAP_ALWAYS_TOFROM
:
8021 case GOMP_MAP_RELEASE
:
8022 case GOMP_MAP_FORCE_TO
:
8023 case GOMP_MAP_FORCE_FROM
:
8024 case GOMP_MAP_FORCE_TOFROM
:
8025 case GOMP_MAP_FORCE_PRESENT
:
8026 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
8028 case GOMP_MAP_DELETE
:
8029 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
8033 if (tkind_zero
!= tkind
)
8035 if (integer_zerop (s
))
8037 else if (integer_nonzerop (s
))
8041 case OMP_CLAUSE_FIRSTPRIVATE
:
8042 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
8043 tkind
= GOMP_MAP_TO
;
8047 tkind
= GOMP_MAP_TO
;
8050 case OMP_CLAUSE_FROM
:
8051 tkind
= GOMP_MAP_FROM
;
8057 gcc_checking_assert (tkind
8058 < (HOST_WIDE_INT_C (1U) << talign_shift
));
8059 gcc_checking_assert (tkind_zero
8060 < (HOST_WIDE_INT_C (1U) << talign_shift
));
8061 talign
= ceil_log2 (talign
);
8062 tkind
|= talign
<< talign_shift
;
8063 tkind_zero
|= talign
<< talign_shift
;
8064 gcc_checking_assert (tkind
8065 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
8066 gcc_checking_assert (tkind_zero
8067 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
8068 if (tkind
== tkind_zero
)
8069 x
= build_int_cstu (tkind_type
, tkind
);
8072 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
8073 x
= build3 (COND_EXPR
, tkind_type
,
8074 fold_build2 (EQ_EXPR
, boolean_type_node
,
8075 unshare_expr (s
), size_zero_node
),
8076 build_int_cstu (tkind_type
, tkind_zero
),
8077 build_int_cstu (tkind_type
, tkind
));
8079 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
8084 case OMP_CLAUSE_FIRSTPRIVATE
:
8085 if (is_oacc_parallel (ctx
))
8086 goto oacc_firstprivate_map
;
8087 ovar
= OMP_CLAUSE_DECL (c
);
8088 if (omp_is_reference (ovar
))
8089 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
8091 talign
= DECL_ALIGN_UNIT (ovar
);
8092 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
8093 x
= build_sender_ref (ovar
, ctx
);
8094 tkind
= GOMP_MAP_FIRSTPRIVATE
;
8095 type
= TREE_TYPE (ovar
);
8096 if (omp_is_reference (ovar
))
8097 type
= TREE_TYPE (type
);
8098 if ((INTEGRAL_TYPE_P (type
)
8099 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
8100 || TREE_CODE (type
) == POINTER_TYPE
)
8102 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
8104 if (omp_is_reference (var
))
8105 t
= build_simple_mem_ref (var
);
8106 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
8107 TREE_NO_WARNING (var
) = 1;
8108 if (TREE_CODE (type
) != POINTER_TYPE
)
8109 t
= fold_convert (pointer_sized_int_node
, t
);
8110 t
= fold_convert (TREE_TYPE (x
), t
);
8111 gimplify_assign (x
, t
, &ilist
);
8113 else if (omp_is_reference (var
))
8114 gimplify_assign (x
, var
, &ilist
);
8115 else if (is_gimple_reg (var
))
8117 tree avar
= create_tmp_var (TREE_TYPE (var
));
8118 mark_addressable (avar
);
8119 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
8120 TREE_NO_WARNING (var
) = 1;
8121 gimplify_assign (avar
, var
, &ilist
);
8122 avar
= build_fold_addr_expr (avar
);
8123 gimplify_assign (x
, avar
, &ilist
);
8127 var
= build_fold_addr_expr (var
);
8128 gimplify_assign (x
, var
, &ilist
);
8130 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
8132 else if (omp_is_reference (ovar
))
8133 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
8135 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
8136 s
= fold_convert (size_type_node
, s
);
8137 purpose
= size_int (map_idx
++);
8138 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
8139 if (TREE_CODE (s
) != INTEGER_CST
)
8140 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
8142 gcc_checking_assert (tkind
8143 < (HOST_WIDE_INT_C (1U) << talign_shift
));
8144 talign
= ceil_log2 (talign
);
8145 tkind
|= talign
<< talign_shift
;
8146 gcc_checking_assert (tkind
8147 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
8148 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
8149 build_int_cstu (tkind_type
, tkind
));
8152 case OMP_CLAUSE_USE_DEVICE_PTR
:
8153 case OMP_CLAUSE_IS_DEVICE_PTR
:
8154 ovar
= OMP_CLAUSE_DECL (c
);
8155 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
8156 x
= build_sender_ref (ovar
, ctx
);
8157 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
8158 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
8160 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
8161 type
= TREE_TYPE (ovar
);
8162 if (TREE_CODE (type
) == ARRAY_TYPE
)
8163 var
= build_fold_addr_expr (var
);
8166 if (omp_is_reference (ovar
))
8168 type
= TREE_TYPE (type
);
8169 if (TREE_CODE (type
) != ARRAY_TYPE
)
8170 var
= build_simple_mem_ref (var
);
8171 var
= fold_convert (TREE_TYPE (x
), var
);
8174 gimplify_assign (x
, var
, &ilist
);
8176 purpose
= size_int (map_idx
++);
8177 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
8178 gcc_checking_assert (tkind
8179 < (HOST_WIDE_INT_C (1U) << talign_shift
));
8180 gcc_checking_assert (tkind
8181 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
8182 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
8183 build_int_cstu (tkind_type
, tkind
));
8187 gcc_assert (map_idx
== map_cnt
);
8189 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
8190 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
8191 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
8192 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
8193 for (int i
= 1; i
<= 2; i
++)
8194 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
8196 gimple_seq initlist
= NULL
;
8197 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
8198 TREE_VEC_ELT (t
, i
)),
8199 &initlist
, true, NULL_TREE
);
8200 gimple_seq_add_seq (&ilist
, initlist
);
8202 tree clobber
= build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, i
)),
8204 TREE_THIS_VOLATILE (clobber
) = 1;
8205 gimple_seq_add_stmt (&olist
,
8206 gimple_build_assign (TREE_VEC_ELT (t
, i
),
8210 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
8211 TREE_THIS_VOLATILE (clobber
) = 1;
8212 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
8216 /* Once all the expansions are done, sequence all the different
8217 fragments inside gimple_omp_body. */
8222 && ctx
->record_type
)
8224 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
8225 /* fixup_child_record_type might have changed receiver_decl's type. */
8226 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
8227 gimple_seq_add_stmt (&new_body
,
8228 gimple_build_assign (ctx
->receiver_decl
, t
));
8230 gimple_seq_add_seq (&new_body
, fplist
);
8232 if (offloaded
|| data_region
)
8234 tree prev
= NULL_TREE
;
8235 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8236 switch (OMP_CLAUSE_CODE (c
))
8241 case OMP_CLAUSE_FIRSTPRIVATE
:
8242 if (is_gimple_omp_oacc (ctx
->stmt
))
8244 var
= OMP_CLAUSE_DECL (c
);
8245 if (omp_is_reference (var
)
8246 || is_gimple_reg_type (TREE_TYPE (var
)))
8248 tree new_var
= lookup_decl (var
, ctx
);
8250 type
= TREE_TYPE (var
);
8251 if (omp_is_reference (var
))
8252 type
= TREE_TYPE (type
);
8253 if ((INTEGRAL_TYPE_P (type
)
8254 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
8255 || TREE_CODE (type
) == POINTER_TYPE
)
8257 x
= build_receiver_ref (var
, false, ctx
);
8258 if (TREE_CODE (type
) != POINTER_TYPE
)
8259 x
= fold_convert (pointer_sized_int_node
, x
);
8260 x
= fold_convert (type
, x
);
8261 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
8263 if (omp_is_reference (var
))
8265 tree v
= create_tmp_var_raw (type
, get_name (var
));
8266 gimple_add_tmp_var (v
);
8267 TREE_ADDRESSABLE (v
) = 1;
8268 gimple_seq_add_stmt (&new_body
,
8269 gimple_build_assign (v
, x
));
8270 x
= build_fold_addr_expr (v
);
8272 gimple_seq_add_stmt (&new_body
,
8273 gimple_build_assign (new_var
, x
));
8277 x
= build_receiver_ref (var
, !omp_is_reference (var
), ctx
);
8278 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
8280 gimple_seq_add_stmt (&new_body
,
8281 gimple_build_assign (new_var
, x
));
8284 else if (is_variable_sized (var
))
8286 tree pvar
= DECL_VALUE_EXPR (var
);
8287 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
8288 pvar
= TREE_OPERAND (pvar
, 0);
8289 gcc_assert (DECL_P (pvar
));
8290 tree new_var
= lookup_decl (pvar
, ctx
);
8291 x
= build_receiver_ref (var
, false, ctx
);
8292 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8293 gimple_seq_add_stmt (&new_body
,
8294 gimple_build_assign (new_var
, x
));
8297 case OMP_CLAUSE_PRIVATE
:
8298 if (is_gimple_omp_oacc (ctx
->stmt
))
8300 var
= OMP_CLAUSE_DECL (c
);
8301 if (omp_is_reference (var
))
8303 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8304 tree new_var
= lookup_decl (var
, ctx
);
8305 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
8306 if (TREE_CONSTANT (x
))
8308 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
8310 gimple_add_tmp_var (x
);
8311 TREE_ADDRESSABLE (x
) = 1;
8312 x
= build_fold_addr_expr_loc (clause_loc
, x
);
8317 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
8318 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8319 gimple_seq_add_stmt (&new_body
,
8320 gimple_build_assign (new_var
, x
));
8323 case OMP_CLAUSE_USE_DEVICE_PTR
:
8324 case OMP_CLAUSE_IS_DEVICE_PTR
:
8325 var
= OMP_CLAUSE_DECL (c
);
8326 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
8327 x
= build_sender_ref (var
, ctx
);
8329 x
= build_receiver_ref (var
, false, ctx
);
8330 if (is_variable_sized (var
))
8332 tree pvar
= DECL_VALUE_EXPR (var
);
8333 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
8334 pvar
= TREE_OPERAND (pvar
, 0);
8335 gcc_assert (DECL_P (pvar
));
8336 tree new_var
= lookup_decl (pvar
, ctx
);
8337 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8338 gimple_seq_add_stmt (&new_body
,
8339 gimple_build_assign (new_var
, x
));
8341 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
8343 tree new_var
= lookup_decl (var
, ctx
);
8344 new_var
= DECL_VALUE_EXPR (new_var
);
8345 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
8346 new_var
= TREE_OPERAND (new_var
, 0);
8347 gcc_assert (DECL_P (new_var
));
8348 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8349 gimple_seq_add_stmt (&new_body
,
8350 gimple_build_assign (new_var
, x
));
8354 tree type
= TREE_TYPE (var
);
8355 tree new_var
= lookup_decl (var
, ctx
);
8356 if (omp_is_reference (var
))
8358 type
= TREE_TYPE (type
);
8359 if (TREE_CODE (type
) != ARRAY_TYPE
)
8361 tree v
= create_tmp_var_raw (type
, get_name (var
));
8362 gimple_add_tmp_var (v
);
8363 TREE_ADDRESSABLE (v
) = 1;
8364 x
= fold_convert (type
, x
);
8365 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
8367 gimple_seq_add_stmt (&new_body
,
8368 gimple_build_assign (v
, x
));
8369 x
= build_fold_addr_expr (v
);
8372 new_var
= DECL_VALUE_EXPR (new_var
);
8373 x
= fold_convert (TREE_TYPE (new_var
), x
);
8374 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8375 gimple_seq_add_stmt (&new_body
,
8376 gimple_build_assign (new_var
, x
));
8380 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
8381 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
8382 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8383 or references to VLAs. */
8384 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8385 switch (OMP_CLAUSE_CODE (c
))
8390 case OMP_CLAUSE_MAP
:
8391 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8392 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
8394 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8395 poly_int64 offset
= 0;
8397 var
= OMP_CLAUSE_DECL (c
);
8399 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
8400 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
8402 && varpool_node::get_create (var
)->offloadable
)
8404 if (TREE_CODE (var
) == INDIRECT_REF
8405 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
8406 var
= TREE_OPERAND (var
, 0);
8407 if (TREE_CODE (var
) == COMPONENT_REF
)
8409 var
= get_addr_base_and_unit_offset (var
, &offset
);
8410 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
8412 else if (DECL_SIZE (var
)
8413 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
8415 tree var2
= DECL_VALUE_EXPR (var
);
8416 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
8417 var2
= TREE_OPERAND (var2
, 0);
8418 gcc_assert (DECL_P (var2
));
8421 tree new_var
= lookup_decl (var
, ctx
), x
;
8422 tree type
= TREE_TYPE (new_var
);
8424 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
8425 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
8428 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
8430 new_var
= build2 (MEM_REF
, type
,
8431 build_fold_addr_expr (new_var
),
8432 build_int_cst (build_pointer_type (type
),
8435 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
8437 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
8438 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
8439 new_var
= build2 (MEM_REF
, type
,
8440 build_fold_addr_expr (new_var
),
8441 build_int_cst (build_pointer_type (type
),
8445 is_ref
= omp_is_reference (var
);
8446 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
8448 bool ref_to_array
= false;
8451 type
= TREE_TYPE (type
);
8452 if (TREE_CODE (type
) == ARRAY_TYPE
)
8454 type
= build_pointer_type (type
);
8455 ref_to_array
= true;
8458 else if (TREE_CODE (type
) == ARRAY_TYPE
)
8460 tree decl2
= DECL_VALUE_EXPR (new_var
);
8461 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
8462 decl2
= TREE_OPERAND (decl2
, 0);
8463 gcc_assert (DECL_P (decl2
));
8465 type
= TREE_TYPE (new_var
);
8467 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
8468 x
= fold_convert_loc (clause_loc
, type
, x
);
8469 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
8471 tree bias
= OMP_CLAUSE_SIZE (c
);
8473 bias
= lookup_decl (bias
, ctx
);
8474 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
8475 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
8477 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
8478 TREE_TYPE (x
), x
, bias
);
8481 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
8482 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8483 if (is_ref
&& !ref_to_array
)
8485 tree t
= create_tmp_var_raw (type
, get_name (var
));
8486 gimple_add_tmp_var (t
);
8487 TREE_ADDRESSABLE (t
) = 1;
8488 gimple_seq_add_stmt (&new_body
,
8489 gimple_build_assign (t
, x
));
8490 x
= build_fold_addr_expr_loc (clause_loc
, t
);
8492 gimple_seq_add_stmt (&new_body
,
8493 gimple_build_assign (new_var
, x
));
8496 else if (OMP_CLAUSE_CHAIN (c
)
8497 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
8499 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
8500 == GOMP_MAP_FIRSTPRIVATE_POINTER
8501 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
8502 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
8505 case OMP_CLAUSE_PRIVATE
:
8506 var
= OMP_CLAUSE_DECL (c
);
8507 if (is_variable_sized (var
))
8509 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8510 tree new_var
= lookup_decl (var
, ctx
);
8511 tree pvar
= DECL_VALUE_EXPR (var
);
8512 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
8513 pvar
= TREE_OPERAND (pvar
, 0);
8514 gcc_assert (DECL_P (pvar
));
8515 tree new_pvar
= lookup_decl (pvar
, ctx
);
8516 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
8517 tree al
= size_int (DECL_ALIGN (var
));
8518 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
8519 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
8520 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
8521 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8522 gimple_seq_add_stmt (&new_body
,
8523 gimple_build_assign (new_pvar
, x
));
8525 else if (omp_is_reference (var
) && !is_gimple_omp_oacc (ctx
->stmt
))
8527 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8528 tree new_var
= lookup_decl (var
, ctx
);
8529 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
8530 if (TREE_CONSTANT (x
))
8535 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
8536 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
8537 tree al
= size_int (TYPE_ALIGN (rtype
));
8538 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
8541 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
8542 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8543 gimple_seq_add_stmt (&new_body
,
8544 gimple_build_assign (new_var
, x
));
8549 gimple_seq fork_seq
= NULL
;
8550 gimple_seq join_seq
= NULL
;
8552 if (is_oacc_parallel (ctx
))
8554 /* If there are reductions on the offloaded region itself, treat
8555 them as a dummy GANG loop. */
8556 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
8558 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
8559 false, NULL
, NULL
, &fork_seq
, &join_seq
, ctx
);
8562 gimple_seq_add_seq (&new_body
, fork_seq
);
8563 gimple_seq_add_seq (&new_body
, tgt_body
);
8564 gimple_seq_add_seq (&new_body
, join_seq
);
8567 new_body
= maybe_catch_exception (new_body
);
8569 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
8570 gimple_omp_set_body (stmt
, new_body
);
8573 bind
= gimple_build_bind (NULL
, NULL
,
8574 tgt_bind
? gimple_bind_block (tgt_bind
)
8576 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
8577 gimple_bind_add_seq (bind
, ilist
);
8578 gimple_bind_add_stmt (bind
, stmt
);
8579 gimple_bind_add_seq (bind
, olist
);
8581 pop_gimplify_context (NULL
);
8585 gimple_bind_add_seq (dep_bind
, dep_ilist
);
8586 gimple_bind_add_stmt (dep_bind
, bind
);
8587 gimple_bind_add_seq (dep_bind
, dep_olist
);
8588 pop_gimplify_context (dep_bind
);
8592 /* Expand code for an OpenMP teams directive. */
8595 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8597 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
8598 push_gimplify_context ();
8600 tree block
= make_node (BLOCK
);
8601 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
8602 gsi_replace (gsi_p
, bind
, true);
8603 gimple_seq bind_body
= NULL
;
8604 gimple_seq dlist
= NULL
;
8605 gimple_seq olist
= NULL
;
8607 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
8608 OMP_CLAUSE_NUM_TEAMS
);
8609 if (num_teams
== NULL_TREE
)
8610 num_teams
= build_int_cst (unsigned_type_node
, 0);
8613 num_teams
= OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams
);
8614 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
8615 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
8617 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
8618 OMP_CLAUSE_THREAD_LIMIT
);
8619 if (thread_limit
== NULL_TREE
)
8620 thread_limit
= build_int_cst (unsigned_type_node
, 0);
8623 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
8624 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
8625 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
8629 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
8630 &bind_body
, &dlist
, ctx
, NULL
);
8631 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
8632 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
, ctx
);
8633 if (!gimple_omp_teams_grid_phony (teams_stmt
))
8635 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
8636 location_t loc
= gimple_location (teams_stmt
);
8637 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS
);
8638 gimple
*call
= gimple_build_call (decl
, 2, num_teams
, thread_limit
);
8639 gimple_set_location (call
, loc
);
8640 gimple_seq_add_stmt (&bind_body
, call
);
8643 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
8644 gimple_omp_set_body (teams_stmt
, NULL
);
8645 gimple_seq_add_seq (&bind_body
, olist
);
8646 gimple_seq_add_seq (&bind_body
, dlist
);
8647 if (!gimple_omp_teams_grid_phony (teams_stmt
))
8648 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
8649 gimple_bind_set_body (bind
, bind_body
);
8651 pop_gimplify_context (bind
);
8653 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8654 BLOCK_VARS (block
) = ctx
->block_vars
;
8655 if (BLOCK_VARS (block
))
8656 TREE_USED (block
) = 1;
8659 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8662 lower_omp_grid_body (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8664 gimple
*stmt
= gsi_stmt (*gsi_p
);
8665 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8666 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt
),
8667 gimple_build_omp_return (false));
8671 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8672 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8673 of OMP context, but with task_shared_vars set. */
8676 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
8681 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8682 if (VAR_P (t
) && data
== NULL
&& DECL_HAS_VALUE_EXPR_P (t
))
8685 if (task_shared_vars
8687 && bitmap_bit_p (task_shared_vars
, DECL_UID (t
)))
8690 /* If a global variable has been privatized, TREE_CONSTANT on
8691 ADDR_EXPR might be wrong. */
8692 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
8693 recompute_tree_invariant_for_addr_expr (t
);
8695 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
8699 /* Data to be communicated between lower_omp_regimplify_operands and
8700 lower_omp_regimplify_operands_p. */
8702 struct lower_omp_regimplify_operands_data
8708 /* Helper function for lower_omp_regimplify_operands. Find
8709 omp_member_access_dummy_var vars and adjust temporarily their
8710 DECL_VALUE_EXPRs if needed. */
8713 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
8716 tree t
= omp_member_access_dummy_var (*tp
);
8719 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
8720 lower_omp_regimplify_operands_data
*ldata
8721 = (lower_omp_regimplify_operands_data
*) wi
->info
;
8722 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
8725 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
8726 ldata
->decls
->safe_push (*tp
);
8727 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
8728 SET_DECL_VALUE_EXPR (*tp
, v
);
8731 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
8735 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8736 of omp_member_access_dummy_var vars during regimplification. */
8739 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
8740 gimple_stmt_iterator
*gsi_p
)
8742 auto_vec
<tree
, 10> decls
;
8745 struct walk_stmt_info wi
;
8746 memset (&wi
, '\0', sizeof (wi
));
8747 struct lower_omp_regimplify_operands_data data
;
8749 data
.decls
= &decls
;
8751 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
8753 gimple_regimplify_operands (stmt
, gsi_p
);
8754 while (!decls
.is_empty ())
8756 tree t
= decls
.pop ();
8757 tree v
= decls
.pop ();
8758 SET_DECL_VALUE_EXPR (t
, v
);
8763 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8765 gimple
*stmt
= gsi_stmt (*gsi_p
);
8766 struct walk_stmt_info wi
;
8769 if (gimple_has_location (stmt
))
8770 input_location
= gimple_location (stmt
);
8772 if (task_shared_vars
)
8773 memset (&wi
, '\0', sizeof (wi
));
8775 /* If we have issued syntax errors, avoid doing any heavy lifting.
8776 Just replace the OMP directives with a NOP to avoid
8777 confusing RTL expansion. */
8778 if (seen_error () && is_gimple_omp (stmt
))
8780 gsi_replace (gsi_p
, gimple_build_nop (), true);
8784 switch (gimple_code (stmt
))
8788 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
8789 if ((ctx
|| task_shared_vars
)
8790 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
8791 lower_omp_regimplify_p
,
8792 ctx
? NULL
: &wi
, NULL
)
8793 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
8794 lower_omp_regimplify_p
,
8795 ctx
? NULL
: &wi
, NULL
)))
8796 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
8800 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
8802 case GIMPLE_EH_FILTER
:
8803 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
8806 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
8807 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
8809 case GIMPLE_TRANSACTION
:
8810 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
8814 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
8815 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
8817 case GIMPLE_OMP_PARALLEL
:
8818 case GIMPLE_OMP_TASK
:
8819 ctx
= maybe_lookup_ctx (stmt
);
8821 if (ctx
->cancellable
)
8822 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
8823 lower_omp_taskreg (gsi_p
, ctx
);
8825 case GIMPLE_OMP_FOR
:
8826 ctx
= maybe_lookup_ctx (stmt
);
8828 if (ctx
->cancellable
)
8829 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
8830 lower_omp_for (gsi_p
, ctx
);
8832 case GIMPLE_OMP_SECTIONS
:
8833 ctx
= maybe_lookup_ctx (stmt
);
8835 if (ctx
->cancellable
)
8836 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
8837 lower_omp_sections (gsi_p
, ctx
);
8839 case GIMPLE_OMP_SINGLE
:
8840 ctx
= maybe_lookup_ctx (stmt
);
8842 lower_omp_single (gsi_p
, ctx
);
8844 case GIMPLE_OMP_MASTER
:
8845 ctx
= maybe_lookup_ctx (stmt
);
8847 lower_omp_master (gsi_p
, ctx
);
8849 case GIMPLE_OMP_TASKGROUP
:
8850 ctx
= maybe_lookup_ctx (stmt
);
8852 lower_omp_taskgroup (gsi_p
, ctx
);
8854 case GIMPLE_OMP_ORDERED
:
8855 ctx
= maybe_lookup_ctx (stmt
);
8857 lower_omp_ordered (gsi_p
, ctx
);
8859 case GIMPLE_OMP_CRITICAL
:
8860 ctx
= maybe_lookup_ctx (stmt
);
8862 lower_omp_critical (gsi_p
, ctx
);
8864 case GIMPLE_OMP_ATOMIC_LOAD
:
8865 if ((ctx
|| task_shared_vars
)
8866 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8867 as_a
<gomp_atomic_load
*> (stmt
)),
8868 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
8869 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
8871 case GIMPLE_OMP_TARGET
:
8872 ctx
= maybe_lookup_ctx (stmt
);
8874 lower_omp_target (gsi_p
, ctx
);
8876 case GIMPLE_OMP_TEAMS
:
8877 ctx
= maybe_lookup_ctx (stmt
);
8879 lower_omp_teams (gsi_p
, ctx
);
8881 case GIMPLE_OMP_GRID_BODY
:
8882 ctx
= maybe_lookup_ctx (stmt
);
8884 lower_omp_grid_body (gsi_p
, ctx
);
8888 call_stmt
= as_a
<gcall
*> (stmt
);
8889 fndecl
= gimple_call_fndecl (call_stmt
);
8891 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
8892 switch (DECL_FUNCTION_CODE (fndecl
))
8894 case BUILT_IN_GOMP_BARRIER
:
8898 case BUILT_IN_GOMP_CANCEL
:
8899 case BUILT_IN_GOMP_CANCELLATION_POINT
:
8902 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
8904 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
8905 if (!cctx
->cancellable
)
8907 if (DECL_FUNCTION_CODE (fndecl
)
8908 == BUILT_IN_GOMP_CANCELLATION_POINT
)
8910 stmt
= gimple_build_nop ();
8911 gsi_replace (gsi_p
, stmt
, false);
8915 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
8917 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
8918 gimple_call_set_fndecl (call_stmt
, fndecl
);
8919 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
8922 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
8923 gimple_call_set_lhs (call_stmt
, lhs
);
8924 tree fallthru_label
;
8925 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
8927 g
= gimple_build_label (fallthru_label
);
8928 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
8929 g
= gimple_build_cond (NE_EXPR
, lhs
,
8930 fold_convert (TREE_TYPE (lhs
),
8931 boolean_false_node
),
8932 cctx
->cancel_label
, fallthru_label
);
8933 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
8940 if ((ctx
|| task_shared_vars
)
8941 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
8944 /* Just remove clobbers, this should happen only if we have
8945 "privatized" local addressable variables in SIMD regions,
8946 the clobber isn't needed in that case and gimplifying address
8947 of the ARRAY_REF into a pointer and creating MEM_REF based
8948 clobber would create worse code than we get with the clobber
8950 if (gimple_clobber_p (stmt
))
8952 gsi_replace (gsi_p
, gimple_build_nop (), true);
8955 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
8962 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
8964 location_t saved_location
= input_location
;
8965 gimple_stmt_iterator gsi
;
8966 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
8967 lower_omp_1 (&gsi
, ctx
);
8968 /* During gimplification, we haven't folded statments inside offloading
8969 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8970 if (target_nesting_level
|| taskreg_nesting_level
)
8971 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
8973 input_location
= saved_location
;
8976 /* Main entry point. */
8979 execute_lower_omp (void)
8985 /* This pass always runs, to provide PROP_gimple_lomp.
8986 But often, there is nothing to do. */
8987 if (flag_openacc
== 0 && flag_openmp
== 0
8988 && flag_openmp_simd
== 0)
8991 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
8992 delete_omp_context
);
8994 body
= gimple_body (current_function_decl
);
8996 if (hsa_gen_requested_p ())
8997 omp_grid_gridify_all_targets (&body
);
8999 scan_omp (&body
, NULL
);
9000 gcc_assert (taskreg_nesting_level
== 0);
9001 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
9002 finish_taskreg_scan (ctx
);
9003 taskreg_contexts
.release ();
9005 if (all_contexts
->root
)
9007 if (task_shared_vars
)
9008 push_gimplify_context ();
9009 lower_omp (&body
, NULL
);
9010 if (task_shared_vars
)
9011 pop_gimplify_context (NULL
);
9016 splay_tree_delete (all_contexts
);
9017 all_contexts
= NULL
;
9019 BITMAP_FREE (task_shared_vars
);
9021 /* If current function is a method, remove artificial dummy VAR_DECL created
9022 for non-static data member privatization, they aren't needed for
9023 debuginfo nor anything else, have been already replaced everywhere in the
9024 IL and cause problems with LTO. */
9025 if (DECL_ARGUMENTS (current_function_decl
)
9026 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
9027 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
9029 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
9035 const pass_data pass_data_lower_omp
=
9037 GIMPLE_PASS
, /* type */
9038 "omplower", /* name */
9039 OPTGROUP_OMP
, /* optinfo_flags */
9040 TV_NONE
, /* tv_id */
9041 PROP_gimple_any
, /* properties_required */
9042 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
9043 0, /* properties_destroyed */
9044 0, /* todo_flags_start */
9045 0, /* todo_flags_finish */
9048 class pass_lower_omp
: public gimple_opt_pass
9051 pass_lower_omp (gcc::context
*ctxt
)
9052 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
9055 /* opt_pass methods: */
9056 virtual unsigned int execute (function
*) { return execute_lower_omp (); }
9058 }; // class pass_lower_omp
9063 make_pass_lower_omp (gcc::context
*ctxt
)
9065 return new pass_lower_omp (ctxt
);
9068 /* The following is a utility to diagnose structured block violations.
9069 It is not part of the "omplower" pass, as that's invoked too late. It
9070 should be invoked by the respective front ends after gimplification. */
9072 static splay_tree all_labels
;
9074 /* Check for mismatched contexts and generate an error if needed. Return
9075 true if an error is detected. */
9078 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
9079 gimple
*branch_ctx
, gimple
*label_ctx
)
9081 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
9082 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
9084 if (label_ctx
== branch_ctx
)
9087 const char* kind
= NULL
;
9091 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
9092 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
9094 gcc_checking_assert (kind
== NULL
);
9100 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
9104 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9105 so we could traverse it and issue a correct "exit" or "enter" error
9106 message upon a structured block violation.
9108 We built the context by building a list with tree_cons'ing, but there is
9109 no easy counterpart in gimple tuples. It seems like far too much work
9110 for issuing exit/enter error messages. If someone really misses the
9111 distinct error message... patches welcome. */
9114 /* Try to avoid confusing the user by producing and error message
9115 with correct "exit" or "enter" verbiage. We prefer "exit"
9116 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9117 if (branch_ctx
== NULL
)
9123 if (TREE_VALUE (label_ctx
) == branch_ctx
)
9128 label_ctx
= TREE_CHAIN (label_ctx
);
9133 error ("invalid exit from %s structured block", kind
);
9135 error ("invalid entry to %s structured block", kind
);
9138 /* If it's obvious we have an invalid entry, be specific about the error. */
9139 if (branch_ctx
== NULL
)
9140 error ("invalid entry to %s structured block", kind
);
9143 /* Otherwise, be vague and lazy, but efficient. */
9144 error ("invalid branch to/from %s structured block", kind
);
9147 gsi_replace (gsi_p
, gimple_build_nop (), false);
9151 /* Pass 1: Create a minimal tree of structured blocks, and record
9152 where each label is found. */
9155 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
9156 struct walk_stmt_info
*wi
)
9158 gimple
*context
= (gimple
*) wi
->info
;
9159 gimple
*inner_context
;
9160 gimple
*stmt
= gsi_stmt (*gsi_p
);
9162 *handled_ops_p
= true;
9164 switch (gimple_code (stmt
))
9168 case GIMPLE_OMP_PARALLEL
:
9169 case GIMPLE_OMP_TASK
:
9170 case GIMPLE_OMP_SECTIONS
:
9171 case GIMPLE_OMP_SINGLE
:
9172 case GIMPLE_OMP_SECTION
:
9173 case GIMPLE_OMP_MASTER
:
9174 case GIMPLE_OMP_ORDERED
:
9175 case GIMPLE_OMP_CRITICAL
:
9176 case GIMPLE_OMP_TARGET
:
9177 case GIMPLE_OMP_TEAMS
:
9178 case GIMPLE_OMP_TASKGROUP
:
9179 /* The minimal context here is just the current OMP construct. */
9180 inner_context
= stmt
;
9181 wi
->info
= inner_context
;
9182 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
9186 case GIMPLE_OMP_FOR
:
9187 inner_context
= stmt
;
9188 wi
->info
= inner_context
;
9189 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9191 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
9192 diagnose_sb_1
, NULL
, wi
);
9193 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
9198 splay_tree_insert (all_labels
,
9199 (splay_tree_key
) gimple_label_label (
9200 as_a
<glabel
*> (stmt
)),
9201 (splay_tree_value
) context
);
9211 /* Pass 2: Check each branch and see if its context differs from that of
9212 the destination label's context. */
9215 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
9216 struct walk_stmt_info
*wi
)
9218 gimple
*context
= (gimple
*) wi
->info
;
9220 gimple
*stmt
= gsi_stmt (*gsi_p
);
9222 *handled_ops_p
= true;
9224 switch (gimple_code (stmt
))
9228 case GIMPLE_OMP_PARALLEL
:
9229 case GIMPLE_OMP_TASK
:
9230 case GIMPLE_OMP_SECTIONS
:
9231 case GIMPLE_OMP_SINGLE
:
9232 case GIMPLE_OMP_SECTION
:
9233 case GIMPLE_OMP_MASTER
:
9234 case GIMPLE_OMP_ORDERED
:
9235 case GIMPLE_OMP_CRITICAL
:
9236 case GIMPLE_OMP_TARGET
:
9237 case GIMPLE_OMP_TEAMS
:
9238 case GIMPLE_OMP_TASKGROUP
:
9240 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
9244 case GIMPLE_OMP_FOR
:
9246 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9248 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
9249 diagnose_sb_2
, NULL
, wi
);
9250 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
9256 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
9257 tree lab
= gimple_cond_true_label (cond_stmt
);
9260 n
= splay_tree_lookup (all_labels
,
9261 (splay_tree_key
) lab
);
9262 diagnose_sb_0 (gsi_p
, context
,
9263 n
? (gimple
*) n
->value
: NULL
);
9265 lab
= gimple_cond_false_label (cond_stmt
);
9268 n
= splay_tree_lookup (all_labels
,
9269 (splay_tree_key
) lab
);
9270 diagnose_sb_0 (gsi_p
, context
,
9271 n
? (gimple
*) n
->value
: NULL
);
9278 tree lab
= gimple_goto_dest (stmt
);
9279 if (TREE_CODE (lab
) != LABEL_DECL
)
9282 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
9283 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
9289 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
9291 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
9293 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
9294 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
9295 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
9302 diagnose_sb_0 (gsi_p
, context
, NULL
);
9313 diagnose_omp_structured_block_errors (void)
9315 struct walk_stmt_info wi
;
9316 gimple_seq body
= gimple_body (current_function_decl
);
9318 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
9320 memset (&wi
, 0, sizeof (wi
));
9321 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
9323 memset (&wi
, 0, sizeof (wi
));
9324 wi
.want_locations
= true;
9325 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
9327 gimple_set_body (current_function_decl
, body
);
9329 splay_tree_delete (all_labels
);
9337 const pass_data pass_data_diagnose_omp_blocks
=
9339 GIMPLE_PASS
, /* type */
9340 "*diagnose_omp_blocks", /* name */
9341 OPTGROUP_OMP
, /* optinfo_flags */
9342 TV_NONE
, /* tv_id */
9343 PROP_gimple_any
, /* properties_required */
9344 0, /* properties_provided */
9345 0, /* properties_destroyed */
9346 0, /* todo_flags_start */
9347 0, /* todo_flags_finish */
9350 class pass_diagnose_omp_blocks
: public gimple_opt_pass
9353 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
9354 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
9357 /* opt_pass methods: */
9358 virtual bool gate (function
*)
9360 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
9362 virtual unsigned int execute (function
*)
9364 return diagnose_omp_structured_block_errors ();
9367 }; // class pass_diagnose_omp_blocks
9372 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
9374 return new pass_diagnose_omp_blocks (ctxt
);
9378 #include "gt-omp-low.h"