1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2016 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
73 /* Context structure. Used to store information about each parallel
74 directive in the code. */
78 /* This field must be at the beginning, as we do "inheritance": Some
79 callback functions for tree-inline.c (e.g., omp_copy_decl)
80 receive a copy_body_data pointer that is up-casted to an
81 omp_context pointer. */
84 /* The tree of contexts corresponding to the encountered constructs. */
85 struct omp_context
*outer
;
88 /* Map variables to fields in a structure that allows communication
89 between sending and receiving threads. */
95 /* These are used just by task contexts, if task firstprivate fn is
96 needed. srecord_type is used to communicate from the thread
97 that encountered the task construct to task firstprivate fn,
98 record_type is allocated by GOMP_task, initialized by task firstprivate
99 fn and passed to the task body fn. */
100 splay_tree sfield_map
;
103 /* A chain of variables to add to the top-level block surrounding the
104 construct. In the case of a parallel, this is in the child function. */
107 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
108 barriers should jump to during omplower pass. */
111 /* What to do with variables with implicitly determined sharing
113 enum omp_clause_default_kind default_kind
;
115 /* Nesting depth of this context. Used to beautify error messages re
116 invalid gotos. The outermost ctx is depth 1, with depth 0 being
117 reserved for the main body of the function. */
120 /* True if this parallel directive is nested within another. */
123 /* True if this construct can be cancelled. */
127 static splay_tree all_contexts
;
128 static int taskreg_nesting_level
;
129 static int target_nesting_level
;
130 static bitmap task_shared_vars
;
131 static vec
<omp_context
*> taskreg_contexts
;
133 static void scan_omp (gimple_seq
*, omp_context
*);
134 static tree
scan_omp_1_op (tree
*, int *, void *);
136 #define WALK_SUBSTMTS \
140 case GIMPLE_EH_FILTER: \
141 case GIMPLE_TRANSACTION: \
142 /* The sub-statements for these should be walked. */ \
143 *handled_ops_p = false; \
146 /* Return true if CTX corresponds to an oacc parallel region. */
149 is_oacc_parallel (omp_context
*ctx
)
151 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
152 return ((outer_type
== GIMPLE_OMP_TARGET
)
153 && (gimple_omp_target_kind (ctx
->stmt
)
154 == GF_OMP_TARGET_KIND_OACC_PARALLEL
));
157 /* Return true if CTX corresponds to an oacc kernels region. */
160 is_oacc_kernels (omp_context
*ctx
)
162 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
163 return ((outer_type
== GIMPLE_OMP_TARGET
)
164 && (gimple_omp_target_kind (ctx
->stmt
)
165 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
168 /* If DECL is the artificial dummy VAR_DECL created for non-static
169 data member privatization, return the underlying "this" parameter,
170 otherwise return NULL. */
173 omp_member_access_dummy_var (tree decl
)
176 || !DECL_ARTIFICIAL (decl
)
177 || !DECL_IGNORED_P (decl
)
178 || !DECL_HAS_VALUE_EXPR_P (decl
)
179 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
182 tree v
= DECL_VALUE_EXPR (decl
);
183 if (TREE_CODE (v
) != COMPONENT_REF
)
187 switch (TREE_CODE (v
))
193 case POINTER_PLUS_EXPR
:
194 v
= TREE_OPERAND (v
, 0);
197 if (DECL_CONTEXT (v
) == current_function_decl
198 && DECL_ARTIFICIAL (v
)
199 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
207 /* Helper for unshare_and_remap, called through walk_tree. */
210 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
212 tree
*pair
= (tree
*) data
;
215 *tp
= unshare_expr (pair
[1]);
218 else if (IS_TYPE_OR_DECL_P (*tp
))
223 /* Return unshare_expr (X) with all occurrences of FROM
227 unshare_and_remap (tree x
, tree from
, tree to
)
229 tree pair
[2] = { from
, to
};
230 x
= unshare_expr (x
);
231 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
235 /* Convenience function for calling scan_omp_1_op on tree operands. */
238 scan_omp_op (tree
*tp
, omp_context
*ctx
)
240 struct walk_stmt_info wi
;
242 memset (&wi
, 0, sizeof (wi
));
244 wi
.want_locations
= true;
246 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
249 static void lower_omp (gimple_seq
*, omp_context
*);
250 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
251 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
253 /* Return true if CTX is for an omp parallel. */
256 is_parallel_ctx (omp_context
*ctx
)
258 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
262 /* Return true if CTX is for an omp task. */
265 is_task_ctx (omp_context
*ctx
)
267 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
271 /* Return true if CTX is for an omp taskloop. */
274 is_taskloop_ctx (omp_context
*ctx
)
276 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
277 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
281 /* Return true if CTX is for an omp parallel or omp task. */
284 is_taskreg_ctx (omp_context
*ctx
)
286 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
);
289 /* Return true if EXPR is variable sized. */
292 is_variable_sized (const_tree expr
)
294 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
297 /* Lookup variables. The "maybe" form
298 allows for the variable form to not have been entered, otherwise we
299 assert that the variable must have been entered. */
302 lookup_decl (tree var
, omp_context
*ctx
)
304 tree
*n
= ctx
->cb
.decl_map
->get (var
);
309 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
311 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
312 return n
? *n
: NULL_TREE
;
316 lookup_field (tree var
, omp_context
*ctx
)
319 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
320 return (tree
) n
->value
;
324 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
327 n
= splay_tree_lookup (ctx
->sfield_map
328 ? ctx
->sfield_map
: ctx
->field_map
, key
);
329 return (tree
) n
->value
;
333 lookup_sfield (tree var
, omp_context
*ctx
)
335 return lookup_sfield ((splay_tree_key
) var
, ctx
);
339 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
342 n
= splay_tree_lookup (ctx
->field_map
, key
);
343 return n
? (tree
) n
->value
: NULL_TREE
;
347 maybe_lookup_field (tree var
, omp_context
*ctx
)
349 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
352 /* Return true if DECL should be copied by pointer. SHARED_CTX is
353 the parallel context if DECL is to be shared. */
356 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
358 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
359 || TYPE_ATOMIC (TREE_TYPE (decl
)))
362 /* We can only use copy-in/copy-out semantics for shared variables
363 when we know the value is not accessible from an outer scope. */
366 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
368 /* ??? Trivially accessible from anywhere. But why would we even
369 be passing an address in this case? Should we simply assert
370 this to be false, or should we have a cleanup pass that removes
371 these from the list of mappings? */
372 if (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
375 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
376 without analyzing the expression whether or not its location
377 is accessible to anyone else. In the case of nested parallel
378 regions it certainly may be. */
379 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
382 /* Do not use copy-in/copy-out for variables that have their
384 if (TREE_ADDRESSABLE (decl
))
387 /* lower_send_shared_vars only uses copy-in, but not copy-out
389 if (TREE_READONLY (decl
)
390 || ((TREE_CODE (decl
) == RESULT_DECL
391 || TREE_CODE (decl
) == PARM_DECL
)
392 && DECL_BY_REFERENCE (decl
)))
395 /* Disallow copy-in/out in nested parallel if
396 decl is shared in outer parallel, otherwise
397 each thread could store the shared variable
398 in its own copy-in location, making the
399 variable no longer really shared. */
400 if (shared_ctx
->is_nested
)
404 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
405 if (is_taskreg_ctx (up
) && maybe_lookup_decl (decl
, up
))
412 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
413 c
; c
= OMP_CLAUSE_CHAIN (c
))
414 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
415 && OMP_CLAUSE_DECL (c
) == decl
)
419 goto maybe_mark_addressable_and_ret
;
423 /* For tasks avoid using copy-in/out. As tasks can be
424 deferred or executed in different thread, when GOMP_task
425 returns, the task hasn't necessarily terminated. */
426 if (is_task_ctx (shared_ctx
))
429 maybe_mark_addressable_and_ret
:
430 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
431 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
433 /* Taking address of OUTER in lower_send_shared_vars
434 might need regimplification of everything that uses the
436 if (!task_shared_vars
)
437 task_shared_vars
= BITMAP_ALLOC (NULL
);
438 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
439 TREE_ADDRESSABLE (outer
) = 1;
448 /* Construct a new automatic decl similar to VAR. */
451 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
453 tree copy
= copy_var_decl (var
, name
, type
);
455 DECL_CONTEXT (copy
) = current_function_decl
;
456 DECL_CHAIN (copy
) = ctx
->block_vars
;
457 /* If VAR is listed in task_shared_vars, it means it wasn't
458 originally addressable and is just because task needs to take
459 it's address. But we don't need to take address of privatizations
461 if (TREE_ADDRESSABLE (var
)
463 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
464 TREE_ADDRESSABLE (copy
) = 0;
465 ctx
->block_vars
= copy
;
471 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
473 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
476 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
479 omp_build_component_ref (tree obj
, tree field
)
481 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
482 if (TREE_THIS_VOLATILE (field
))
483 TREE_THIS_VOLATILE (ret
) |= 1;
484 if (TREE_READONLY (field
))
485 TREE_READONLY (ret
) |= 1;
489 /* Build tree nodes to access the field for VAR on the receiver side. */
492 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
494 tree x
, field
= lookup_field (var
, ctx
);
496 /* If the receiver record type was remapped in the child function,
497 remap the field into the new record type. */
498 x
= maybe_lookup_field (field
, ctx
);
502 x
= build_simple_mem_ref (ctx
->receiver_decl
);
503 TREE_THIS_NOTRAP (x
) = 1;
504 x
= omp_build_component_ref (x
, field
);
507 x
= build_simple_mem_ref (x
);
508 TREE_THIS_NOTRAP (x
) = 1;
514 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
515 of a parallel, this is a component reference; for workshare constructs
516 this is some variable. */
519 build_outer_var_ref (tree var
, omp_context
*ctx
,
520 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
524 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
526 else if (is_variable_sized (var
))
528 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
529 x
= build_outer_var_ref (x
, ctx
, code
);
530 x
= build_simple_mem_ref (x
);
532 else if (is_taskreg_ctx (ctx
))
534 bool by_ref
= use_pointer_for_field (var
, NULL
);
535 x
= build_receiver_ref (var
, by_ref
, ctx
);
537 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
538 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
539 || (code
== OMP_CLAUSE_PRIVATE
540 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
541 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
542 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
544 /* #pragma omp simd isn't a worksharing construct, and can reference
545 even private vars in its linear etc. clauses.
546 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
547 to private vars in all worksharing constructs. */
549 if (ctx
->outer
&& is_taskreg_ctx (ctx
))
550 x
= lookup_decl (var
, ctx
->outer
);
552 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
556 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
558 gcc_assert (ctx
->outer
);
560 = splay_tree_lookup (ctx
->outer
->field_map
,
561 (splay_tree_key
) &DECL_UID (var
));
564 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
->outer
)))
567 x
= lookup_decl (var
, ctx
->outer
);
571 tree field
= (tree
) n
->value
;
572 /* If the receiver record type was remapped in the child function,
573 remap the field into the new record type. */
574 x
= maybe_lookup_field (field
, ctx
->outer
);
578 x
= build_simple_mem_ref (ctx
->outer
->receiver_decl
);
579 x
= omp_build_component_ref (x
, field
);
580 if (use_pointer_for_field (var
, ctx
->outer
))
581 x
= build_simple_mem_ref (x
);
586 omp_context
*outer
= ctx
->outer
;
587 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_GRID_BODY
)
589 outer
= outer
->outer
;
591 && gimple_code (outer
->stmt
) != GIMPLE_OMP_GRID_BODY
);
593 x
= lookup_decl (var
, outer
);
595 else if (omp_is_reference (var
))
596 /* This can happen with orphaned constructs. If var is reference, it is
597 possible it is shared and as such valid. */
599 else if (omp_member_access_dummy_var (var
))
606 tree t
= omp_member_access_dummy_var (var
);
609 x
= DECL_VALUE_EXPR (var
);
610 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
612 x
= unshare_and_remap (x
, t
, o
);
614 x
= unshare_expr (x
);
618 if (omp_is_reference (var
))
619 x
= build_simple_mem_ref (x
);
624 /* Build tree nodes to access the field for VAR on the sender side. */
627 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
629 tree field
= lookup_sfield (key
, ctx
);
630 return omp_build_component_ref (ctx
->sender_decl
, field
);
634 build_sender_ref (tree var
, omp_context
*ctx
)
636 return build_sender_ref ((splay_tree_key
) var
, ctx
);
639 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
640 BASE_POINTERS_RESTRICT, declare the field with restrict. */
643 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
,
644 bool base_pointers_restrict
= false)
646 tree field
, type
, sfield
= NULL_TREE
;
647 splay_tree_key key
= (splay_tree_key
) var
;
651 key
= (splay_tree_key
) &DECL_UID (var
);
652 gcc_checking_assert (key
!= (splay_tree_key
) var
);
654 gcc_assert ((mask
& 1) == 0
655 || !splay_tree_lookup (ctx
->field_map
, key
));
656 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
657 || !splay_tree_lookup (ctx
->sfield_map
, key
));
658 gcc_assert ((mask
& 3) == 3
659 || !is_gimple_omp_oacc (ctx
->stmt
));
661 type
= TREE_TYPE (var
);
662 /* Prevent redeclaring the var in the split-off function with a restrict
663 pointer type. Note that we only clear type itself, restrict qualifiers in
664 the pointed-to type will be ignored by points-to analysis. */
665 if (POINTER_TYPE_P (type
)
666 && TYPE_RESTRICT (type
))
667 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
671 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
672 type
= build_pointer_type (build_pointer_type (type
));
676 type
= build_pointer_type (type
);
677 if (base_pointers_restrict
)
678 type
= build_qualified_type (type
, TYPE_QUAL_RESTRICT
);
680 else if ((mask
& 3) == 1 && omp_is_reference (var
))
681 type
= TREE_TYPE (type
);
683 field
= build_decl (DECL_SOURCE_LOCATION (var
),
684 FIELD_DECL
, DECL_NAME (var
), type
);
686 /* Remember what variable this field was created for. This does have a
687 side effect of making dwarf2out ignore this member, so for helpful
688 debugging we clear it later in delete_omp_context. */
689 DECL_ABSTRACT_ORIGIN (field
) = var
;
690 if (type
== TREE_TYPE (var
))
692 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
693 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
694 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
697 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
701 insert_field_into_struct (ctx
->record_type
, field
);
702 if (ctx
->srecord_type
)
704 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
705 FIELD_DECL
, DECL_NAME (var
), type
);
706 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
707 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
708 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
709 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
710 insert_field_into_struct (ctx
->srecord_type
, sfield
);
715 if (ctx
->srecord_type
== NULL_TREE
)
719 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
720 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
721 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
723 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
724 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
725 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
726 insert_field_into_struct (ctx
->srecord_type
, sfield
);
727 splay_tree_insert (ctx
->sfield_map
,
728 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
729 (splay_tree_value
) sfield
);
733 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
734 : ctx
->srecord_type
, field
);
738 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
739 if ((mask
& 2) && ctx
->sfield_map
)
740 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
744 install_var_local (tree var
, omp_context
*ctx
)
746 tree new_var
= omp_copy_decl_1 (var
, ctx
);
747 insert_decl_map (&ctx
->cb
, var
, new_var
);
751 /* Adjust the replacement for DECL in CTX for the new context. This means
752 copying the DECL_VALUE_EXPR, and fixing up the type. */
755 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
759 new_decl
= lookup_decl (decl
, ctx
);
761 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
763 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
764 && DECL_HAS_VALUE_EXPR_P (decl
))
766 tree ve
= DECL_VALUE_EXPR (decl
);
767 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
768 SET_DECL_VALUE_EXPR (new_decl
, ve
);
769 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
772 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
774 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
775 if (size
== error_mark_node
)
776 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
777 DECL_SIZE (new_decl
) = size
;
779 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
780 if (size
== error_mark_node
)
781 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
782 DECL_SIZE_UNIT (new_decl
) = size
;
786 /* The callback for remap_decl. Search all containing contexts for a
787 mapping of the variable; this avoids having to duplicate the splay
788 tree ahead of time. We know a mapping doesn't already exist in the
789 given context. Create new mappings to implement default semantics. */
792 omp_copy_decl (tree var
, copy_body_data
*cb
)
794 omp_context
*ctx
= (omp_context
*) cb
;
797 if (TREE_CODE (var
) == LABEL_DECL
)
799 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
800 DECL_CONTEXT (new_var
) = current_function_decl
;
801 insert_decl_map (&ctx
->cb
, var
, new_var
);
805 while (!is_taskreg_ctx (ctx
))
810 new_var
= maybe_lookup_decl (var
, ctx
);
815 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
818 return error_mark_node
;
821 /* Create a new context, with OUTER_CTX being the surrounding context. */
824 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
826 omp_context
*ctx
= XCNEW (omp_context
);
828 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
829 (splay_tree_value
) ctx
);
834 ctx
->outer
= outer_ctx
;
835 ctx
->cb
= outer_ctx
->cb
;
836 ctx
->cb
.block
= NULL
;
837 ctx
->depth
= outer_ctx
->depth
+ 1;
841 ctx
->cb
.src_fn
= current_function_decl
;
842 ctx
->cb
.dst_fn
= current_function_decl
;
843 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
844 gcc_checking_assert (ctx
->cb
.src_node
);
845 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
846 ctx
->cb
.src_cfun
= cfun
;
847 ctx
->cb
.copy_decl
= omp_copy_decl
;
848 ctx
->cb
.eh_lp_nr
= 0;
849 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
853 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
858 static gimple_seq
maybe_catch_exception (gimple_seq
);
860 /* Finalize task copyfn. */
863 finalize_task_copyfn (gomp_task
*task_stmt
)
865 struct function
*child_cfun
;
867 gimple_seq seq
= NULL
, new_seq
;
870 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
871 if (child_fn
== NULL_TREE
)
874 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
875 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
877 push_cfun (child_cfun
);
878 bind
= gimplify_body (child_fn
, false);
879 gimple_seq_add_stmt (&seq
, bind
);
880 new_seq
= maybe_catch_exception (seq
);
883 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
885 gimple_seq_add_stmt (&seq
, bind
);
887 gimple_set_body (child_fn
, seq
);
890 /* Inform the callgraph about the new function. */
891 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
892 node
->parallelized_function
= 1;
893 cgraph_node::add_new_function (child_fn
, false);
896 /* Destroy a omp_context data structures. Called through the splay tree
897 value delete callback. */
900 delete_omp_context (splay_tree_value value
)
902 omp_context
*ctx
= (omp_context
*) value
;
904 delete ctx
->cb
.decl_map
;
907 splay_tree_delete (ctx
->field_map
);
909 splay_tree_delete (ctx
->sfield_map
);
911 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
912 it produces corrupt debug information. */
913 if (ctx
->record_type
)
916 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
917 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
919 if (ctx
->srecord_type
)
922 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
923 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
926 if (is_task_ctx (ctx
))
927 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
932 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
936 fixup_child_record_type (omp_context
*ctx
)
938 tree f
, type
= ctx
->record_type
;
940 if (!ctx
->receiver_decl
)
942 /* ??? It isn't sufficient to just call remap_type here, because
943 variably_modified_type_p doesn't work the way we expect for
944 record types. Testing each field for whether it needs remapping
945 and creating a new record by hand works, however. */
946 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
947 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
951 tree name
, new_fields
= NULL
;
953 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
954 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
955 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
956 TYPE_DECL
, name
, type
);
957 TYPE_NAME (type
) = name
;
959 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
961 tree new_f
= copy_node (f
);
962 DECL_CONTEXT (new_f
) = type
;
963 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
964 DECL_CHAIN (new_f
) = new_fields
;
965 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
966 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
968 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
972 /* Arrange to be able to look up the receiver field
973 given the sender field. */
974 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
975 (splay_tree_value
) new_f
);
977 TYPE_FIELDS (type
) = nreverse (new_fields
);
981 /* In a target region we never modify any of the pointers in *.omp_data_i,
982 so attempt to help the optimizers. */
983 if (is_gimple_omp_offloaded (ctx
->stmt
))
984 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
986 TREE_TYPE (ctx
->receiver_decl
)
987 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
990 /* Instantiate decls as necessary in CTX to satisfy the data sharing
991 specified by CLAUSES. If BASE_POINTERS_RESTRICT, install var field with
995 scan_sharing_clauses (tree clauses
, omp_context
*ctx
,
996 bool base_pointers_restrict
= false)
999 bool scan_array_reductions
= false;
1001 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1005 switch (OMP_CLAUSE_CODE (c
))
1007 case OMP_CLAUSE_PRIVATE
:
1008 decl
= OMP_CLAUSE_DECL (c
);
1009 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1011 else if (!is_variable_sized (decl
))
1012 install_var_local (decl
, ctx
);
1015 case OMP_CLAUSE_SHARED
:
1016 decl
= OMP_CLAUSE_DECL (c
);
1017 /* Ignore shared directives in teams construct. */
1018 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
1020 /* Global variables don't need to be copied,
1021 the receiver side will use them directly. */
1022 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1023 if (is_global_var (odecl
))
1025 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1028 gcc_assert (is_taskreg_ctx (ctx
));
1029 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1030 || !is_variable_sized (decl
));
1031 /* Global variables don't need to be copied,
1032 the receiver side will use them directly. */
1033 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1035 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1037 use_pointer_for_field (decl
, ctx
);
1040 by_ref
= use_pointer_for_field (decl
, NULL
);
1041 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1042 || TREE_ADDRESSABLE (decl
)
1044 || omp_is_reference (decl
))
1046 by_ref
= use_pointer_for_field (decl
, ctx
);
1047 install_var_field (decl
, by_ref
, 3, ctx
);
1048 install_var_local (decl
, ctx
);
1051 /* We don't need to copy const scalar vars back. */
1052 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1055 case OMP_CLAUSE_REDUCTION
:
1056 decl
= OMP_CLAUSE_DECL (c
);
1057 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1058 && TREE_CODE (decl
) == MEM_REF
)
1060 tree t
= TREE_OPERAND (decl
, 0);
1061 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1062 t
= TREE_OPERAND (t
, 0);
1063 if (TREE_CODE (t
) == INDIRECT_REF
1064 || TREE_CODE (t
) == ADDR_EXPR
)
1065 t
= TREE_OPERAND (t
, 0);
1066 install_var_local (t
, ctx
);
1067 if (is_taskreg_ctx (ctx
)
1068 && !is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1069 && !is_variable_sized (t
))
1071 by_ref
= use_pointer_for_field (t
, ctx
);
1072 install_var_field (t
, by_ref
, 3, ctx
);
1078 case OMP_CLAUSE_LASTPRIVATE
:
1079 /* Let the corresponding firstprivate clause create
1081 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1085 case OMP_CLAUSE_FIRSTPRIVATE
:
1086 case OMP_CLAUSE_LINEAR
:
1087 decl
= OMP_CLAUSE_DECL (c
);
1089 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1090 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1091 && is_gimple_omp_offloaded (ctx
->stmt
))
1093 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1094 install_var_field (decl
, !omp_is_reference (decl
), 3, ctx
);
1095 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1096 install_var_field (decl
, true, 3, ctx
);
1098 install_var_field (decl
, false, 3, ctx
);
1100 if (is_variable_sized (decl
))
1102 if (is_task_ctx (ctx
))
1103 install_var_field (decl
, false, 1, ctx
);
1106 else if (is_taskreg_ctx (ctx
))
1109 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1110 by_ref
= use_pointer_for_field (decl
, NULL
);
1112 if (is_task_ctx (ctx
)
1113 && (global
|| by_ref
|| omp_is_reference (decl
)))
1115 install_var_field (decl
, false, 1, ctx
);
1117 install_var_field (decl
, by_ref
, 2, ctx
);
1120 install_var_field (decl
, by_ref
, 3, ctx
);
1122 install_var_local (decl
, ctx
);
1125 case OMP_CLAUSE_USE_DEVICE_PTR
:
1126 decl
= OMP_CLAUSE_DECL (c
);
1127 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1128 install_var_field (decl
, true, 3, ctx
);
1130 install_var_field (decl
, false, 3, ctx
);
1131 if (DECL_SIZE (decl
)
1132 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1134 tree decl2
= DECL_VALUE_EXPR (decl
);
1135 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1136 decl2
= TREE_OPERAND (decl2
, 0);
1137 gcc_assert (DECL_P (decl2
));
1138 install_var_local (decl2
, ctx
);
1140 install_var_local (decl
, ctx
);
1143 case OMP_CLAUSE_IS_DEVICE_PTR
:
1144 decl
= OMP_CLAUSE_DECL (c
);
1147 case OMP_CLAUSE__LOOPTEMP_
:
1148 gcc_assert (is_taskreg_ctx (ctx
));
1149 decl
= OMP_CLAUSE_DECL (c
);
1150 install_var_field (decl
, false, 3, ctx
);
1151 install_var_local (decl
, ctx
);
1154 case OMP_CLAUSE_COPYPRIVATE
:
1155 case OMP_CLAUSE_COPYIN
:
1156 decl
= OMP_CLAUSE_DECL (c
);
1157 by_ref
= use_pointer_for_field (decl
, NULL
);
1158 install_var_field (decl
, by_ref
, 3, ctx
);
1161 case OMP_CLAUSE_DEFAULT
:
1162 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
1165 case OMP_CLAUSE_FINAL
:
1167 case OMP_CLAUSE_NUM_THREADS
:
1168 case OMP_CLAUSE_NUM_TEAMS
:
1169 case OMP_CLAUSE_THREAD_LIMIT
:
1170 case OMP_CLAUSE_DEVICE
:
1171 case OMP_CLAUSE_SCHEDULE
:
1172 case OMP_CLAUSE_DIST_SCHEDULE
:
1173 case OMP_CLAUSE_DEPEND
:
1174 case OMP_CLAUSE_PRIORITY
:
1175 case OMP_CLAUSE_GRAINSIZE
:
1176 case OMP_CLAUSE_NUM_TASKS
:
1177 case OMP_CLAUSE__CILK_FOR_COUNT_
:
1178 case OMP_CLAUSE_NUM_GANGS
:
1179 case OMP_CLAUSE_NUM_WORKERS
:
1180 case OMP_CLAUSE_VECTOR_LENGTH
:
1182 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1186 case OMP_CLAUSE_FROM
:
1187 case OMP_CLAUSE_MAP
:
1189 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1190 decl
= OMP_CLAUSE_DECL (c
);
1191 /* Global variables with "omp declare target" attribute
1192 don't need to be copied, the receiver side will use them
1193 directly. However, global variables with "omp declare target link"
1194 attribute need to be copied. */
1195 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1197 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1198 && (OMP_CLAUSE_MAP_KIND (c
)
1199 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1200 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1201 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1202 && varpool_node::get_create (decl
)->offloadable
1203 && !lookup_attribute ("omp declare target link",
1204 DECL_ATTRIBUTES (decl
)))
1206 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1207 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1209 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1210 not offloaded; there is nothing to map for those. */
1211 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1212 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1213 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1216 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1217 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1218 || (OMP_CLAUSE_MAP_KIND (c
)
1219 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1221 if (TREE_CODE (decl
) == COMPONENT_REF
1222 || (TREE_CODE (decl
) == INDIRECT_REF
1223 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1224 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1225 == REFERENCE_TYPE
)))
1227 if (DECL_SIZE (decl
)
1228 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1230 tree decl2
= DECL_VALUE_EXPR (decl
);
1231 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1232 decl2
= TREE_OPERAND (decl2
, 0);
1233 gcc_assert (DECL_P (decl2
));
1234 install_var_local (decl2
, ctx
);
1236 install_var_local (decl
, ctx
);
1241 if (DECL_SIZE (decl
)
1242 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1244 tree decl2
= DECL_VALUE_EXPR (decl
);
1245 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1246 decl2
= TREE_OPERAND (decl2
, 0);
1247 gcc_assert (DECL_P (decl2
));
1248 install_var_field (decl2
, true, 3, ctx
);
1249 install_var_local (decl2
, ctx
);
1250 install_var_local (decl
, ctx
);
1254 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1255 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1256 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1257 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1258 install_var_field (decl
, true, 7, ctx
);
1260 install_var_field (decl
, true, 3, ctx
,
1261 base_pointers_restrict
);
1262 if (is_gimple_omp_offloaded (ctx
->stmt
)
1263 && !OMP_CLAUSE_MAP_IN_REDUCTION (c
))
1264 install_var_local (decl
, ctx
);
1269 tree base
= get_base_address (decl
);
1270 tree nc
= OMP_CLAUSE_CHAIN (c
);
1273 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1274 && OMP_CLAUSE_DECL (nc
) == base
1275 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1276 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1278 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1279 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1285 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1286 decl
= OMP_CLAUSE_DECL (c
);
1288 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1289 (splay_tree_key
) decl
));
1291 = build_decl (OMP_CLAUSE_LOCATION (c
),
1292 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1293 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1294 insert_field_into_struct (ctx
->record_type
, field
);
1295 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1296 (splay_tree_value
) field
);
1301 case OMP_CLAUSE__GRIDDIM_
:
1304 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c
), ctx
->outer
);
1305 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c
), ctx
->outer
);
1309 case OMP_CLAUSE_NOWAIT
:
1310 case OMP_CLAUSE_ORDERED
:
1311 case OMP_CLAUSE_COLLAPSE
:
1312 case OMP_CLAUSE_UNTIED
:
1313 case OMP_CLAUSE_MERGEABLE
:
1314 case OMP_CLAUSE_PROC_BIND
:
1315 case OMP_CLAUSE_SAFELEN
:
1316 case OMP_CLAUSE_SIMDLEN
:
1317 case OMP_CLAUSE_THREADS
:
1318 case OMP_CLAUSE_SIMD
:
1319 case OMP_CLAUSE_NOGROUP
:
1320 case OMP_CLAUSE_DEFAULTMAP
:
1321 case OMP_CLAUSE_ASYNC
:
1322 case OMP_CLAUSE_WAIT
:
1323 case OMP_CLAUSE_GANG
:
1324 case OMP_CLAUSE_WORKER
:
1325 case OMP_CLAUSE_VECTOR
:
1326 case OMP_CLAUSE_INDEPENDENT
:
1327 case OMP_CLAUSE_AUTO
:
1328 case OMP_CLAUSE_SEQ
:
1329 case OMP_CLAUSE__SIMT_
:
1332 case OMP_CLAUSE_ALIGNED
:
1333 decl
= OMP_CLAUSE_DECL (c
);
1334 if (is_global_var (decl
)
1335 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1336 install_var_local (decl
, ctx
);
1339 case OMP_CLAUSE_TILE
:
1340 case OMP_CLAUSE__CACHE_
:
1346 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1348 switch (OMP_CLAUSE_CODE (c
))
1350 case OMP_CLAUSE_LASTPRIVATE
:
1351 /* Let the corresponding firstprivate clause create
1353 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1354 scan_array_reductions
= true;
1355 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1359 case OMP_CLAUSE_FIRSTPRIVATE
:
1360 case OMP_CLAUSE_PRIVATE
:
1361 case OMP_CLAUSE_LINEAR
:
1362 case OMP_CLAUSE_IS_DEVICE_PTR
:
1363 decl
= OMP_CLAUSE_DECL (c
);
1364 if (is_variable_sized (decl
))
1366 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1367 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1368 && is_gimple_omp_offloaded (ctx
->stmt
))
1370 tree decl2
= DECL_VALUE_EXPR (decl
);
1371 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1372 decl2
= TREE_OPERAND (decl2
, 0);
1373 gcc_assert (DECL_P (decl2
));
1374 install_var_local (decl2
, ctx
);
1375 fixup_remapped_decl (decl2
, ctx
, false);
1377 install_var_local (decl
, ctx
);
1379 fixup_remapped_decl (decl
, ctx
,
1380 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1381 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1382 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1383 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1384 scan_array_reductions
= true;
1387 case OMP_CLAUSE_REDUCTION
:
1388 decl
= OMP_CLAUSE_DECL (c
);
1389 if (TREE_CODE (decl
) != MEM_REF
)
1391 if (is_variable_sized (decl
))
1392 install_var_local (decl
, ctx
);
1393 fixup_remapped_decl (decl
, ctx
, false);
1395 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1396 scan_array_reductions
= true;
1399 case OMP_CLAUSE_SHARED
:
1400 /* Ignore shared directives in teams construct. */
1401 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
1403 decl
= OMP_CLAUSE_DECL (c
);
1404 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1406 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1408 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1411 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1412 install_var_field (decl
, by_ref
, 11, ctx
);
1415 fixup_remapped_decl (decl
, ctx
, false);
1418 case OMP_CLAUSE_MAP
:
1419 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1421 decl
= OMP_CLAUSE_DECL (c
);
1423 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1424 && (OMP_CLAUSE_MAP_KIND (c
)
1425 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1426 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1427 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1428 && varpool_node::get_create (decl
)->offloadable
)
1432 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1433 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1434 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1435 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1437 tree new_decl
= lookup_decl (decl
, ctx
);
1438 TREE_TYPE (new_decl
)
1439 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1441 else if (DECL_SIZE (decl
)
1442 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1444 tree decl2
= DECL_VALUE_EXPR (decl
);
1445 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1446 decl2
= TREE_OPERAND (decl2
, 0);
1447 gcc_assert (DECL_P (decl2
));
1448 fixup_remapped_decl (decl2
, ctx
, false);
1449 fixup_remapped_decl (decl
, ctx
, true);
1452 fixup_remapped_decl (decl
, ctx
, false);
1456 case OMP_CLAUSE_COPYPRIVATE
:
1457 case OMP_CLAUSE_COPYIN
:
1458 case OMP_CLAUSE_DEFAULT
:
1460 case OMP_CLAUSE_NUM_THREADS
:
1461 case OMP_CLAUSE_NUM_TEAMS
:
1462 case OMP_CLAUSE_THREAD_LIMIT
:
1463 case OMP_CLAUSE_DEVICE
:
1464 case OMP_CLAUSE_SCHEDULE
:
1465 case OMP_CLAUSE_DIST_SCHEDULE
:
1466 case OMP_CLAUSE_NOWAIT
:
1467 case OMP_CLAUSE_ORDERED
:
1468 case OMP_CLAUSE_COLLAPSE
:
1469 case OMP_CLAUSE_UNTIED
:
1470 case OMP_CLAUSE_FINAL
:
1471 case OMP_CLAUSE_MERGEABLE
:
1472 case OMP_CLAUSE_PROC_BIND
:
1473 case OMP_CLAUSE_SAFELEN
:
1474 case OMP_CLAUSE_SIMDLEN
:
1475 case OMP_CLAUSE_ALIGNED
:
1476 case OMP_CLAUSE_DEPEND
:
1477 case OMP_CLAUSE__LOOPTEMP_
:
1479 case OMP_CLAUSE_FROM
:
1480 case OMP_CLAUSE_PRIORITY
:
1481 case OMP_CLAUSE_GRAINSIZE
:
1482 case OMP_CLAUSE_NUM_TASKS
:
1483 case OMP_CLAUSE_THREADS
:
1484 case OMP_CLAUSE_SIMD
:
1485 case OMP_CLAUSE_NOGROUP
:
1486 case OMP_CLAUSE_DEFAULTMAP
:
1487 case OMP_CLAUSE_USE_DEVICE_PTR
:
1488 case OMP_CLAUSE__CILK_FOR_COUNT_
:
1489 case OMP_CLAUSE_ASYNC
:
1490 case OMP_CLAUSE_WAIT
:
1491 case OMP_CLAUSE_NUM_GANGS
:
1492 case OMP_CLAUSE_NUM_WORKERS
:
1493 case OMP_CLAUSE_VECTOR_LENGTH
:
1494 case OMP_CLAUSE_GANG
:
1495 case OMP_CLAUSE_WORKER
:
1496 case OMP_CLAUSE_VECTOR
:
1497 case OMP_CLAUSE_INDEPENDENT
:
1498 case OMP_CLAUSE_AUTO
:
1499 case OMP_CLAUSE_SEQ
:
1500 case OMP_CLAUSE__GRIDDIM_
:
1501 case OMP_CLAUSE__SIMT_
:
1504 case OMP_CLAUSE_TILE
:
1505 case OMP_CLAUSE__CACHE_
:
1511 gcc_checking_assert (!scan_array_reductions
1512 || !is_gimple_omp_oacc (ctx
->stmt
));
1513 if (scan_array_reductions
)
1515 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1516 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1517 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1519 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), ctx
);
1520 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
1522 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1523 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1524 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1525 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1526 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1527 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1531 /* Create a new name for omp child function. Returns an identifier. If
1532 IS_CILK_FOR is true then the suffix for the child function is
1536 create_omp_child_function_name (bool task_copy
, bool is_cilk_for
)
1539 return clone_function_name (current_function_decl
, "_cilk_for_fn");
1540 return clone_function_name (current_function_decl
,
1541 task_copy
? "_omp_cpyfn" : "_omp_fn");
1544 /* Returns the type of the induction variable for the child function for
1545 _Cilk_for and the types for _high and _low variables based on TYPE. */
1548 cilk_for_check_loop_diff_type (tree type
)
1550 if (TYPE_PRECISION (type
) <= TYPE_PRECISION (uint32_type_node
))
1552 if (TYPE_UNSIGNED (type
))
1553 return uint32_type_node
;
1555 return integer_type_node
;
1559 if (TYPE_UNSIGNED (type
))
1560 return uint64_type_node
;
1562 return long_long_integer_type_node
;
1566 /* Return true if CTX may belong to offloaded code: either if current function
1567 is offloaded, or any enclosing context corresponds to a target region. */
1570 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1572 if (cgraph_node::get (current_function_decl
)->offloadable
)
1574 for (; ctx
; ctx
= ctx
->outer
)
1575 if (is_gimple_omp_offloaded (ctx
->stmt
))
1580 /* Build a decl for the omp child function. It'll not contain a body
1581 yet, just the bare decl. */
1584 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1586 tree decl
, type
, name
, t
;
1589 = (flag_cilkplus
&& gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
1590 ? omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
1591 OMP_CLAUSE__CILK_FOR_COUNT_
) : NULL_TREE
;
1592 tree cilk_var_type
= NULL_TREE
;
1594 name
= create_omp_child_function_name (task_copy
,
1595 cilk_for_count
!= NULL_TREE
);
1597 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1598 ptr_type_node
, NULL_TREE
);
1599 else if (cilk_for_count
)
1601 type
= TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count
, 0));
1602 cilk_var_type
= cilk_for_check_loop_diff_type (type
);
1603 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1604 cilk_var_type
, cilk_var_type
, NULL_TREE
);
1607 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1609 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1611 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1614 ctx
->cb
.dst_fn
= decl
;
1616 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1618 TREE_STATIC (decl
) = 1;
1619 TREE_USED (decl
) = 1;
1620 DECL_ARTIFICIAL (decl
) = 1;
1621 DECL_IGNORED_P (decl
) = 0;
1622 TREE_PUBLIC (decl
) = 0;
1623 DECL_UNINLINABLE (decl
) = 1;
1624 DECL_EXTERNAL (decl
) = 0;
1625 DECL_CONTEXT (decl
) = NULL_TREE
;
1626 DECL_INITIAL (decl
) = make_node (BLOCK
);
1627 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1628 if (omp_maybe_offloaded_ctx (ctx
))
1630 cgraph_node::get_create (decl
)->offloadable
= 1;
1631 if (ENABLE_OFFLOADING
)
1632 g
->have_offload
= true;
1635 if (cgraph_node::get_create (decl
)->offloadable
1636 && !lookup_attribute ("omp declare target",
1637 DECL_ATTRIBUTES (current_function_decl
)))
1639 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
1640 ? "omp target entrypoint"
1641 : "omp declare target");
1642 DECL_ATTRIBUTES (decl
)
1643 = tree_cons (get_identifier (target_attr
),
1644 NULL_TREE
, DECL_ATTRIBUTES (decl
));
1647 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1648 RESULT_DECL
, NULL_TREE
, void_type_node
);
1649 DECL_ARTIFICIAL (t
) = 1;
1650 DECL_IGNORED_P (t
) = 1;
1651 DECL_CONTEXT (t
) = decl
;
1652 DECL_RESULT (decl
) = t
;
1654 /* _Cilk_for's child function requires two extra parameters called
1655 __low and __high that are set the by Cilk runtime when it calls this
1659 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1660 PARM_DECL
, get_identifier ("__high"), cilk_var_type
);
1661 DECL_ARTIFICIAL (t
) = 1;
1662 DECL_NAMELESS (t
) = 1;
1663 DECL_ARG_TYPE (t
) = ptr_type_node
;
1664 DECL_CONTEXT (t
) = current_function_decl
;
1666 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1667 DECL_ARGUMENTS (decl
) = t
;
1669 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1670 PARM_DECL
, get_identifier ("__low"), cilk_var_type
);
1671 DECL_ARTIFICIAL (t
) = 1;
1672 DECL_NAMELESS (t
) = 1;
1673 DECL_ARG_TYPE (t
) = ptr_type_node
;
1674 DECL_CONTEXT (t
) = current_function_decl
;
1676 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1677 DECL_ARGUMENTS (decl
) = t
;
1680 tree data_name
= get_identifier (".omp_data_i");
1681 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
1683 DECL_ARTIFICIAL (t
) = 1;
1684 DECL_NAMELESS (t
) = 1;
1685 DECL_ARG_TYPE (t
) = ptr_type_node
;
1686 DECL_CONTEXT (t
) = current_function_decl
;
1688 TREE_READONLY (t
) = 1;
1690 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1691 DECL_ARGUMENTS (decl
) = t
;
1693 ctx
->receiver_decl
= t
;
1696 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1697 PARM_DECL
, get_identifier (".omp_data_o"),
1699 DECL_ARTIFICIAL (t
) = 1;
1700 DECL_NAMELESS (t
) = 1;
1701 DECL_ARG_TYPE (t
) = ptr_type_node
;
1702 DECL_CONTEXT (t
) = current_function_decl
;
1704 TREE_ADDRESSABLE (t
) = 1;
1705 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1706 DECL_ARGUMENTS (decl
) = t
;
1709 /* Allocate memory for the function structure. The call to
1710 allocate_struct_function clobbers CFUN, so we need to restore
1712 push_struct_function (decl
);
1713 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
1714 init_tree_ssa (cfun
);
1718 /* Callback for walk_gimple_seq. Check if combined parallel
1719 contains gimple_omp_for_combined_into_p OMP_FOR. */
1722 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
1723 bool *handled_ops_p
,
1724 struct walk_stmt_info
*wi
)
1726 gimple
*stmt
= gsi_stmt (*gsi_p
);
1728 *handled_ops_p
= true;
1729 switch (gimple_code (stmt
))
1733 case GIMPLE_OMP_FOR
:
1734 if (gimple_omp_for_combined_into_p (stmt
)
1735 && gimple_omp_for_kind (stmt
)
1736 == *(const enum gf_mask
*) (wi
->info
))
1739 return integer_zero_node
;
1748 /* Add _LOOPTEMP_ clauses on OpenMP parallel or task. */
1751 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
1752 omp_context
*outer_ctx
)
1754 struct walk_stmt_info wi
;
1756 memset (&wi
, 0, sizeof (wi
));
1758 wi
.info
= (void *) &msk
;
1759 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
1760 if (wi
.info
!= (void *) &msk
)
1762 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
1763 struct omp_for_data fd
;
1764 omp_extract_for_data (for_stmt
, &fd
, NULL
);
1765 /* We need two temporaries with fd.loop.v type (istart/iend)
1766 and then (fd.collapse - 1) temporaries with the same
1767 type for count2 ... countN-1 vars if not constant. */
1768 size_t count
= 2, i
;
1769 tree type
= fd
.iter_type
;
1771 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
1773 count
+= fd
.collapse
- 1;
1774 /* If there are lastprivate clauses on the inner
1775 GIMPLE_OMP_FOR, add one more temporaries for the total number
1776 of iterations (product of count1 ... countN-1). */
1777 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
1778 OMP_CLAUSE_LASTPRIVATE
))
1780 else if (msk
== GF_OMP_FOR_KIND_FOR
1781 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1782 OMP_CLAUSE_LASTPRIVATE
))
1785 for (i
= 0; i
< count
; i
++)
1787 tree temp
= create_tmp_var (type
);
1788 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
1789 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1790 OMP_CLAUSE_DECL (c
) = temp
;
1791 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1792 gimple_omp_taskreg_set_clauses (stmt
, c
);
1797 /* Scan an OpenMP parallel directive. */
1800 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1804 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
1806 /* Ignore parallel directives with empty bodies, unless there
1807 are copyin clauses. */
1809 && empty_body_p (gimple_omp_body (stmt
))
1810 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1811 OMP_CLAUSE_COPYIN
) == NULL
)
1813 gsi_replace (gsi
, gimple_build_nop (), false);
1817 if (gimple_omp_parallel_combined_p (stmt
))
1818 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
1820 ctx
= new_omp_context (stmt
, outer_ctx
);
1821 taskreg_contexts
.safe_push (ctx
);
1822 if (taskreg_nesting_level
> 1)
1823 ctx
->is_nested
= true;
1824 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1825 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
1826 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1827 name
= create_tmp_var_name (".omp_data_s");
1828 name
= build_decl (gimple_location (stmt
),
1829 TYPE_DECL
, name
, ctx
->record_type
);
1830 DECL_ARTIFICIAL (name
) = 1;
1831 DECL_NAMELESS (name
) = 1;
1832 TYPE_NAME (ctx
->record_type
) = name
;
1833 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1834 if (!gimple_omp_parallel_grid_phony (stmt
))
1836 create_omp_child_function (ctx
, false);
1837 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1840 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
1841 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
1843 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
1844 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
1847 /* Scan an OpenMP task directive. */
1850 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1854 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
1856 /* Ignore task directives with empty bodies. */
1858 && empty_body_p (gimple_omp_body (stmt
)))
1860 gsi_replace (gsi
, gimple_build_nop (), false);
1864 if (gimple_omp_task_taskloop_p (stmt
))
1865 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
1867 ctx
= new_omp_context (stmt
, outer_ctx
);
1868 taskreg_contexts
.safe_push (ctx
);
1869 if (taskreg_nesting_level
> 1)
1870 ctx
->is_nested
= true;
1871 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1872 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
1873 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1874 name
= create_tmp_var_name (".omp_data_s");
1875 name
= build_decl (gimple_location (stmt
),
1876 TYPE_DECL
, name
, ctx
->record_type
);
1877 DECL_ARTIFICIAL (name
) = 1;
1878 DECL_NAMELESS (name
) = 1;
1879 TYPE_NAME (ctx
->record_type
) = name
;
1880 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1881 create_omp_child_function (ctx
, false);
1882 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1884 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
1886 if (ctx
->srecord_type
)
1888 name
= create_tmp_var_name (".omp_data_a");
1889 name
= build_decl (gimple_location (stmt
),
1890 TYPE_DECL
, name
, ctx
->srecord_type
);
1891 DECL_ARTIFICIAL (name
) = 1;
1892 DECL_NAMELESS (name
) = 1;
1893 TYPE_NAME (ctx
->srecord_type
) = name
;
1894 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
1895 create_omp_child_function (ctx
, true);
1898 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
1900 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
1902 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
1903 t
= build_int_cst (long_integer_type_node
, 0);
1904 gimple_omp_task_set_arg_size (stmt
, t
);
1905 t
= build_int_cst (long_integer_type_node
, 1);
1906 gimple_omp_task_set_arg_align (stmt
, t
);
1911 /* If any decls have been made addressable during scan_omp,
1912 adjust their fields if needed, and layout record types
1913 of parallel/task constructs. */
1916 finish_taskreg_scan (omp_context
*ctx
)
1918 if (ctx
->record_type
== NULL_TREE
)
1921 /* If any task_shared_vars were needed, verify all
1922 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1923 statements if use_pointer_for_field hasn't changed
1924 because of that. If it did, update field types now. */
1925 if (task_shared_vars
)
1929 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
1930 c
; c
= OMP_CLAUSE_CHAIN (c
))
1931 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1932 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1934 tree decl
= OMP_CLAUSE_DECL (c
);
1936 /* Global variables don't need to be copied,
1937 the receiver side will use them directly. */
1938 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1940 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
1941 || !use_pointer_for_field (decl
, ctx
))
1943 tree field
= lookup_field (decl
, ctx
);
1944 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
1945 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
1947 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
1948 TREE_THIS_VOLATILE (field
) = 0;
1949 DECL_USER_ALIGN (field
) = 0;
1950 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
1951 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
1952 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
1953 if (ctx
->srecord_type
)
1955 tree sfield
= lookup_sfield (decl
, ctx
);
1956 TREE_TYPE (sfield
) = TREE_TYPE (field
);
1957 TREE_THIS_VOLATILE (sfield
) = 0;
1958 DECL_USER_ALIGN (sfield
) = 0;
1959 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
1960 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
1961 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
1966 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
1968 layout_type (ctx
->record_type
);
1969 fixup_child_record_type (ctx
);
1973 location_t loc
= gimple_location (ctx
->stmt
);
1974 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
1975 /* Move VLA fields to the end. */
1976 p
= &TYPE_FIELDS (ctx
->record_type
);
1978 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
1979 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
1982 *p
= TREE_CHAIN (*p
);
1983 TREE_CHAIN (*q
) = NULL_TREE
;
1984 q
= &TREE_CHAIN (*q
);
1987 p
= &DECL_CHAIN (*p
);
1989 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
1991 /* Move fields corresponding to first and second _looptemp_
1992 clause first. There are filled by GOMP_taskloop
1993 and thus need to be in specific positions. */
1994 tree c1
= gimple_omp_task_clauses (ctx
->stmt
);
1995 c1
= omp_find_clause (c1
, OMP_CLAUSE__LOOPTEMP_
);
1996 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
1997 OMP_CLAUSE__LOOPTEMP_
);
1998 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
1999 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2000 p
= &TYPE_FIELDS (ctx
->record_type
);
2002 if (*p
== f1
|| *p
== f2
)
2003 *p
= DECL_CHAIN (*p
);
2005 p
= &DECL_CHAIN (*p
);
2006 DECL_CHAIN (f1
) = f2
;
2007 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2008 TYPE_FIELDS (ctx
->record_type
) = f1
;
2009 if (ctx
->srecord_type
)
2011 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2012 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2013 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2015 if (*p
== f1
|| *p
== f2
)
2016 *p
= DECL_CHAIN (*p
);
2018 p
= &DECL_CHAIN (*p
);
2019 DECL_CHAIN (f1
) = f2
;
2020 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2021 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2024 layout_type (ctx
->record_type
);
2025 fixup_child_record_type (ctx
);
2026 if (ctx
->srecord_type
)
2027 layout_type (ctx
->srecord_type
);
2028 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2029 TYPE_SIZE_UNIT (ctx
->record_type
));
2030 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2031 t
= build_int_cst (long_integer_type_node
,
2032 TYPE_ALIGN_UNIT (ctx
->record_type
));
2033 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2037 /* Find the enclosing offload context. */
2039 static omp_context
*
2040 enclosing_target_ctx (omp_context
*ctx
)
2042 for (; ctx
; ctx
= ctx
->outer
)
2043 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2049 /* Return true if ctx is part of an oacc kernels region. */
2052 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2054 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2056 gimple
*stmt
= ctx
->stmt
;
2057 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2058 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2065 /* Check the parallelism clauses inside a kernels regions.
2066 Until kernels handling moves to use the same loop indirection
2067 scheme as parallel, we need to do this checking early. */
2070 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2072 bool checking
= true;
2073 unsigned outer_mask
= 0;
2074 unsigned this_mask
= 0;
2075 bool has_seq
= false, has_auto
= false;
2078 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2082 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2084 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2087 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2089 switch (OMP_CLAUSE_CODE (c
))
2091 case OMP_CLAUSE_GANG
:
2092 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2094 case OMP_CLAUSE_WORKER
:
2095 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2097 case OMP_CLAUSE_VECTOR
:
2098 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2100 case OMP_CLAUSE_SEQ
:
2103 case OMP_CLAUSE_AUTO
:
2113 if (has_seq
&& (this_mask
|| has_auto
))
2114 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2115 " OpenACC loop specifiers");
2116 else if (has_auto
&& this_mask
)
2117 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2118 " OpenACC loop specifiers");
2120 if (this_mask
& outer_mask
)
2121 error_at (gimple_location (stmt
), "inner loop uses same"
2122 " OpenACC parallelism as containing loop");
2125 return outer_mask
| this_mask
;
2128 /* Scan a GIMPLE_OMP_FOR. */
2131 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2135 tree clauses
= gimple_omp_for_clauses (stmt
);
2137 ctx
= new_omp_context (stmt
, outer_ctx
);
2139 if (is_gimple_omp_oacc (stmt
))
2141 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2143 if (!tgt
|| is_oacc_parallel (tgt
))
2144 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2146 char const *check
= NULL
;
2148 switch (OMP_CLAUSE_CODE (c
))
2150 case OMP_CLAUSE_GANG
:
2154 case OMP_CLAUSE_WORKER
:
2158 case OMP_CLAUSE_VECTOR
:
2166 if (check
&& OMP_CLAUSE_OPERAND (c
, 0))
2167 error_at (gimple_location (stmt
),
2168 "argument not permitted on %qs clause in"
2169 " OpenACC %<parallel%>", check
);
2172 if (tgt
&& is_oacc_kernels (tgt
))
2174 /* Strip out reductions, as they are not handled yet. */
2175 tree
*prev_ptr
= &clauses
;
2177 while (tree probe
= *prev_ptr
)
2179 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2181 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2182 *prev_ptr
= *next_ptr
;
2184 prev_ptr
= next_ptr
;
2187 gimple_omp_for_set_clauses (stmt
, clauses
);
2188 check_oacc_kernel_gwv (stmt
, ctx
);
2192 scan_sharing_clauses (clauses
, ctx
);
2194 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2195 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2197 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2198 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2199 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2200 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2202 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2205 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2208 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2209 omp_context
*outer_ctx
)
2211 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2212 gsi_replace (gsi
, bind
, false);
2213 gimple_seq seq
= NULL
;
2214 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2215 tree cond
= create_tmp_var_raw (integer_type_node
);
2216 DECL_CONTEXT (cond
) = current_function_decl
;
2217 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2218 gimple_bind_set_vars (bind
, cond
);
2219 gimple_call_set_lhs (g
, cond
);
2220 gimple_seq_add_stmt (&seq
, g
);
2221 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2222 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2223 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2224 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2225 gimple_seq_add_stmt (&seq
, g
);
2226 g
= gimple_build_label (lab1
);
2227 gimple_seq_add_stmt (&seq
, g
);
2228 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2229 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2230 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2231 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2232 gimple_omp_for_set_clauses (new_stmt
, clause
);
2233 gimple_seq_add_stmt (&seq
, new_stmt
);
2234 g
= gimple_build_goto (lab3
);
2235 gimple_seq_add_stmt (&seq
, g
);
2236 g
= gimple_build_label (lab2
);
2237 gimple_seq_add_stmt (&seq
, g
);
2238 gimple_seq_add_stmt (&seq
, stmt
);
2239 g
= gimple_build_label (lab3
);
2240 gimple_seq_add_stmt (&seq
, g
);
2241 gimple_bind_set_body (bind
, seq
);
2243 scan_omp_for (new_stmt
, outer_ctx
);
2244 scan_omp_for (stmt
, outer_ctx
);
2247 /* Scan an OpenMP sections directive. */
2250 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2254 ctx
= new_omp_context (stmt
, outer_ctx
);
2255 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2256 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2259 /* Scan an OpenMP single directive. */
2262 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2267 ctx
= new_omp_context (stmt
, outer_ctx
);
2268 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2269 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2270 name
= create_tmp_var_name (".omp_copy_s");
2271 name
= build_decl (gimple_location (stmt
),
2272 TYPE_DECL
, name
, ctx
->record_type
);
2273 TYPE_NAME (ctx
->record_type
) = name
;
2275 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2276 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2278 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2279 ctx
->record_type
= NULL
;
2281 layout_type (ctx
->record_type
);
/* Return true if the CLAUSES of an omp target guarantee that the base pointers
   used in the corresponding offloaded function are restrict. */

/* NOTE(review): extraction dropped many lines from this predicate
   (return type, braces, the declaration of C, the `return' statements,
   `break's, the `default:' label, and part of the explanatory comment);
   surviving tokens are kept verbatim.  */
omp_target_base_pointers_restrict_p (tree clauses)
  /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
     used by OpenACC.  NOTE(review): presumably the function bails out for
     non-OpenACC here — the guarded statement line was dropped.  */
  if (flag_openacc == 0)

  /* I. Basic example:

       unsigned int a[2], b[2];

       #pragma acc kernels \

     After gimplification, we have:

       #pragma omp target oacc_kernels \
         map(force_from:a [len: 8]) \
         map(force_from:b [len: 8])

     Because both mappings have the force prefix, we know that they will be
     allocated when calling the corresponding offloaded function, which means we
     can mark the base pointers for a and b in the offloaded function as
     restrict.  */

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      /* Only map clauses participate in the analysis.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)

      switch (OMP_CLAUSE_MAP_KIND (c))
          /* Force-mapped data is freshly allocated on the device, so
             its base pointer cannot alias another mapping.  */
          case GOMP_MAP_FORCE_ALLOC:
          case GOMP_MAP_FORCE_TO:
          case GOMP_MAP_FORCE_FROM:
          case GOMP_MAP_FORCE_TOFROM:
/* Scan a GIMPLE_OMP_TARGET. */

/* NOTE(review): extraction dropped lines from this definition (return
   type, braces, declarations of CTX and NAME, `if (offloaded)' guards,
   an `else', the loop-condition line of the alignment check, and the
   fprintf wrapper around one surviving string literal).  Surviving
   tokens kept verbatim.  */
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  /* Record type describing the .omp_data_t block handed to the child.  */
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
                     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  bool base_pointers_restrict = false;
  /* Offloaded regions get a separate child function.  NOTE(review):
     presumably guarded by `if (offloaded)' — the guard line was
     dropped.  */
  create_omp_child_function (ctx, false);
  gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);

  base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
  if (base_pointers_restrict
      && dump_file && (dump_flags & TDF_DETAILS))
    /* NOTE(review): the fprintf (dump_file, ...) wrapper around this
       string literal was dropped in extraction.  */
    "Base pointers in offloaded function are restrict\n");

  scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* No fields: the marshalling record (and its receiver) is unused.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;

  /* Fields were chained in reverse during scanning; restore order.  */
  TYPE_FIELDS (ctx->record_type)
    = nreverse (TYPE_FIELDS (ctx->record_type));

  /* All fields are asserted to share one alignment here.  */
  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
  for (tree field = TYPE_FIELDS (ctx->record_type);
       field = DECL_CHAIN (field))
    gcc_assert (DECL_ALIGN (field) == align);
  layout_type (ctx->record_type);
  /* Rewrite the child function's view of the record type.  */
  fixup_child_record_type (ctx);
/* Scan an OpenMP teams directive. */

/* NOTE(review): the return type and braces of this definition were
   dropped in extraction; the statements themselves are verbatim.  */
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
  /* Teams needs no marshalling record: just a context, its clauses,
     and a recursive scan of the body.  */
  omp_context *ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* Check nesting restrictions. */

/* NOTE(review): this routine lost many physical lines in extraction —
   the return type, braces, `return true/false' statements, several
   `break's, `case' labels and local declarations (C, OK, OCLAUSE,
   THIS_STMT_NAME, the `kind =' assignments).  The surviving tokens are
   kept verbatim and re-flowed; comments only annotate what is visible.
   Returns whether STMT may remain (callers treat false as "remove").  */
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
  if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
    /* GRID_BODY is an artificial construct, nesting rules will be checked in
       the original copy of its contents. */

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX. */
  if (!(is_gimple_omp (stmt)
        && is_gimple_omp_oacc (stmt))
      /* Except for atomic codes that we share with OpenMP. */
      && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
           || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
      /* Inside an OpenACC routine, no OpenMP construct is allowed.  */
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
          error_at (gimple_location (stmt),
                    "non-OpenACC construct inside of OpenACC routine");
      /* Likewise for any statically enclosing OpenACC region.  */
      for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
        if (is_gimple_omp (octx->stmt)
            && is_gimple_omp_oacc (octx->stmt))
            error_at (gimple_location (stmt),
                      "non-OpenACC construct inside of OpenACC region");

  /* Directly inside a simd loop only `ordered simd' (optionally with
     `threads') is permitted.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
      if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
          c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
          if (omp_find_clause (c, OMP_CLAUSE_SIMD))
              /* `ordered simd threads' additionally requires a combined
                 `for simd' parent.  */
              if (omp_find_clause (c, OMP_CLAUSE_THREADS)
                  && (ctx->outer == NULL
                      || !gimple_omp_for_combined_into_p (ctx->stmt)
                      || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
                      || (gimple_omp_for_kind (ctx->outer->stmt)
                          != GF_OMP_FOR_KIND_FOR)
                      || !gimple_omp_for_combined_p (ctx->outer->stmt)))
                  error_at (gimple_location (stmt),
                            "%<ordered simd threads%> must be closely "
                            "nested inside of %<for simd%> region");
      error_at (gimple_location (stmt),
                "OpenMP constructs other than %<#pragma omp ordered simd%>"
                " may not be nested inside %<simd%> region");
  /* Directly inside teams only distribute / grid loop / parallel.  */
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
      if ((gimple_code (stmt) != GIMPLE_OMP_FOR
           || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
               && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
          && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
          error_at (gimple_location (stmt),
                    "only %<distribute%> or %<parallel%> regions are "
                    "allowed to be strictly nested inside %<teams%> "

  /* Per-construct checks.  */
  switch (gimple_code (stmt))
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
          if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
              error_at (gimple_location (stmt),
                        "%<distribute%> region must be strictly nested "
                        "inside %<teams%> construct");
      /* We split taskloop into task and nested taskloop in it. */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
      /* OpenACC loops must hang off an OpenACC loop or compute region.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
          switch (gimple_code (ctx->stmt))
            case GIMPLE_OMP_FOR:
              ok = (gimple_omp_for_kind (ctx->stmt)
                    == GF_OMP_FOR_KIND_OACC_LOOP);
            case GIMPLE_OMP_TARGET:
              switch (gimple_omp_target_kind (ctx->stmt))
                case GF_OMP_TARGET_KIND_OACC_PARALLEL:
                case GF_OMP_TARGET_KIND_OACC_KERNELS:
          else if (oacc_get_fn_attrib (current_function_decl))
              error_at (gimple_location (stmt),
                        "OpenACC loop directive must be associated with"
                        " an OpenACC compute region");
      /* Cancellation builtins: verify the cancelled construct kind
         (first call argument) matches the enclosing region.  */
      if (is_gimple_call (stmt)
          && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
              == BUILT_IN_GOMP_CANCEL
              || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
                 == BUILT_IN_GOMP_CANCELLATION_POINT))
          const char *bad = NULL;
          const char *kind = NULL;
          const char *construct
            = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
               == BUILT_IN_GOMP_CANCEL)
              ? "#pragma omp cancel"
              : "#pragma omp cancellation point";
          /* NOTE(review): the `if (ctx == NULL)' guard around this
             orphaned-construct diagnostic was dropped.  */
          error_at (gimple_location (stmt), "orphaned %qs construct",
          switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
                  ? tree_to_shwi (gimple_call_arg (stmt, 0))
              /* Cancel parallel.  */
              if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
                bad = "#pragma omp parallel";
              else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
                       == BUILT_IN_GOMP_CANCEL
                       && !integer_zerop (gimple_call_arg (stmt, 1)))
                ctx->cancellable = true;
              /* Cancel for.  */
              if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
                  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
                bad = "#pragma omp for";
              else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
                       == BUILT_IN_GOMP_CANCEL
                       && !integer_zerop (gimple_call_arg (stmt, 1)))
                  ctx->cancellable = true;
                  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
                    warning_at (gimple_location (stmt), 0,
                                "%<#pragma omp cancel for%> inside "
                                "%<nowait%> for construct");
                  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
                                       OMP_CLAUSE_ORDERED))
                    warning_at (gimple_location (stmt), 0,
                                "%<#pragma omp cancel for%> inside "
                                "%<ordered%> for construct");
              /* Cancel sections.  */
              if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
                  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
                bad = "#pragma omp sections";
              else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
                       == BUILT_IN_GOMP_CANCEL
                       && !integer_zerop (gimple_call_arg (stmt, 1)))
                  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
                      ctx->cancellable = true;
                      if (omp_find_clause (gimple_omp_sections_clauses
                        warning_at (gimple_location (stmt), 0,
                                    "%<#pragma omp cancel sections%> inside "
                                    "%<nowait%> sections construct");
                      /* Inside a lone `section', the cancellable flag
                         lives on the enclosing `sections' context.  */
                      gcc_assert (ctx->outer
                                  && gimple_code (ctx->outer->stmt)
                                     == GIMPLE_OMP_SECTIONS);
                      ctx->outer->cancellable = true;
                      if (omp_find_clause (gimple_omp_sections_clauses
                        warning_at (gimple_location (stmt), 0,
                                    "%<#pragma omp cancel sections%> inside "
                                    "%<nowait%> sections construct");
              /* Cancel taskgroup.  */
              if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
                bad = "#pragma omp task";
              /* Walk outward to find the taskgroup, stopping at
                 parallel/teams (and non-region target) boundaries.  */
              for (omp_context *octx = ctx->outer;
                   octx; octx = octx->outer)
                  switch (gimple_code (octx->stmt))
                    case GIMPLE_OMP_TASKGROUP:
                    case GIMPLE_OMP_TARGET:
                      if (gimple_omp_target_kind (octx->stmt)
                          != GF_OMP_TARGET_KIND_REGION)
                    case GIMPLE_OMP_PARALLEL:
                    case GIMPLE_OMP_TEAMS:
                      error_at (gimple_location (stmt),
                                "%<%s taskgroup%> construct not closely "
                                "nested inside of %<taskgroup%> region",
              ctx->cancellable = true;
              error_at (gimple_location (stmt), "invalid arguments");
          /* BAD set above means the wrong region kind encloses us.  */
          error_at (gimple_location (stmt),
                    "%<%s %s%> construct not closely nested inside of %qs",
                    construct, kind, bad);
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      /* Worksharing regions (and barrier calls) may not closely nest in
         other worksharing or serializing constructs.  */
      for (; ctx != NULL; ctx = ctx->outer)
        switch (gimple_code (ctx->stmt))
          case GIMPLE_OMP_FOR:
            if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
                && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
          case GIMPLE_OMP_SECTIONS:
          case GIMPLE_OMP_SINGLE:
          case GIMPLE_OMP_ORDERED:
          case GIMPLE_OMP_MASTER:
          case GIMPLE_OMP_TASK:
          case GIMPLE_OMP_CRITICAL:
            if (is_gimple_call (stmt))
                /* NOTE(review): surrounding control lines dropped; only
                   GOMP_barrier is diagnosed with the barrier message.  */
                if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
                    != BUILT_IN_GOMP_BARRIER)
                error_at (gimple_location (stmt),
                          "barrier region may not be closely nested inside "
                          "of work-sharing, %<critical%>, %<ordered%>, "
                          "%<master%>, explicit %<task%> or %<taskloop%> "
            error_at (gimple_location (stmt),
                      "work-sharing region may not be closely nested inside "
                      "of work-sharing, %<critical%>, %<ordered%>, "
                      "%<master%>, explicit %<task%> or %<taskloop%> region");
          case GIMPLE_OMP_PARALLEL:
          case GIMPLE_OMP_TEAMS:
          case GIMPLE_OMP_TARGET:
            if (gimple_omp_target_kind (ctx->stmt)
                == GF_OMP_TARGET_KIND_REGION)
    case GIMPLE_OMP_MASTER:
      for (; ctx != NULL; ctx = ctx->outer)
        switch (gimple_code (ctx->stmt))
          case GIMPLE_OMP_FOR:
            if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
                && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
          case GIMPLE_OMP_SECTIONS:
          case GIMPLE_OMP_SINGLE:
          case GIMPLE_OMP_TASK:
            error_at (gimple_location (stmt),
                      "%<master%> region may not be closely nested inside "
                      "of work-sharing, explicit %<task%> or %<taskloop%> "
          case GIMPLE_OMP_PARALLEL:
          case GIMPLE_OMP_TEAMS:
          case GIMPLE_OMP_TARGET:
            if (gimple_omp_target_kind (ctx->stmt)
                == GF_OMP_TARGET_KIND_REGION)
    case GIMPLE_OMP_TASK:
      /* depend(source)/depend(sink) are `omp ordered'-only clauses.  */
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
            && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
                || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
            enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
            error_at (OMP_CLAUSE_LOCATION (c),
                      "%<depend(%s)%> is only allowed in %<omp ordered%>",
                      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
           c; c = OMP_CLAUSE_CHAIN (c))
          if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
              /* Non-depend clauses on ordered can only be threads/simd.  */
              gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
                          || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
          enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
          if (kind == OMP_CLAUSE_DEPEND_SOURCE
              || kind == OMP_CLAUSE_DEPEND_SINK)
              /* Look for containing ordered(N) loop. */
              /* NOTE(review): the head of this condition (apparently the
                 ctx NULL check and the `(oclause' assignment opener) was
                 dropped.  */
              || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
              = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
                                 OMP_CLAUSE_ORDERED)) == NULL_TREE)
                  error_at (OMP_CLAUSE_LOCATION (c),
                            "%<ordered%> construct with %<depend%> clause "
                            "must be closely nested inside an %<ordered%> "
              else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
                  error_at (OMP_CLAUSE_LOCATION (c),
                            "%<ordered%> construct with %<depend%> clause "
                            "must be closely nested inside a loop with "
                            "%<ordered%> clause with a parameter");
              error_at (OMP_CLAUSE_LOCATION (c),
                        "invalid depend kind in omp %<ordered%> %<depend%>");
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
          /* ordered simd must be closely nested inside of simd region,
             and simd region must not encounter constructs other than
             ordered simd, therefore ordered simd may be either orphaned,
             or ctx->stmt must be simd. The latter case is handled already
             earlier.  */
            error_at (gimple_location (stmt),
                      "%<ordered%> %<simd%> must be closely nested inside "
      for (; ctx != NULL; ctx = ctx->outer)
        switch (gimple_code (ctx->stmt))
          case GIMPLE_OMP_CRITICAL:
          case GIMPLE_OMP_TASK:
          case GIMPLE_OMP_ORDERED:
          ordered_in_taskloop:
            error_at (gimple_location (stmt),
                      "%<ordered%> region may not be closely nested inside "
                      "of %<critical%>, %<ordered%>, explicit %<task%> or "
                      "%<taskloop%> region");
          case GIMPLE_OMP_FOR:
            if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
              goto ordered_in_taskloop;
            if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
                                 OMP_CLAUSE_ORDERED) == NULL)
                error_at (gimple_location (stmt),
                          "%<ordered%> region must be closely nested inside "
                          "a loop region with an %<ordered%> clause");
          case GIMPLE_OMP_TARGET:
            if (gimple_omp_target_kind (ctx->stmt)
                != GF_OMP_TARGET_KIND_REGION)
          case GIMPLE_OMP_PARALLEL:
          case GIMPLE_OMP_TEAMS:
            error_at (gimple_location (stmt),
                      "%<ordered%> region must be closely nested inside "
                      "a loop region with an %<ordered%> clause");
    case GIMPLE_OMP_CRITICAL:
        /* Same-named critical regions may not nest.  NOTE(review): the
           declaration line `tree this_stmt_name' was dropped.  */
        = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
      for (; ctx != NULL; ctx = ctx->outer)
        if (gomp_critical *other_crit
            = dyn_cast <gomp_critical *> (ctx->stmt))
          if (this_stmt_name == gimple_omp_critical_name (other_crit))
              error_at (gimple_location (stmt),
                        "%<critical%> region may not be nested inside "
                        "a %<critical%> region with the same name");
    case GIMPLE_OMP_TEAMS:
      /* NOTE(review): the leading `if (ctx == NULL' of this condition
         was dropped.  */
          || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
          || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
          error_at (gimple_location (stmt),
                    "%<teams%> construct not closely nested inside of "
                    "%<target%> construct");
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
            && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
                || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
            enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
            error_at (OMP_CLAUSE_LOCATION (c),
                      "%<depend(%s)%> is only allowed in %<omp ordered%>",
                      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
      if (is_gimple_omp_offloaded (stmt)
          && oacc_get_fn_attrib (cfun->decl) != NULL)
          error_at (gimple_location (stmt),
                    "OpenACC region inside of OpenACC routine, nested "
                    "parallelism not supported yet");
      for (; ctx != NULL; ctx = ctx->outer)
          if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
              if (is_gimple_omp (stmt)
                  && is_gimple_omp_oacc (stmt)
                  && is_gimple_omp (ctx->stmt))
                  error_at (gimple_location (stmt),
                            "OpenACC construct inside of non-OpenACC region");
          /* Build human-readable names for the diagnostics below.  */
          const char *stmt_name, *ctx_stmt_name;
          switch (gimple_omp_target_kind (stmt))
              case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
              case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
              case GF_OMP_TARGET_KIND_UPDATE:
                stmt_name = "target update"; break;
              case GF_OMP_TARGET_KIND_ENTER_DATA:
                stmt_name = "target enter data"; break;
              case GF_OMP_TARGET_KIND_EXIT_DATA:
                stmt_name = "target exit data"; break;
              case GF_OMP_TARGET_KIND_OACC_PARALLEL:
                stmt_name = "parallel"; break;
              case GF_OMP_TARGET_KIND_OACC_KERNELS:
                stmt_name = "kernels"; break;
              case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
              case GF_OMP_TARGET_KIND_OACC_UPDATE:
                stmt_name = "update"; break;
              case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
                stmt_name = "enter/exit data"; break;
              /* NOTE(review): the `break' after this assignment appears
                 to be one of the dropped lines.  */
              case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
                stmt_name = "host_data";
              default: gcc_unreachable ();
          switch (gimple_omp_target_kind (ctx->stmt))
              case GF_OMP_TARGET_KIND_REGION:
                ctx_stmt_name = "target"; break;
              case GF_OMP_TARGET_KIND_DATA:
                ctx_stmt_name = "target data"; break;
              case GF_OMP_TARGET_KIND_OACC_PARALLEL:
                ctx_stmt_name = "parallel"; break;
              case GF_OMP_TARGET_KIND_OACC_KERNELS:
                ctx_stmt_name = "kernels"; break;
              case GF_OMP_TARGET_KIND_OACC_DATA:
                ctx_stmt_name = "data"; break;
              case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
                ctx_stmt_name = "host_data"; break;
              default: gcc_unreachable ();

          /* OpenACC/OpenMP mismatch? */
          if (is_gimple_omp_oacc (stmt)
              != is_gimple_omp_oacc (ctx->stmt))
              error_at (gimple_location (stmt),
                        "%s %qs construct inside of %s %qs region",
                        (is_gimple_omp_oacc (stmt)
                         ? "OpenACC" : "OpenMP"), stmt_name,
                        (is_gimple_omp_oacc (ctx->stmt)
                         ? "OpenACC" : "OpenMP"), ctx_stmt_name);
          if (is_gimple_omp_offloaded (ctx->stmt))
              /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
              if (is_gimple_omp_oacc (ctx->stmt))
                  error_at (gimple_location (stmt),
                            "%qs construct inside of %qs region",
                            stmt_name, ctx_stmt_name);
          warning_at (gimple_location (stmt), 0,
                      "%qs construct inside of %qs region",
                      stmt_name, ctx_stmt_name);
/* Helper function scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP. */

/* NOTE(review): extraction dropped the return type, braces, the
   declaration/assignment of T (presumably `tree t = *tp'), `case'
   labels and `break's of the switch, and the trailing `return NULL_TREE'.
   Surviving tokens kept verbatim.  */
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;

  switch (TREE_CODE (t))
      /* Decls are remapped through the context's copy-body callback.  */
      tree repl = remap_decl (t, &ctx->cb);
      gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);

      if (ctx && TYPE_P (t))
        *tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
          /* Non-decl operand: remap its type, rebuilding integer
             constants so they carry the remapped type.  */
          tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
          if (tem != TREE_TYPE (t))
              if (TREE_CODE (t) == INTEGER_CST)
                *tp = wide_int_to_tree (tem, t);
              /* NOTE(review): an `else' before this assignment appears
                 to be one of the dropped lines.  */
              TREE_TYPE (t) = tem;
/* Return true if FNDECL is a setjmp or a longjmp. */

/* NOTE(review): the return type, braces, the `return true' for the
   builtin case, and (presumably) a NULL check on DECL_NAME were
   dropped in extraction.  */
setjmp_or_longjmp_p (const_tree fndecl)
  /* Recognized directly as the setjmp/longjmp builtins...  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))

  /* ... or by name, for declarations not marked as builtins.  */
  tree declname = DECL_NAME (fndecl);
  const char *name = IDENTIFIER_POINTER (declname);
  return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI. */

/* NOTE(review): the return type, braces, `break's, several guard lines
   (`if (remove)', fndecl NULL checks), the GIMPLE_BIND and default
   `case' labels, the VAR declaration and the trailing `return NULL'
   were dropped in extraction.  Surviving tokens kept verbatim.  */
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                 struct walk_stmt_info *wi)
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions. */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
      tree fndecl = gimple_call_fndecl (stmt);
      /* setjmp/longjmp may not appear inside a simd loop body.
         NOTE(review): null checks on fndecl/ctx appear dropped here.  */
      if (setjmp_or_longjmp_p (fndecl)
          && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
          && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
          error_at (gimple_location (stmt),
                    "setjmp/longjmp inside simd construct");
      else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (fndecl))
            /* GOMP runtime entry points obey the same nesting rules as
               the corresponding directives.  */
            case BUILT_IN_GOMP_BARRIER:
            case BUILT_IN_GOMP_CANCEL:
            case BUILT_IN_GOMP_CANCELLATION_POINT:
            case BUILT_IN_GOMP_TASKYIELD:
            case BUILT_IN_GOMP_TASKWAIT:
            case BUILT_IN_GOMP_TASKGROUP_START:
            case BUILT_IN_GOMP_TASKGROUP_END:
              remove = !check_omp_nesting_restrictions (stmt, ctx);

  /* Statements failing the nesting check are replaced by a nop.
     NOTE(review): the `if (remove)' guard line was dropped.  */
  stmt = gimple_build_nop ();
  gsi_replace (gsi, stmt, false);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
      case GIMPLE_OMP_PARALLEL:
        /* Track parallel/task nesting depth across the recursion.  */
        taskreg_nesting_level++;
        scan_omp_parallel (gsi, ctx);
        taskreg_nesting_level--;

      case GIMPLE_OMP_TASK:
        taskreg_nesting_level++;
        scan_omp_task (gsi, ctx);
        taskreg_nesting_level--;

      case GIMPLE_OMP_FOR:
        /* Use SIMT lowering for simd loops in (maybe) offloaded
           regions when the target has a SIMT vectorization factor.  */
        if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
              & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
            && omp_maybe_offloaded_ctx (ctx)
            && omp_max_simt_vf ())
          scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
        /* NOTE(review): an `else' before this call appears dropped.  */
        scan_omp_for (as_a <gomp_for *> (stmt), ctx);

      case GIMPLE_OMP_SECTIONS:
        scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);

      case GIMPLE_OMP_SINGLE:
        scan_omp_single (as_a <gomp_single *> (stmt), ctx);

      /* Simple region constructs: a new context plus recursive scan.  */
      case GIMPLE_OMP_SECTION:
      case GIMPLE_OMP_MASTER:
      case GIMPLE_OMP_TASKGROUP:
      case GIMPLE_OMP_ORDERED:
      case GIMPLE_OMP_CRITICAL:
      case GIMPLE_OMP_GRID_BODY:
        ctx = new_omp_context (stmt, ctx);
        scan_omp (gimple_omp_body_ptr (stmt), ctx);

      case GIMPLE_OMP_TARGET:
        scan_omp_target (as_a <gomp_target *> (stmt), ctx);

      case GIMPLE_OMP_TEAMS:
        scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);

      /* GIMPLE_BIND case (label dropped): record bind vars in the decl
         map so the operand walk leaves them unchanged.  */
      *handled_ops_p = false;
      for (var = gimple_bind_vars (as_a <gbind *> (stmt));
           var = DECL_CHAIN (var))
        insert_decl_map (&ctx->cb, var, var);

      /* Default case (label dropped): let the walker handle operands.  */
      *handled_ops_p = false;
/* Scan all the statements starting at the current statement. CTX
   contains context information about the OMP directives and
   clauses found during the scan. */

/* NOTE(review): the return type, braces and (apparently) the
   `wi.info = ctx' assignment were dropped in extraction — CTX is
   otherwise unused here, so the callbacks presumably receive it via
   wi.info; verify against the upstream source.  */
scan_omp (gimple_seq *body_p, omp_context *ctx)
  location_t saved_location;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;

  /* Walk the sequence with the statement/operand callbacks above,
     preserving input_location across the walk.  */
  saved_location = input_location;
  walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
  input_location = saved_location;
/* Re-gimplification and code generation routines. */

/* If a context was created for STMT when it was scanned, return it. */

/* NOTE(review): the braces and the declaration of N (a splay_tree_node)
   were dropped in extraction.  */
static omp_context *
maybe_lookup_ctx (gimple *stmt)
  /* all_contexts maps scanned statements to their omp_context.  */
  n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
  return n ? (omp_context *) n->value : NULL;
/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context. Suppose that we have the
   following parallel nesting (variable UIDs showed for clarity):

	#omp parallel shared(iD.1562) -> outer parallel
	  iD.1562 = iD.1562 + 1;

	  #omp parallel shared (iD.1562) -> inner parallel
	    iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

	outer parallel .omp_data_s.1.i -> iD.1562
	inner parallel .omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it. So, in
   this case we would have:

	.omp_data_o.1.i = iD.1562;
	#omp parallel shared(iD.1562) -> outer parallel
	  .omp_data_i.1 = &.omp_data_o.1
	  .omp_data_i.1->i = .omp_data_i.1->i + 1;

	  .omp_data_o.2.i = iD.1562; -> **
	  #omp parallel shared(iD.1562) -> inner parallel
	    .omp_data_i.2 = &.omp_data_o.2
	    .omp_data_i.2->i = .omp_data_i.2->i - 1;

   ** This is a problem. The symbol iD.1562 cannot be referenced
      inside the body of the outer parallel region. But since we are
      emitting this copy operation while expanding the inner parallel
      directive, we need to access the CTX structure of the outer
      parallel directive to get the correct mapping:

	.omp_data_o.2.i = .omp_data_i.1->i

   Since there may be other workshare or parallel directives enclosing
   the parallel directive, it may be necessary to walk up the context
   parent chain. This is not a problem in general because nested
   parallelism happens only rarely. */

/* NOTE(review): the return type, braces and the declarations of T and
   UP were dropped in extraction.  */
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
  /* Walk outward until some enclosing context maps DECL.  */
  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  /* A nested context must find a mapping unless DECL is global.  */
  gcc_assert (!ctx->is_nested || t || is_global_var (decl));

  /* Fall back to DECL itself when nothing maps it.  */
  return t ? t : decl;
/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
   in outer contexts. */

/* NOTE(review): the return type, braces and the declarations of T and
   UP were dropped in extraction.  */
maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
  /* Walk outward until some enclosing context maps DECL; unlike
     lookup_decl_in_outer_ctx there is no assertion when none does.  */
  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  return t ? t : decl;
/* Construct the initialization value for reduction operation OP. */

/* NOTE(review): the `switch (op)' head, braces, most `case' labels
   (the arithmetic/bitwise ops), declarations of the wide-int MIN/MAX
   locals, and `else' lines were dropped in extraction.  Surviving
   tokens kept verbatim; each group returns the identity element of the
   corresponding reduction.  */
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
    /* Identity zero (labels for the +,-,|,^ group partly dropped).  */
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
      return build_zero_cst (type);

    /* Identity one (the multiplicative/logical-and group).  */
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    /* Identity all-ones — presumably the BIT_AND_EXPR case; its label
       was dropped.  */
    return fold_convert_loc (loc, type, integer_minus_one_node);

    /* MAX reduction: identity is the type's minimum value.  */
    if (SCALAR_FLOAT_TYPE_P (type))
        REAL_VALUE_TYPE max, min;
        if (HONOR_INFINITIES (type))
            real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
          real_maxval (&min, 1, TYPE_MODE (type));
        return build_real (type, min);
    else if (POINTER_TYPE_P (type))
          = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
        return wide_int_to_tree (type, min);
        gcc_assert (INTEGRAL_TYPE_P (type));
        return TYPE_MIN_VALUE (type);

    /* MIN reduction: identity is the type's maximum value.  */
    if (SCALAR_FLOAT_TYPE_P (type))
        REAL_VALUE_TYPE max;
        if (HONOR_INFINITIES (type))
          real_maxval (&max, 0, TYPE_MODE (type));
        return build_real (type, max);
    else if (POINTER_TYPE_P (type))
          = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
        return wide_int_to_tree (type, max);
        gcc_assert (INTEGRAL_TYPE_P (type));
        return TYPE_MAX_VALUE (type);
/* Construct the initialization value for reduction CLAUSE. */

/* NOTE(review): the return type and braces were dropped in
   extraction.  */
omp_reduction_init (tree clause, tree type)
  /* Delegate to omp_reduction_init_op using the clause's location and
     reduction code.  */
  return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
                                OMP_CLAUSE_REDUCTION_CODE (clause), type);
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED. */

/* NOTE(review): the return type, braces, loop-condition lines, an `if'
   head around the GET_MODE_SIZE test and several `continue'/`break'
   lines were dropped in extraction.  Surviving tokens kept verbatim.  */
omp_clause_aligned_alignment (tree clause)
  /* An explicit alignment on the clause wins.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment. */
  unsigned int al = 1;
  machine_mode mode, vmode;
  int vs = targetm.vectorize.autovectorize_vector_sizes ();
  /* Round the autovectorization size down to a power of two.  */
  vs = 1 << floor_log2 (vs);
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  /* For each scalar class, find the widest preferred SIMD vector type
     and take the maximum alignment unit seen.  */
  for (int i = 0; i < 4; i += 2)
    for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
         mode = GET_MODE_WIDER_MODE (mode))
        vmode = targetm.vectorize.preferred_simd_mode (mode);
        if (GET_MODE_CLASS (vmode) != classes[i + 1])
        /* NOTE(review): the `while (vs' (or similar) head of this
           widening condition was dropped.  */
        && GET_MODE_SIZE (vmode) < vs
        && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
          vmode = GET_MODE_2XWIDER_MODE (vmode);

        tree type = lang_hooks.types.type_for_mode (mode, 1);
        if (type == NULL_TREE || TYPE_MODE (type) != mode)
        type = build_vector_type (type, GET_MODE_SIZE (vmode)
                                  / GET_MODE_SIZE (mode));
        if (TYPE_MODE (type) != vmode)
        if (TYPE_ALIGN_UNIT (type) > al)
          al = TYPE_ALIGN_UNIT (type);
  return build_int_cst (integer_type_node, al);
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  */

/* NOTE(review): the return type, braces, part of the header comment,
   guard lines around the max_vf selection (`if (max_vf == 0)' style),
   the `if (c' head of the safelen test and `return' lines were dropped
   in extraction.  Surviving tokens kept verbatim.  */
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx, int &max_vf,
                              tree &idx, tree &lane, tree &ivar, tree &lvar)
  /* Pick the maximum vectorization factor: SIMT when the loop carries
     the _simt_ clause, otherwise the target's max VF.  */
  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
    max_vf = omp_max_simt_vf ();
  max_vf = omp_max_vf ();

  /* Clamp by a constant positive safelen clause, if any.  */
  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
                            OMP_CLAUSE_SAFELEN);
  && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
      || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
  else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
    max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));

  /* Per-lane index and the "current lane" variable.  */
  idx = create_tmp_var (unsigned_type_node);
  lane = create_tmp_var (unsigned_type_node);

  /* Replace the scalar with a max_vf-element "omp simd array".  */
  tree atype = build_array_type_nelts (TREE_TYPE (new_var), max_vf);
  tree avar = create_tmp_var_raw (atype);
  if (TREE_ADDRESSABLE (new_var))
    TREE_ADDRESSABLE (avar) = 1;
  DECL_ATTRIBUTES (avar)
    = tree_cons (get_identifier ("omp simd array"), NULL,
                 DECL_ATTRIBUTES (avar));
  gimple_add_tmp_var (avar);
  /* IVAR/LVAR are the per-iteration and per-lane element accesses.  */
  ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, idx,
                 NULL_TREE, NULL_TREE);
  lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, lane,
                 NULL_TREE, NULL_TREE);
  if (DECL_P (new_var))
      /* Make references to NEW_VAR resolve to the lane element.  */
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
/* Helper function of lower_rec_input_clauses. For a reference
   in simd reduction, add an underlying variable it will reference. */

/* NOTE(review): the return type and braces were dropped in
   extraction; the statements themselves are verbatim.  */
handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
  /* Size of the referenced object; only constant-sized objects get a
     backing temporary here.  */
  tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
  if (TREE_CONSTANT (z))
      /* Create an addressable temporary of the referenced type and
         point NEW_VARD at it.  */
      z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
                              get_name (new_vard));
      gimple_add_tmp_var (z);
      TREE_ADDRESSABLE (z) = 1;
      z = build_fold_addr_expr_loc (loc, z);
      gimplify_assign (new_vard, z, ilist);
3521 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3522 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3523 private variables. Initialization statements go in ILIST, while calls
3524 to destructors go in DLIST. */
3527 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
3528 omp_context
*ctx
, struct omp_for_data
*fd
)
3530 tree c
, dtor
, copyin_seq
, x
, ptr
;
3531 bool copyin_by_ref
= false;
3532 bool lastprivate_firstprivate
= false;
3533 bool reduction_omp_orig_ref
= false;
3535 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3536 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
);
3537 bool maybe_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
3539 tree lane
= NULL_TREE
, idx
= NULL_TREE
;
3540 tree simt_lane
= NULL_TREE
;
3541 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
;
3542 gimple_seq llist
[3] = { };
3546 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3547 with data sharing clauses referencing variable sized vars. That
3548 is unnecessarily hard to support and very unlikely to result in
3549 vectorized code anyway. */
3551 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3552 switch (OMP_CLAUSE_CODE (c
))
3554 case OMP_CLAUSE_LINEAR
:
3555 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
3558 case OMP_CLAUSE_PRIVATE
:
3559 case OMP_CLAUSE_FIRSTPRIVATE
:
3560 case OMP_CLAUSE_LASTPRIVATE
:
3561 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
3564 case OMP_CLAUSE_REDUCTION
:
3565 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
3566 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
3573 /* Do all the fixed sized types in the first pass, and the variable sized
3574 types in the second pass. This makes sure that the scalar arguments to
3575 the variable sized types are processed before we use them in the
3576 variable sized operations. */
3577 for (pass
= 0; pass
< 2; ++pass
)
3579 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3581 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
3584 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
3588 case OMP_CLAUSE_PRIVATE
:
3589 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
3592 case OMP_CLAUSE_SHARED
:
3593 /* Ignore shared directives in teams construct. */
3594 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
3596 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
3598 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
3599 || is_global_var (OMP_CLAUSE_DECL (c
)));
3602 case OMP_CLAUSE_FIRSTPRIVATE
:
3603 case OMP_CLAUSE_COPYIN
:
3605 case OMP_CLAUSE_LINEAR
:
3606 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
3607 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
3608 lastprivate_firstprivate
= true;
3610 case OMP_CLAUSE_REDUCTION
:
3611 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
3612 reduction_omp_orig_ref
= true;
3614 case OMP_CLAUSE__LOOPTEMP_
:
3615 /* Handle _looptemp_ clauses only on parallel/task. */
3619 case OMP_CLAUSE_LASTPRIVATE
:
3620 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
3622 lastprivate_firstprivate
= true;
3623 if (pass
!= 0 || is_taskloop_ctx (ctx
))
3626 /* Even without corresponding firstprivate, if
3627 decl is Fortran allocatable, it needs outer var
3630 && lang_hooks
.decls
.omp_private_outer_ref
3631 (OMP_CLAUSE_DECL (c
)))
3632 lastprivate_firstprivate
= true;
3634 case OMP_CLAUSE_ALIGNED
:
3637 var
= OMP_CLAUSE_DECL (c
);
3638 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
3639 && !is_global_var (var
))
3641 new_var
= maybe_lookup_decl (var
, ctx
);
3642 if (new_var
== NULL_TREE
)
3643 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
3644 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
3645 tree alarg
= omp_clause_aligned_alignment (c
);
3646 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
3647 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
3648 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
3649 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
3650 gimplify_and_add (x
, ilist
);
3652 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
3653 && is_global_var (var
))
3655 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
3656 new_var
= lookup_decl (var
, ctx
);
3657 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
3658 t
= build_fold_addr_expr_loc (clause_loc
, t
);
3659 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
3660 tree alarg
= omp_clause_aligned_alignment (c
);
3661 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
3662 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
3663 t
= fold_convert_loc (clause_loc
, ptype
, t
);
3664 x
= create_tmp_var (ptype
);
3665 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
3666 gimplify_and_add (t
, ilist
);
3667 t
= build_simple_mem_ref_loc (clause_loc
, x
);
3668 SET_DECL_VALUE_EXPR (new_var
, t
);
3669 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
3676 new_var
= var
= OMP_CLAUSE_DECL (c
);
3677 if (c_kind
== OMP_CLAUSE_REDUCTION
&& TREE_CODE (var
) == MEM_REF
)
3679 var
= TREE_OPERAND (var
, 0);
3680 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
3681 var
= TREE_OPERAND (var
, 0);
3682 if (TREE_CODE (var
) == INDIRECT_REF
3683 || TREE_CODE (var
) == ADDR_EXPR
)
3684 var
= TREE_OPERAND (var
, 0);
3685 if (is_variable_sized (var
))
3687 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
3688 var
= DECL_VALUE_EXPR (var
);
3689 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
3690 var
= TREE_OPERAND (var
, 0);
3691 gcc_assert (DECL_P (var
));
3695 if (c_kind
!= OMP_CLAUSE_COPYIN
)
3696 new_var
= lookup_decl (var
, ctx
);
3698 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
3703 /* C/C++ array section reductions. */
3704 else if (c_kind
== OMP_CLAUSE_REDUCTION
3705 && var
!= OMP_CLAUSE_DECL (c
))
3710 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
3711 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
3712 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
3714 tree b
= TREE_OPERAND (orig_var
, 1);
3715 b
= maybe_lookup_decl (b
, ctx
);
3718 b
= TREE_OPERAND (orig_var
, 1);
3719 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
3721 if (integer_zerop (bias
))
3725 bias
= fold_convert_loc (clause_loc
,
3726 TREE_TYPE (b
), bias
);
3727 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
3728 TREE_TYPE (b
), b
, bias
);
3730 orig_var
= TREE_OPERAND (orig_var
, 0);
3732 if (TREE_CODE (orig_var
) == INDIRECT_REF
3733 || TREE_CODE (orig_var
) == ADDR_EXPR
)
3734 orig_var
= TREE_OPERAND (orig_var
, 0);
3735 tree d
= OMP_CLAUSE_DECL (c
);
3736 tree type
= TREE_TYPE (d
);
3737 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
3738 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
3739 const char *name
= get_name (orig_var
);
3740 if (TREE_CONSTANT (v
))
3742 x
= create_tmp_var_raw (type
, name
);
3743 gimple_add_tmp_var (x
);
3744 TREE_ADDRESSABLE (x
) = 1;
3745 x
= build_fold_addr_expr_loc (clause_loc
, x
);
3750 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
3751 tree t
= maybe_lookup_decl (v
, ctx
);
3755 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
3756 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
3757 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
3759 build_int_cst (TREE_TYPE (v
), 1));
3760 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
3762 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
3763 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
3764 x
= build_call_expr_loc (clause_loc
, atmp
, 2, t
, al
);
3767 tree ptype
= build_pointer_type (TREE_TYPE (type
));
3768 x
= fold_convert_loc (clause_loc
, ptype
, x
);
3769 tree y
= create_tmp_var (ptype
, name
);
3770 gimplify_assign (y
, x
, ilist
);
3774 if (!integer_zerop (bias
))
3776 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
3778 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
3780 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
3781 pointer_sized_int_node
, yb
, bias
);
3782 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
3783 yb
= create_tmp_var (ptype
, name
);
3784 gimplify_assign (yb
, x
, ilist
);
3788 d
= TREE_OPERAND (d
, 0);
3789 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
3790 d
= TREE_OPERAND (d
, 0);
3791 if (TREE_CODE (d
) == ADDR_EXPR
)
3793 if (orig_var
!= var
)
3795 gcc_assert (is_variable_sized (orig_var
));
3796 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
3798 gimplify_assign (new_var
, x
, ilist
);
3799 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
3800 tree t
= build_fold_indirect_ref (new_var
);
3801 DECL_IGNORED_P (new_var
) = 0;
3802 TREE_THIS_NOTRAP (t
);
3803 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
3804 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
3808 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
3809 build_int_cst (ptype
, 0));
3810 SET_DECL_VALUE_EXPR (new_var
, x
);
3811 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
3816 gcc_assert (orig_var
== var
);
3817 if (TREE_CODE (d
) == INDIRECT_REF
)
3819 x
= create_tmp_var (ptype
, name
);
3820 TREE_ADDRESSABLE (x
) = 1;
3821 gimplify_assign (x
, yb
, ilist
);
3822 x
= build_fold_addr_expr_loc (clause_loc
, x
);
3824 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
3825 gimplify_assign (new_var
, x
, ilist
);
3827 tree y1
= create_tmp_var (ptype
, NULL
);
3828 gimplify_assign (y1
, y
, ilist
);
3829 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
3830 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
3831 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
3832 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
3834 y2
= create_tmp_var (ptype
, NULL
);
3835 gimplify_assign (y2
, y
, ilist
);
3836 tree ref
= build_outer_var_ref (var
, ctx
);
3837 /* For ref build_outer_var_ref already performs this. */
3838 if (TREE_CODE (d
) == INDIRECT_REF
)
3839 gcc_assert (omp_is_reference (var
));
3840 else if (TREE_CODE (d
) == ADDR_EXPR
)
3841 ref
= build_fold_addr_expr (ref
);
3842 else if (omp_is_reference (var
))
3843 ref
= build_fold_addr_expr (ref
);
3844 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
3845 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
3846 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
3848 y3
= create_tmp_var (ptype
, NULL
);
3849 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
3853 y4
= create_tmp_var (ptype
, NULL
);
3854 gimplify_assign (y4
, ref
, dlist
);
3857 tree i
= create_tmp_var (TREE_TYPE (v
), NULL
);
3858 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
3859 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
3860 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
3861 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
3864 i2
= create_tmp_var (TREE_TYPE (v
), NULL
);
3865 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
3866 body2
= create_artificial_label (UNKNOWN_LOCATION
);
3867 end2
= create_artificial_label (UNKNOWN_LOCATION
);
3868 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
3870 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
3872 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
3873 tree decl_placeholder
3874 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
3875 SET_DECL_VALUE_EXPR (decl_placeholder
,
3876 build_simple_mem_ref (y1
));
3877 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
3878 SET_DECL_VALUE_EXPR (placeholder
,
3879 y3
? build_simple_mem_ref (y3
)
3881 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
3882 x
= lang_hooks
.decls
.omp_clause_default_ctor
3883 (c
, build_simple_mem_ref (y1
),
3884 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
3886 gimplify_and_add (x
, ilist
);
3887 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
3889 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
3890 lower_omp (&tseq
, ctx
);
3891 gimple_seq_add_seq (ilist
, tseq
);
3893 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
3896 SET_DECL_VALUE_EXPR (decl_placeholder
,
3897 build_simple_mem_ref (y2
));
3898 SET_DECL_VALUE_EXPR (placeholder
,
3899 build_simple_mem_ref (y4
));
3900 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
3901 lower_omp (&tseq
, ctx
);
3902 gimple_seq_add_seq (dlist
, tseq
);
3903 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
3905 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
3906 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
3907 x
= lang_hooks
.decls
.omp_clause_dtor
3908 (c
, build_simple_mem_ref (y2
));
3911 gimple_seq tseq
= NULL
;
3913 gimplify_stmt (&dtor
, &tseq
);
3914 gimple_seq_add_seq (dlist
, tseq
);
3919 x
= omp_reduction_init (c
, TREE_TYPE (type
));
3920 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
3922 /* reduction(-:var) sums up the partial results, so it
3923 acts identically to reduction(+:var). */
3924 if (code
== MINUS_EXPR
)
3927 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
3930 x
= build2 (code
, TREE_TYPE (type
),
3931 build_simple_mem_ref (y4
),
3932 build_simple_mem_ref (y2
));
3933 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
3937 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
3938 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
3939 gimple_seq_add_stmt (ilist
, g
);
3942 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
3943 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
3944 gimple_seq_add_stmt (ilist
, g
);
3946 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
3947 build_int_cst (TREE_TYPE (i
), 1));
3948 gimple_seq_add_stmt (ilist
, g
);
3949 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
3950 gimple_seq_add_stmt (ilist
, g
);
3951 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
3954 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
3955 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
3956 gimple_seq_add_stmt (dlist
, g
);
3959 g
= gimple_build_assign
3960 (y4
, POINTER_PLUS_EXPR
, y4
,
3961 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
3962 gimple_seq_add_stmt (dlist
, g
);
3964 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
3965 build_int_cst (TREE_TYPE (i2
), 1));
3966 gimple_seq_add_stmt (dlist
, g
);
3967 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
3968 gimple_seq_add_stmt (dlist
, g
);
3969 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
3973 else if (is_variable_sized (var
))
3975 /* For variable sized types, we need to allocate the
3976 actual storage here. Call alloca and store the
3977 result in the pointer decl that we created elsewhere. */
3981 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
3986 ptr
= DECL_VALUE_EXPR (new_var
);
3987 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
3988 ptr
= TREE_OPERAND (ptr
, 0);
3989 gcc_assert (DECL_P (ptr
));
3990 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
3992 /* void *tmp = __builtin_alloca */
3993 atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
3994 stmt
= gimple_build_call (atmp
, 2, x
,
3995 size_int (DECL_ALIGN (var
)));
3996 tmp
= create_tmp_var_raw (ptr_type_node
);
3997 gimple_add_tmp_var (tmp
);
3998 gimple_call_set_lhs (stmt
, tmp
);
4000 gimple_seq_add_stmt (ilist
, stmt
);
4002 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
4003 gimplify_assign (ptr
, x
, ilist
);
4006 else if (omp_is_reference (var
))
4008 /* For references that are being privatized for Fortran,
4009 allocate new backing storage for the new pointer
4010 variable. This allows us to avoid changing all the
4011 code that expects a pointer to something that expects
4012 a direct variable. */
4016 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
4017 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
4019 x
= build_receiver_ref (var
, false, ctx
);
4020 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4022 else if (TREE_CONSTANT (x
))
4024 /* For reduction in SIMD loop, defer adding the
4025 initialization of the reference, because if we decide
4026 to use SIMD array for it, the initilization could cause
4028 if (c_kind
== OMP_CLAUSE_REDUCTION
&& is_simd
)
4032 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
4034 gimple_add_tmp_var (x
);
4035 TREE_ADDRESSABLE (x
) = 1;
4036 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4042 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4043 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
4044 tree al
= size_int (TYPE_ALIGN (rtype
));
4045 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
4050 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4051 gimplify_assign (new_var
, x
, ilist
);
4054 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
4056 else if (c_kind
== OMP_CLAUSE_REDUCTION
4057 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4065 switch (OMP_CLAUSE_CODE (c
))
4067 case OMP_CLAUSE_SHARED
:
4068 /* Ignore shared directives in teams construct. */
4069 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
4071 /* Shared global vars are just accessed directly. */
4072 if (is_global_var (new_var
))
4074 /* For taskloop firstprivate/lastprivate, represented
4075 as firstprivate and shared clause on the task, new_var
4076 is the firstprivate var. */
4077 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
4079 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4080 needs to be delayed until after fixup_child_record_type so
4081 that we get the correct type during the dereference. */
4082 by_ref
= use_pointer_for_field (var
, ctx
);
4083 x
= build_receiver_ref (var
, by_ref
, ctx
);
4084 SET_DECL_VALUE_EXPR (new_var
, x
);
4085 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4087 /* ??? If VAR is not passed by reference, and the variable
4088 hasn't been initialized yet, then we'll get a warning for
4089 the store into the omp_data_s structure. Ideally, we'd be
4090 able to notice this and not store anything at all, but
4091 we're generating code too early. Suppress the warning. */
4093 TREE_NO_WARNING (var
) = 1;
4096 case OMP_CLAUSE_LASTPRIVATE
:
4097 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4101 case OMP_CLAUSE_PRIVATE
:
4102 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
4103 x
= build_outer_var_ref (var
, ctx
);
4104 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
4106 if (is_task_ctx (ctx
))
4107 x
= build_receiver_ref (var
, false, ctx
);
4109 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
4115 nx
= lang_hooks
.decls
.omp_clause_default_ctor
4116 (c
, unshare_expr (new_var
), x
);
4119 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
4120 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
4121 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
4122 && lower_rec_simd_input_clauses (new_var
, ctx
, max_vf
,
4123 idx
, lane
, ivar
, lvar
))
4126 x
= lang_hooks
.decls
.omp_clause_default_ctor
4127 (c
, unshare_expr (ivar
), x
);
4129 gimplify_and_add (x
, &llist
[0]);
4132 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
4135 gimple_seq tseq
= NULL
;
4138 gimplify_stmt (&dtor
, &tseq
);
4139 gimple_seq_add_seq (&llist
[1], tseq
);
4146 gimplify_and_add (nx
, ilist
);
4150 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
4153 gimple_seq tseq
= NULL
;
4156 gimplify_stmt (&dtor
, &tseq
);
4157 gimple_seq_add_seq (dlist
, tseq
);
4161 case OMP_CLAUSE_LINEAR
:
4162 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
4163 goto do_firstprivate
;
4164 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4167 x
= build_outer_var_ref (var
, ctx
);
4170 case OMP_CLAUSE_FIRSTPRIVATE
:
4171 if (is_task_ctx (ctx
))
4173 if (omp_is_reference (var
) || is_variable_sized (var
))
4175 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
4177 || use_pointer_for_field (var
, NULL
))
4179 x
= build_receiver_ref (var
, false, ctx
);
4180 SET_DECL_VALUE_EXPR (new_var
, x
);
4181 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4186 x
= build_outer_var_ref (var
, ctx
);
4189 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
4190 && gimple_omp_for_combined_into_p (ctx
->stmt
))
4192 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
4193 tree stept
= TREE_TYPE (t
);
4194 tree ct
= omp_find_clause (clauses
,
4195 OMP_CLAUSE__LOOPTEMP_
);
4197 tree l
= OMP_CLAUSE_DECL (ct
);
4198 tree n1
= fd
->loop
.n1
;
4199 tree step
= fd
->loop
.step
;
4200 tree itype
= TREE_TYPE (l
);
4201 if (POINTER_TYPE_P (itype
))
4202 itype
= signed_type_for (itype
);
4203 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
4204 if (TYPE_UNSIGNED (itype
)
4205 && fd
->loop
.cond_code
== GT_EXPR
)
4206 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
4207 fold_build1 (NEGATE_EXPR
, itype
, l
),
4208 fold_build1 (NEGATE_EXPR
,
4211 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
4212 t
= fold_build2 (MULT_EXPR
, stept
,
4213 fold_convert (stept
, l
), t
);
4215 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4217 x
= lang_hooks
.decls
.omp_clause_linear_ctor
4219 gimplify_and_add (x
, ilist
);
4223 if (POINTER_TYPE_P (TREE_TYPE (x
)))
4224 x
= fold_build2 (POINTER_PLUS_EXPR
,
4225 TREE_TYPE (x
), x
, t
);
4227 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
4230 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
4231 || TREE_ADDRESSABLE (new_var
))
4232 && lower_rec_simd_input_clauses (new_var
, ctx
, max_vf
,
4233 idx
, lane
, ivar
, lvar
))
4235 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
4237 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
4238 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
4239 gimplify_and_add (x
, ilist
);
4240 gimple_stmt_iterator gsi
4241 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
4243 = gimple_build_assign (unshare_expr (lvar
), iv
);
4244 gsi_insert_before_without_update (&gsi
, g
,
4246 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
4247 enum tree_code code
= PLUS_EXPR
;
4248 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
4249 code
= POINTER_PLUS_EXPR
;
4250 g
= gimple_build_assign (iv
, code
, iv
, t
);
4251 gsi_insert_before_without_update (&gsi
, g
,
4255 x
= lang_hooks
.decls
.omp_clause_copy_ctor
4256 (c
, unshare_expr (ivar
), x
);
4257 gimplify_and_add (x
, &llist
[0]);
4258 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
4261 gimple_seq tseq
= NULL
;
4264 gimplify_stmt (&dtor
, &tseq
);
4265 gimple_seq_add_seq (&llist
[1], tseq
);
4270 x
= lang_hooks
.decls
.omp_clause_copy_ctor
4271 (c
, unshare_expr (new_var
), x
);
4272 gimplify_and_add (x
, ilist
);
4275 case OMP_CLAUSE__LOOPTEMP_
:
4276 gcc_assert (is_taskreg_ctx (ctx
));
4277 x
= build_outer_var_ref (var
, ctx
);
4278 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4279 gimplify_and_add (x
, ilist
);
4282 case OMP_CLAUSE_COPYIN
:
4283 by_ref
= use_pointer_for_field (var
, NULL
);
4284 x
= build_receiver_ref (var
, by_ref
, ctx
);
4285 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
4286 append_to_statement_list (x
, ©in_seq
);
4287 copyin_by_ref
|= by_ref
;
4290 case OMP_CLAUSE_REDUCTION
:
4291 /* OpenACC reductions are initialized using the
4292 GOACC_REDUCTION internal function. */
4293 if (is_gimple_omp_oacc (ctx
->stmt
))
4295 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4297 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
4299 x
= build_outer_var_ref (var
, ctx
);
4301 if (omp_is_reference (var
)
4302 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
4304 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4305 SET_DECL_VALUE_EXPR (placeholder
, x
);
4306 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
4307 tree new_vard
= new_var
;
4308 if (omp_is_reference (var
))
4310 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
4311 new_vard
= TREE_OPERAND (new_var
, 0);
4312 gcc_assert (DECL_P (new_vard
));
4315 && lower_rec_simd_input_clauses (new_var
, ctx
, max_vf
,
4316 idx
, lane
, ivar
, lvar
))
4318 if (new_vard
== new_var
)
4320 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
4321 SET_DECL_VALUE_EXPR (new_var
, ivar
);
4325 SET_DECL_VALUE_EXPR (new_vard
,
4326 build_fold_addr_expr (ivar
));
4327 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
4329 x
= lang_hooks
.decls
.omp_clause_default_ctor
4330 (c
, unshare_expr (ivar
),
4331 build_outer_var_ref (var
, ctx
));
4333 gimplify_and_add (x
, &llist
[0]);
4334 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4336 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4337 lower_omp (&tseq
, ctx
);
4338 gimple_seq_add_seq (&llist
[0], tseq
);
4340 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4341 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4342 lower_omp (&tseq
, ctx
);
4343 gimple_seq_add_seq (&llist
[1], tseq
);
4344 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4345 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4346 if (new_vard
== new_var
)
4347 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4349 SET_DECL_VALUE_EXPR (new_vard
,
4350 build_fold_addr_expr (lvar
));
4351 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
4356 gimplify_stmt (&dtor
, &tseq
);
4357 gimple_seq_add_seq (&llist
[1], tseq
);
4361 /* If this is a reference to constant size reduction var
4362 with placeholder, we haven't emitted the initializer
4363 for it because it is undesirable if SIMD arrays are used.
4364 But if they aren't used, we need to emit the deferred
4365 initialization now. */
4366 else if (omp_is_reference (var
) && is_simd
)
4367 handle_simd_reference (clause_loc
, new_vard
, ilist
);
4368 x
= lang_hooks
.decls
.omp_clause_default_ctor
4369 (c
, unshare_expr (new_var
),
4370 build_outer_var_ref (var
, ctx
));
4372 gimplify_and_add (x
, ilist
);
4373 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4375 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4376 lower_omp (&tseq
, ctx
);
4377 gimple_seq_add_seq (ilist
, tseq
);
4379 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4382 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4383 lower_omp (&tseq
, ctx
);
4384 gimple_seq_add_seq (dlist
, tseq
);
4385 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4387 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4392 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
4393 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
4394 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
4396 /* reduction(-:var) sums up the partial results, so it
4397 acts identically to reduction(+:var). */
4398 if (code
== MINUS_EXPR
)
4401 tree new_vard
= new_var
;
4402 if (is_simd
&& omp_is_reference (var
))
4404 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
4405 new_vard
= TREE_OPERAND (new_var
, 0);
4406 gcc_assert (DECL_P (new_vard
));
4409 && lower_rec_simd_input_clauses (new_var
, ctx
, max_vf
,
4410 idx
, lane
, ivar
, lvar
))
4412 tree ref
= build_outer_var_ref (var
, ctx
);
4414 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
4419 simt_lane
= create_tmp_var (unsigned_type_node
);
4420 x
= build_call_expr_internal_loc
4421 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
4422 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
4423 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
4424 gimplify_assign (ivar
, x
, &llist
[2]);
4426 x
= build2 (code
, TREE_TYPE (ref
), ref
, ivar
);
4427 ref
= build_outer_var_ref (var
, ctx
);
4428 gimplify_assign (ref
, x
, &llist
[1]);
4430 if (new_vard
!= new_var
)
4432 SET_DECL_VALUE_EXPR (new_vard
,
4433 build_fold_addr_expr (lvar
));
4434 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
4439 if (omp_is_reference (var
) && is_simd
)
4440 handle_simd_reference (clause_loc
, new_vard
, ilist
);
4441 gimplify_assign (new_var
, x
, ilist
);
4444 tree ref
= build_outer_var_ref (var
, ctx
);
4446 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
4447 ref
= build_outer_var_ref (var
, ctx
);
4448 gimplify_assign (ref
, x
, dlist
);
4462 tree uid
= create_tmp_var (ptr_type_node
, "simduid");
4463 /* Don't want uninit warnings on simduid, it is always uninitialized,
4464 but we use it not for the value, but for the DECL_UID only. */
4465 TREE_NO_WARNING (uid
) = 1;
4467 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 1, uid
);
4468 gimple_call_set_lhs (g
, lane
);
4469 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
4470 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
4471 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
4472 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
4473 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
4474 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
4475 g
= gimple_build_assign (lane
, INTEGER_CST
,
4476 build_int_cst (unsigned_type_node
, 0));
4477 gimple_seq_add_stmt (ilist
, g
);
4478 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4481 tree simt_vf
= create_tmp_var (unsigned_type_node
);
4482 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
4483 gimple_call_set_lhs (g
, simt_vf
);
4484 gimple_seq_add_stmt (dlist
, g
);
4486 tree t
= build_int_cst (unsigned_type_node
, 1);
4487 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
4488 gimple_seq_add_stmt (dlist
, g
);
4490 t
= build_int_cst (unsigned_type_node
, 0);
4491 g
= gimple_build_assign (idx
, INTEGER_CST
, t
);
4492 gimple_seq_add_stmt (dlist
, g
);
4494 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
4495 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
4496 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4497 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
4498 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
4500 gimple_seq_add_seq (dlist
, llist
[2]);
4502 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
4503 gimple_seq_add_stmt (dlist
, g
);
4505 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
4506 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
4507 gimple_seq_add_stmt (dlist
, g
);
4509 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
4511 for (int i
= 0; i
< 2; i
++)
4514 tree vf
= create_tmp_var (unsigned_type_node
);
4515 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
4516 gimple_call_set_lhs (g
, vf
);
4517 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
4518 gimple_seq_add_stmt (seq
, g
);
4519 tree t
= build_int_cst (unsigned_type_node
, 0);
4520 g
= gimple_build_assign (idx
, INTEGER_CST
, t
);
4521 gimple_seq_add_stmt (seq
, g
);
4522 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
4523 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
4524 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4525 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
4526 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
4527 gimple_seq_add_seq (seq
, llist
[i
]);
4528 t
= build_int_cst (unsigned_type_node
, 1);
4529 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx
, t
);
4530 gimple_seq_add_stmt (seq
, g
);
4531 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
4532 g
= gimple_build_cond (LT_EXPR
, idx
, vf
, body
, end
);
4533 gimple_seq_add_stmt (seq
, g
);
4534 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
4538 /* The copyin sequence is not to be executed by the main thread, since
4539 that would result in self-copies. Perhaps not visible to scalars,
4540 but it certainly is to C++ operator=. */
4543 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
4545 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
4546 build_int_cst (TREE_TYPE (x
), 0));
4547 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
4548 gimplify_and_add (x
, ilist
);
4551 /* If any copyin variable is passed by reference, we must ensure the
4552 master thread doesn't modify it before it is copied over in all
4553 threads. Similarly for variables in both firstprivate and
4554 lastprivate clauses we need to ensure the lastprivate copying
4555 happens after firstprivate copying in all threads. And similarly
4556 for UDRs if initializer expression refers to omp_orig. */
4557 if (copyin_by_ref
|| lastprivate_firstprivate
|| reduction_omp_orig_ref
)
4559 /* Don't add any barrier for #pragma omp simd or
4560 #pragma omp distribute. */
4561 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
4562 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
)
4563 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
4566 /* If max_vf is non-zero, then we can use only a vectorization factor
4567 up to the max_vf we chose. So stick it into the safelen clause. */
4570 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4571 OMP_CLAUSE_SAFELEN
);
4573 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c
)) == INTEGER_CST
4574 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c
),
4577 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
4578 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
4580 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
4581 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
4587 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4588 both parallel and workshare constructs. PREDICATE may be NULL if it's
4592 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*stmt_list
,
4595 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
4596 bool par_clauses
= false;
4597 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
4599 /* Early exit if there are no lastprivate or linear clauses. */
4600 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
4601 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
4602 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
4603 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
4605 if (clauses
== NULL
)
4607 /* If this was a workshare clause, see if it had been combined
4608 with its parallel. In that case, look for the clauses on the
4609 parallel statement itself. */
4610 if (is_parallel_ctx (ctx
))
4614 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
4617 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
4618 OMP_CLAUSE_LASTPRIVATE
);
4619 if (clauses
== NULL
)
4624 bool maybe_simt
= false;
4625 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4626 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
4628 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
4629 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
4631 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
4637 tree label_true
, arm1
, arm2
;
4638 enum tree_code pred_code
= TREE_CODE (predicate
);
4640 label
= create_artificial_label (UNKNOWN_LOCATION
);
4641 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
4642 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
4644 arm1
= TREE_OPERAND (predicate
, 0);
4645 arm2
= TREE_OPERAND (predicate
, 1);
4646 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
4647 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
4652 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
4653 arm2
= boolean_false_node
;
4654 pred_code
= NE_EXPR
;
4658 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
4659 c
= fold_convert (integer_type_node
, c
);
4660 simtcond
= create_tmp_var (integer_type_node
);
4661 gimplify_assign (simtcond
, c
, stmt_list
);
4662 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
4664 c
= create_tmp_var (integer_type_node
);
4665 gimple_call_set_lhs (g
, c
);
4666 gimple_seq_add_stmt (stmt_list
, g
);
4667 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
4671 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
4672 gimple_seq_add_stmt (stmt_list
, stmt
);
4673 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
4676 for (c
= clauses
; c
;)
4679 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4681 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4682 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
4683 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
4685 var
= OMP_CLAUSE_DECL (c
);
4686 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4687 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
4688 && is_taskloop_ctx (ctx
))
4690 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
4691 new_var
= lookup_decl (var
, ctx
->outer
);
4695 new_var
= lookup_decl (var
, ctx
);
4696 /* Avoid uninitialized warnings for lastprivate and
4697 for linear iterators. */
4699 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4700 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
4701 TREE_NO_WARNING (new_var
) = 1;
4704 if (simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
4706 tree val
= DECL_VALUE_EXPR (new_var
);
4707 if (TREE_CODE (val
) == ARRAY_REF
4708 && VAR_P (TREE_OPERAND (val
, 0))
4709 && lookup_attribute ("omp simd array",
4710 DECL_ATTRIBUTES (TREE_OPERAND (val
,
4713 if (lastlane
== NULL
)
4715 lastlane
= create_tmp_var (unsigned_type_node
);
4717 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
4719 TREE_OPERAND (val
, 1));
4720 gimple_call_set_lhs (g
, lastlane
);
4721 gimple_seq_add_stmt (stmt_list
, g
);
4723 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
4724 TREE_OPERAND (val
, 0), lastlane
,
4725 NULL_TREE
, NULL_TREE
);
4729 if (simtlast
== NULL
)
4731 simtlast
= create_tmp_var (unsigned_type_node
);
4732 g
= gimple_build_call_internal
4733 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
4734 gimple_call_set_lhs (g
, simtlast
);
4735 gimple_seq_add_stmt (stmt_list
, g
);
4737 x
= build_call_expr_internal_loc
4738 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
4739 TREE_TYPE (new_var
), 2, new_var
, simtlast
);
4740 new_var
= unshare_expr (new_var
);
4741 gimplify_assign (new_var
, x
, stmt_list
);
4742 new_var
= unshare_expr (new_var
);
4747 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4748 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
4750 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
4751 gimple_seq_add_seq (stmt_list
,
4752 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
4753 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
4755 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
4756 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
4758 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
4759 gimple_seq_add_seq (stmt_list
,
4760 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
4761 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
4765 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4766 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c
))
4768 gcc_checking_assert (is_taskloop_ctx (ctx
));
4769 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
4771 if (is_global_var (ovar
))
4775 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
4776 if (omp_is_reference (var
))
4777 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
4778 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
4779 gimplify_and_add (x
, stmt_list
);
4781 c
= OMP_CLAUSE_CHAIN (c
);
4782 if (c
== NULL
&& !par_clauses
)
4784 /* If this was a workshare clause, see if it had been combined
4785 with its parallel. In that case, continue looking for the
4786 clauses also on the parallel statement itself. */
4787 if (is_parallel_ctx (ctx
))
4791 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
4794 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
4795 OMP_CLAUSE_LASTPRIVATE
);
4801 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
4804 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4805 (which might be a placeholder). INNER is true if this is an inner
4806 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4807 join markers. Generate the before-loop forking sequence in
4808 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4809 general form of these sequences is
4811 GOACC_REDUCTION_SETUP
4813 GOACC_REDUCTION_INIT
4815 GOACC_REDUCTION_FINI
4817 GOACC_REDUCTION_TEARDOWN. */
4820 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
4821 gcall
*fork
, gcall
*join
, gimple_seq
*fork_seq
,
4822 gimple_seq
*join_seq
, omp_context
*ctx
)
4824 gimple_seq before_fork
= NULL
;
4825 gimple_seq after_fork
= NULL
;
4826 gimple_seq before_join
= NULL
;
4827 gimple_seq after_join
= NULL
;
4828 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
4829 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
4830 unsigned offset
= 0;
4832 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4833 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
4835 tree orig
= OMP_CLAUSE_DECL (c
);
4836 tree var
= maybe_lookup_decl (orig
, ctx
);
4837 tree ref_to_res
= NULL_TREE
;
4838 tree incoming
, outgoing
, v1
, v2
, v3
;
4839 bool is_private
= false;
4841 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
4842 if (rcode
== MINUS_EXPR
)
4844 else if (rcode
== TRUTH_ANDIF_EXPR
)
4845 rcode
= BIT_AND_EXPR
;
4846 else if (rcode
== TRUTH_ORIF_EXPR
)
4847 rcode
= BIT_IOR_EXPR
;
4848 tree op
= build_int_cst (unsigned_type_node
, rcode
);
4853 incoming
= outgoing
= var
;
4857 /* See if an outer construct also reduces this variable. */
4858 omp_context
*outer
= ctx
;
4860 while (omp_context
*probe
= outer
->outer
)
4862 enum gimple_code type
= gimple_code (probe
->stmt
);
4867 case GIMPLE_OMP_FOR
:
4868 cls
= gimple_omp_for_clauses (probe
->stmt
);
4871 case GIMPLE_OMP_TARGET
:
4872 if (gimple_omp_target_kind (probe
->stmt
)
4873 != GF_OMP_TARGET_KIND_OACC_PARALLEL
)
4876 cls
= gimple_omp_target_clauses (probe
->stmt
);
4884 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
4885 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
4886 && orig
== OMP_CLAUSE_DECL (cls
))
4888 incoming
= outgoing
= lookup_decl (orig
, probe
);
4889 goto has_outer_reduction
;
4891 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
4892 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
4893 && orig
== OMP_CLAUSE_DECL (cls
))
4901 /* This is the outermost construct with this reduction,
4902 see if there's a mapping for it. */
4903 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
4904 && maybe_lookup_field (orig
, outer
) && !is_private
)
4906 ref_to_res
= build_receiver_ref (orig
, false, outer
);
4907 if (omp_is_reference (orig
))
4908 ref_to_res
= build_simple_mem_ref (ref_to_res
);
4910 tree type
= TREE_TYPE (var
);
4911 if (POINTER_TYPE_P (type
))
4912 type
= TREE_TYPE (type
);
4915 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
4919 /* Try to look at enclosing contexts for reduction var,
4920 use original if no mapping found. */
4922 omp_context
*c
= ctx
->outer
;
4925 t
= maybe_lookup_decl (orig
, c
);
4928 incoming
= outgoing
= (t
? t
: orig
);
4931 has_outer_reduction
:;
4935 ref_to_res
= integer_zero_node
;
4937 if (omp_is_reference (orig
))
4939 tree type
= TREE_TYPE (var
);
4940 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
4944 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
4945 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
4948 v1
= create_tmp_var (type
, id
);
4949 v2
= create_tmp_var (type
, id
);
4950 v3
= create_tmp_var (type
, id
);
4952 gimplify_assign (v1
, var
, fork_seq
);
4953 gimplify_assign (v2
, var
, fork_seq
);
4954 gimplify_assign (v3
, var
, fork_seq
);
4956 var
= build_simple_mem_ref (var
);
4957 v1
= build_simple_mem_ref (v1
);
4958 v2
= build_simple_mem_ref (v2
);
4959 v3
= build_simple_mem_ref (v3
);
4960 outgoing
= build_simple_mem_ref (outgoing
);
4962 if (!TREE_CONSTANT (incoming
))
4963 incoming
= build_simple_mem_ref (incoming
);
4968 /* Determine position in reduction buffer, which may be used
4970 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (var
));
4971 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
4972 offset
= (offset
+ align
- 1) & ~(align
- 1);
4973 tree off
= build_int_cst (sizetype
, offset
);
4974 offset
+= GET_MODE_SIZE (mode
);
4978 init_code
= build_int_cst (integer_type_node
,
4979 IFN_GOACC_REDUCTION_INIT
);
4980 fini_code
= build_int_cst (integer_type_node
,
4981 IFN_GOACC_REDUCTION_FINI
);
4982 setup_code
= build_int_cst (integer_type_node
,
4983 IFN_GOACC_REDUCTION_SETUP
);
4984 teardown_code
= build_int_cst (integer_type_node
,
4985 IFN_GOACC_REDUCTION_TEARDOWN
);
4989 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
4990 TREE_TYPE (var
), 6, setup_code
,
4991 unshare_expr (ref_to_res
),
4992 incoming
, level
, op
, off
);
4994 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
4995 TREE_TYPE (var
), 6, init_code
,
4996 unshare_expr (ref_to_res
),
4997 v1
, level
, op
, off
);
4999 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
5000 TREE_TYPE (var
), 6, fini_code
,
5001 unshare_expr (ref_to_res
),
5002 v2
, level
, op
, off
);
5004 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
5005 TREE_TYPE (var
), 6, teardown_code
,
5006 ref_to_res
, v3
, level
, op
, off
);
5008 gimplify_assign (v1
, setup_call
, &before_fork
);
5009 gimplify_assign (v2
, init_call
, &after_fork
);
5010 gimplify_assign (v3
, fini_call
, &before_join
);
5011 gimplify_assign (outgoing
, teardown_call
, &after_join
);
5014 /* Now stitch things together. */
5015 gimple_seq_add_seq (fork_seq
, before_fork
);
5017 gimple_seq_add_stmt (fork_seq
, fork
);
5018 gimple_seq_add_seq (fork_seq
, after_fork
);
5020 gimple_seq_add_seq (join_seq
, before_join
);
5022 gimple_seq_add_stmt (join_seq
, join
);
5023 gimple_seq_add_seq (join_seq
, after_join
);
5026 /* Generate code to implement the REDUCTION clauses. */
5029 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
, omp_context
*ctx
)
5031 gimple_seq sub_seq
= NULL
;
5036 /* OpenACC loop reductions are handled elsewhere. */
5037 if (is_gimple_omp_oacc (ctx
->stmt
))
5040 /* SIMD reductions are handled in lower_rec_input_clauses. */
5041 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
5042 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
5045 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5046 update in that case, otherwise use a lock. */
5047 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
5048 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
5050 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
5051 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
5053 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5063 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5065 tree var
, ref
, new_var
, orig_var
;
5066 enum tree_code code
;
5067 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5069 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5072 orig_var
= var
= OMP_CLAUSE_DECL (c
);
5073 if (TREE_CODE (var
) == MEM_REF
)
5075 var
= TREE_OPERAND (var
, 0);
5076 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
5077 var
= TREE_OPERAND (var
, 0);
5078 if (TREE_CODE (var
) == INDIRECT_REF
5079 || TREE_CODE (var
) == ADDR_EXPR
)
5080 var
= TREE_OPERAND (var
, 0);
5082 if (is_variable_sized (var
))
5084 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
5085 var
= DECL_VALUE_EXPR (var
);
5086 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
5087 var
= TREE_OPERAND (var
, 0);
5088 gcc_assert (DECL_P (var
));
5091 new_var
= lookup_decl (var
, ctx
);
5092 if (var
== OMP_CLAUSE_DECL (c
) && omp_is_reference (var
))
5093 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5094 ref
= build_outer_var_ref (var
, ctx
);
5095 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5097 /* reduction(-:var) sums up the partial results, so it acts
5098 identically to reduction(+:var). */
5099 if (code
== MINUS_EXPR
)
5104 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
5106 addr
= save_expr (addr
);
5107 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
5108 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (ref
), ref
, new_var
);
5109 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
5110 gimplify_and_add (x
, stmt_seqp
);
5113 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
5115 tree d
= OMP_CLAUSE_DECL (c
);
5116 tree type
= TREE_TYPE (d
);
5117 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
5118 tree i
= create_tmp_var (TREE_TYPE (v
), NULL
);
5119 tree ptype
= build_pointer_type (TREE_TYPE (type
));
5120 tree bias
= TREE_OPERAND (d
, 1);
5121 d
= TREE_OPERAND (d
, 0);
5122 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
5124 tree b
= TREE_OPERAND (d
, 1);
5125 b
= maybe_lookup_decl (b
, ctx
);
5128 b
= TREE_OPERAND (d
, 1);
5129 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
5131 if (integer_zerop (bias
))
5135 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
5136 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5137 TREE_TYPE (b
), b
, bias
);
5139 d
= TREE_OPERAND (d
, 0);
5141 /* For ref build_outer_var_ref already performs this, so
5142 only new_var needs a dereference. */
5143 if (TREE_CODE (d
) == INDIRECT_REF
)
5145 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5146 gcc_assert (omp_is_reference (var
) && var
== orig_var
);
5148 else if (TREE_CODE (d
) == ADDR_EXPR
)
5150 if (orig_var
== var
)
5152 new_var
= build_fold_addr_expr (new_var
);
5153 ref
= build_fold_addr_expr (ref
);
5158 gcc_assert (orig_var
== var
);
5159 if (omp_is_reference (var
))
5160 ref
= build_fold_addr_expr (ref
);
5164 tree t
= maybe_lookup_decl (v
, ctx
);
5168 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5169 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
5171 if (!integer_zerop (bias
))
5173 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
5174 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
5175 TREE_TYPE (new_var
), new_var
,
5176 unshare_expr (bias
));
5177 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
5178 TREE_TYPE (ref
), ref
, bias
);
5180 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
5181 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
5182 tree m
= create_tmp_var (ptype
, NULL
);
5183 gimplify_assign (m
, new_var
, stmt_seqp
);
5185 m
= create_tmp_var (ptype
, NULL
);
5186 gimplify_assign (m
, ref
, stmt_seqp
);
5188 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
5189 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5190 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5191 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
5192 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5193 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
5194 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5196 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5197 tree decl_placeholder
5198 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
5199 SET_DECL_VALUE_EXPR (placeholder
, out
);
5200 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5201 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
5202 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
5203 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
5204 gimple_seq_add_seq (&sub_seq
,
5205 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
5206 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5207 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
5208 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
5212 x
= build2 (code
, TREE_TYPE (out
), out
, priv
);
5213 out
= unshare_expr (out
);
5214 gimplify_assign (out
, x
, &sub_seq
);
5216 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
5217 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5218 gimple_seq_add_stmt (&sub_seq
, g
);
5219 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
5220 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5221 gimple_seq_add_stmt (&sub_seq
, g
);
5222 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
5223 build_int_cst (TREE_TYPE (i
), 1));
5224 gimple_seq_add_stmt (&sub_seq
, g
);
5225 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
5226 gimple_seq_add_stmt (&sub_seq
, g
);
5227 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
5229 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5231 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5233 if (omp_is_reference (var
)
5234 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
5236 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
5237 SET_DECL_VALUE_EXPR (placeholder
, ref
);
5238 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5239 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
5240 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
5241 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5242 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
5246 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
5247 ref
= build_outer_var_ref (var
, ctx
);
5248 gimplify_assign (ref
, x
, &sub_seq
);
5252 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
5254 gimple_seq_add_stmt (stmt_seqp
, stmt
);
5256 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
5258 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
5260 gimple_seq_add_stmt (stmt_seqp
, stmt
);
5264 /* Generate code to implement the COPYPRIVATE clauses. */
5267 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
5272 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5274 tree var
, new_var
, ref
, x
;
5276 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5278 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
5281 var
= OMP_CLAUSE_DECL (c
);
5282 by_ref
= use_pointer_for_field (var
, NULL
);
5284 ref
= build_sender_ref (var
, ctx
);
5285 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
5288 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
5289 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
5291 gimplify_assign (ref
, x
, slist
);
5293 ref
= build_receiver_ref (var
, false, ctx
);
5296 ref
= fold_convert_loc (clause_loc
,
5297 build_pointer_type (TREE_TYPE (new_var
)),
5299 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
5301 if (omp_is_reference (var
))
5303 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
5304 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
5305 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5307 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
5308 gimplify_and_add (x
, rlist
);
5313 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5314 and REDUCTION from the sender (aka parent) side. */
5317 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
5321 int ignored_looptemp
= 0;
5322 bool is_taskloop
= false;
5324 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5325 by GOMP_taskloop. */
5326 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
5328 ignored_looptemp
= 2;
5332 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5334 tree val
, ref
, x
, var
;
5335 bool by_ref
, do_in
= false, do_out
= false;
5336 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5338 switch (OMP_CLAUSE_CODE (c
))
5340 case OMP_CLAUSE_PRIVATE
:
5341 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5344 case OMP_CLAUSE_FIRSTPRIVATE
:
5345 case OMP_CLAUSE_COPYIN
:
5346 case OMP_CLAUSE_LASTPRIVATE
:
5347 case OMP_CLAUSE_REDUCTION
:
5349 case OMP_CLAUSE_SHARED
:
5350 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5353 case OMP_CLAUSE__LOOPTEMP_
:
5354 if (ignored_looptemp
)
5364 val
= OMP_CLAUSE_DECL (c
);
5365 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5366 && TREE_CODE (val
) == MEM_REF
)
5368 val
= TREE_OPERAND (val
, 0);
5369 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
5370 val
= TREE_OPERAND (val
, 0);
5371 if (TREE_CODE (val
) == INDIRECT_REF
5372 || TREE_CODE (val
) == ADDR_EXPR
)
5373 val
= TREE_OPERAND (val
, 0);
5374 if (is_variable_sized (val
))
5378 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5379 outer taskloop region. */
5380 omp_context
*ctx_for_o
= ctx
;
5382 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
5383 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5384 ctx_for_o
= ctx
->outer
;
5386 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
5388 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
5389 && is_global_var (var
))
5392 t
= omp_member_access_dummy_var (var
);
5395 var
= DECL_VALUE_EXPR (var
);
5396 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
5398 var
= unshare_and_remap (var
, t
, o
);
5400 var
= unshare_expr (var
);
5403 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
5405 /* Handle taskloop firstprivate/lastprivate, where the
5406 lastprivate on GIMPLE_OMP_TASK is represented as
5407 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5408 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
5409 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
5410 if (use_pointer_for_field (val
, ctx
))
5411 var
= build_fold_addr_expr (var
);
5412 gimplify_assign (x
, var
, ilist
);
5413 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
5417 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
5418 || val
== OMP_CLAUSE_DECL (c
))
5419 && is_variable_sized (val
))
5421 by_ref
= use_pointer_for_field (val
, NULL
);
5423 switch (OMP_CLAUSE_CODE (c
))
5425 case OMP_CLAUSE_FIRSTPRIVATE
:
5426 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
5428 && is_task_ctx (ctx
))
5429 TREE_NO_WARNING (var
) = 1;
5433 case OMP_CLAUSE_PRIVATE
:
5434 case OMP_CLAUSE_COPYIN
:
5435 case OMP_CLAUSE__LOOPTEMP_
:
5439 case OMP_CLAUSE_LASTPRIVATE
:
5440 if (by_ref
|| omp_is_reference (val
))
5442 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5449 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
5454 case OMP_CLAUSE_REDUCTION
:
5456 if (val
== OMP_CLAUSE_DECL (c
))
5457 do_out
= !(by_ref
|| omp_is_reference (val
));
5459 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
5468 ref
= build_sender_ref (val
, ctx
);
5469 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
5470 gimplify_assign (ref
, x
, ilist
);
5471 if (is_task_ctx (ctx
))
5472 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
5477 ref
= build_sender_ref (val
, ctx
);
5478 gimplify_assign (var
, ref
, olist
);
5483 /* Generate code to implement SHARED from the sender (aka parent)
5484 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5485 list things that got automatically shared. */
5488 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
5490 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
5492 if (ctx
->record_type
== NULL
)
5495 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
5496 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
5498 ovar
= DECL_ABSTRACT_ORIGIN (f
);
5499 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
5502 nvar
= maybe_lookup_decl (ovar
, ctx
);
5503 if (!nvar
|| !DECL_HAS_VALUE_EXPR_P (nvar
))
5506 /* If CTX is a nested parallel directive. Find the immediately
5507 enclosing parallel or workshare construct that contains a
5508 mapping for OVAR. */
5509 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
5511 t
= omp_member_access_dummy_var (var
);
5514 var
= DECL_VALUE_EXPR (var
);
5515 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
5517 var
= unshare_and_remap (var
, t
, o
);
5519 var
= unshare_expr (var
);
5522 if (use_pointer_for_field (ovar
, ctx
))
5524 x
= build_sender_ref (ovar
, ctx
);
5525 var
= build_fold_addr_expr (var
);
5526 gimplify_assign (x
, var
, ilist
);
5530 x
= build_sender_ref (ovar
, ctx
);
5531 gimplify_assign (x
, var
, ilist
);
5533 if (!TREE_READONLY (var
)
5534 /* We don't need to receive a new reference to a result
5535 or parm decl. In fact we may not store to it as we will
5536 invalidate any pending RSO and generate wrong gimple
5538 && !((TREE_CODE (var
) == RESULT_DECL
5539 || TREE_CODE (var
) == PARM_DECL
)
5540 && DECL_BY_REFERENCE (var
)))
5542 x
= build_sender_ref (ovar
, ctx
);
5543 gimplify_assign (var
, x
, olist
);
5549 /* Emit an OpenACC head marker call, encapulating the partitioning and
5550 other information that must be processed by the target compiler.
5551 Return the maximum number of dimensions the associated loop might
5552 be partitioned over. */
5555 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
5556 gimple_seq
*seq
, omp_context
*ctx
)
5558 unsigned levels
= 0;
5560 tree gang_static
= NULL_TREE
;
5561 auto_vec
<tree
, 5> args
;
5563 args
.quick_push (build_int_cst
5564 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
5565 args
.quick_push (ddvar
);
5566 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5568 switch (OMP_CLAUSE_CODE (c
))
5570 case OMP_CLAUSE_GANG
:
5571 tag
|= OLF_DIM_GANG
;
5572 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
5573 /* static:* is represented by -1, and we can ignore it, as
5574 scheduling is always static. */
5575 if (gang_static
&& integer_minus_onep (gang_static
))
5576 gang_static
= NULL_TREE
;
5580 case OMP_CLAUSE_WORKER
:
5581 tag
|= OLF_DIM_WORKER
;
5585 case OMP_CLAUSE_VECTOR
:
5586 tag
|= OLF_DIM_VECTOR
;
5590 case OMP_CLAUSE_SEQ
:
5594 case OMP_CLAUSE_AUTO
:
5598 case OMP_CLAUSE_INDEPENDENT
:
5599 tag
|= OLF_INDEPENDENT
;
5609 if (DECL_P (gang_static
))
5610 gang_static
= build_outer_var_ref (gang_static
, ctx
);
5611 tag
|= OLF_GANG_STATIC
;
5614 /* In a parallel region, loops are implicitly INDEPENDENT. */
5615 omp_context
*tgt
= enclosing_target_ctx (ctx
);
5616 if (!tgt
|| is_oacc_parallel (tgt
))
5617 tag
|= OLF_INDEPENDENT
;
5619 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR is implicitly AUTO. */
5620 if (!(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1) << OLF_DIM_BASE
)
5624 /* Ensure at least one level. */
5628 args
.quick_push (build_int_cst (integer_type_node
, levels
));
5629 args
.quick_push (build_int_cst (integer_type_node
, tag
));
5631 args
.quick_push (gang_static
);
5633 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
5634 gimple_set_location (call
, loc
);
5635 gimple_set_lhs (call
, ddvar
);
5636 gimple_seq_add_stmt (seq
, call
);
5641 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
5642 partitioning level of the enclosed region. */
5645 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
5646 tree tofollow
, gimple_seq
*seq
)
5648 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
5649 : IFN_UNIQUE_OACC_TAIL_MARK
);
5650 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
5651 int nargs
= 2 + (tofollow
!= NULL_TREE
);
5652 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
5653 marker
, ddvar
, tofollow
);
5654 gimple_set_location (call
, loc
);
5655 gimple_set_lhs (call
, ddvar
);
5656 gimple_seq_add_stmt (seq
, call
);
5659 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5660 the loop clauses, from which we extract reductions. Initialize
5664 lower_oacc_head_tail (location_t loc
, tree clauses
,
5665 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
5668 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
5669 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
5671 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
5672 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
5673 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
5676 for (unsigned done
= 1; count
; count
--, done
++)
5678 gimple_seq fork_seq
= NULL
;
5679 gimple_seq join_seq
= NULL
;
5681 tree place
= build_int_cst (integer_type_node
, -1);
5682 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
5683 fork_kind
, ddvar
, place
);
5684 gimple_set_location (fork
, loc
);
5685 gimple_set_lhs (fork
, ddvar
);
5687 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
5688 join_kind
, ddvar
, place
);
5689 gimple_set_location (join
, loc
);
5690 gimple_set_lhs (join
, ddvar
);
5692 /* Mark the beginning of this level sequence. */
5694 lower_oacc_loop_marker (loc
, ddvar
, true,
5695 build_int_cst (integer_type_node
, count
),
5697 lower_oacc_loop_marker (loc
, ddvar
, false,
5698 build_int_cst (integer_type_node
, done
),
5701 lower_oacc_reductions (loc
, clauses
, place
, inner
,
5702 fork
, join
, &fork_seq
, &join_seq
, ctx
);
5704 /* Append this level to head. */
5705 gimple_seq_add_seq (head
, fork_seq
);
5706 /* Prepend it to tail. */
5707 gimple_seq_add_seq (&join_seq
, *tail
);
5713 /* Mark the end of the sequence. */
5714 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
5715 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
5718 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5719 catch handler and return it. This prevents programs from violating the
5720 structured block semantics with throws. */
5723 maybe_catch_exception (gimple_seq body
)
5728 if (!flag_exceptions
)
5731 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
5732 decl
= lang_hooks
.eh_protect_cleanup_actions ();
5734 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
5736 g
= gimple_build_eh_must_not_throw (decl
);
5737 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
5740 return gimple_seq_alloc_with_stmt (g
);
5744 /* Routines to lower OMP directives into OMP-GIMPLE. */
5746 /* If ctx is a worksharing context inside of a cancellable parallel
5747 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5748 and conditional branch to parallel's cancel_label to handle
5749 cancellation in the implicit barrier. */
5752 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple_seq
*body
)
5754 gimple
*omp_return
= gimple_seq_last_stmt (*body
);
5755 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
5756 if (gimple_omp_return_nowait_p (omp_return
))
5759 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_PARALLEL
5760 && ctx
->outer
->cancellable
)
5762 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
5763 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
5764 tree lhs
= create_tmp_var (c_bool_type
);
5765 gimple_omp_return_set_lhs (omp_return
, lhs
);
5766 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
5767 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
5768 fold_convert (c_bool_type
,
5769 boolean_false_node
),
5770 ctx
->outer
->cancel_label
, fallthru_label
);
5771 gimple_seq_add_stmt (body
, g
);
5772 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
5776 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5777 CTX is the enclosing OMP context for the current statement. */
5780 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
5782 tree block
, control
;
5783 gimple_stmt_iterator tgsi
;
5784 gomp_sections
*stmt
;
5786 gbind
*new_stmt
, *bind
;
5787 gimple_seq ilist
, dlist
, olist
, new_body
;
5789 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
5791 push_gimplify_context ();
5795 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
5796 &ilist
, &dlist
, ctx
, NULL
);
5798 new_body
= gimple_omp_body (stmt
);
5799 gimple_omp_set_body (stmt
, NULL
);
5800 tgsi
= gsi_start (new_body
);
5801 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
5806 sec_start
= gsi_stmt (tgsi
);
5807 sctx
= maybe_lookup_ctx (sec_start
);
5810 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
5811 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
5812 GSI_CONTINUE_LINKING
);
5813 gimple_omp_set_body (sec_start
, NULL
);
5815 if (gsi_one_before_end_p (tgsi
))
5817 gimple_seq l
= NULL
;
5818 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
5820 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
5821 gimple_omp_section_set_last (sec_start
);
5824 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
5825 GSI_CONTINUE_LINKING
);
5828 block
= make_node (BLOCK
);
5829 bind
= gimple_build_bind (NULL
, new_body
, block
);
5832 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
, ctx
);
5834 block
= make_node (BLOCK
);
5835 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
5836 gsi_replace (gsi_p
, new_stmt
, true);
5838 pop_gimplify_context (new_stmt
);
5839 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
5840 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
5841 if (BLOCK_VARS (block
))
5842 TREE_USED (block
) = 1;
5845 gimple_seq_add_seq (&new_body
, ilist
);
5846 gimple_seq_add_stmt (&new_body
, stmt
);
5847 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
5848 gimple_seq_add_stmt (&new_body
, bind
);
5850 control
= create_tmp_var (unsigned_type_node
, ".section");
5851 t
= gimple_build_omp_continue (control
, control
);
5852 gimple_omp_sections_set_control (stmt
, control
);
5853 gimple_seq_add_stmt (&new_body
, t
);
5855 gimple_seq_add_seq (&new_body
, olist
);
5856 if (ctx
->cancellable
)
5857 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
5858 gimple_seq_add_seq (&new_body
, dlist
);
5860 new_body
= maybe_catch_exception (new_body
);
5862 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
5863 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
5864 t
= gimple_build_omp_return (nowait
);
5865 gimple_seq_add_stmt (&new_body
, t
);
5866 maybe_add_implicit_barrier_cancel (ctx
, &new_body
);
5868 gimple_bind_set_body (new_stmt
, new_body
);
5872 /* A subroutine of lower_omp_single. Expand the simple form of
5873 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5875 if (GOMP_single_start ())
5877 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5879 FIXME. It may be better to delay expanding the logic of this until
5880 pass_expand_omp. The expanded logic may make the job more difficult
5881 to a synchronization analysis pass. */
5884 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
5886 location_t loc
= gimple_location (single_stmt
);
5887 tree tlabel
= create_artificial_label (loc
);
5888 tree flabel
= create_artificial_label (loc
);
5889 gimple
*call
, *cond
;
5892 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
5893 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
5894 call
= gimple_build_call (decl
, 0);
5895 gimple_call_set_lhs (call
, lhs
);
5896 gimple_seq_add_stmt (pre_p
, call
);
5898 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
5899 fold_convert_loc (loc
, TREE_TYPE (lhs
),
5902 gimple_seq_add_stmt (pre_p
, cond
);
5903 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
5904 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
5905 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
5909 /* A subroutine of lower_omp_single. Expand the simple form of
5910 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
5912 #pragma omp single copyprivate (a, b, c)
5914 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5917 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5923 GOMP_single_copy_end (©out);
5934 FIXME. It may be better to delay expanding the logic of this until
5935 pass_expand_omp. The expanded logic may make the job more difficult
5936 to a synchronization analysis pass. */
5939 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
5942 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
5943 gimple_seq copyin_seq
;
5944 location_t loc
= gimple_location (single_stmt
);
5946 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
5948 ptr_type
= build_pointer_type (ctx
->record_type
);
5949 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
5951 l0
= create_artificial_label (loc
);
5952 l1
= create_artificial_label (loc
);
5953 l2
= create_artificial_label (loc
);
5955 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
5956 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
5957 t
= fold_convert_loc (loc
, ptr_type
, t
);
5958 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
5960 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
5961 build_int_cst (ptr_type
, 0));
5962 t
= build3 (COND_EXPR
, void_type_node
, t
,
5963 build_and_jump (&l0
), build_and_jump (&l1
));
5964 gimplify_and_add (t
, pre_p
);
5966 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
5968 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
5971 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
5974 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
5975 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
5976 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
5977 gimplify_and_add (t
, pre_p
);
5979 t
= build_and_jump (&l2
);
5980 gimplify_and_add (t
, pre_p
);
5982 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
5984 gimple_seq_add_seq (pre_p
, copyin_seq
);
5986 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
5990 /* Expand code for an OpenMP single directive. */
5993 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
5996 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
5998 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
6000 push_gimplify_context ();
6002 block
= make_node (BLOCK
);
6003 bind
= gimple_build_bind (NULL
, NULL
, block
);
6004 gsi_replace (gsi_p
, bind
, true);
6007 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
6008 &bind_body
, &dlist
, ctx
, NULL
);
6009 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
6011 gimple_seq_add_stmt (&bind_body
, single_stmt
);
6013 if (ctx
->record_type
)
6014 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
6016 lower_omp_single_simple (single_stmt
, &bind_body
);
6018 gimple_omp_set_body (single_stmt
, NULL
);
6020 gimple_seq_add_seq (&bind_body
, dlist
);
6022 bind_body
= maybe_catch_exception (bind_body
);
6024 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
6025 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
6026 gimple
*g
= gimple_build_omp_return (nowait
);
6027 gimple_seq_add_stmt (&bind_body_tail
, g
);
6028 maybe_add_implicit_barrier_cancel (ctx
, &bind_body_tail
);
6029 if (ctx
->record_type
)
6031 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
6032 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
6033 TREE_THIS_VOLATILE (clobber
) = 1;
6034 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
6035 clobber
), GSI_SAME_STMT
);
6037 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
6038 gimple_bind_set_body (bind
, bind_body
);
6040 pop_gimplify_context (bind
);
6042 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6043 BLOCK_VARS (block
) = ctx
->block_vars
;
6044 if (BLOCK_VARS (block
))
6045 TREE_USED (block
) = 1;
6049 /* Expand code for an OpenMP master directive. */
6052 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6054 tree block
, lab
= NULL
, x
, bfn_decl
;
6055 gimple
*stmt
= gsi_stmt (*gsi_p
);
6057 location_t loc
= gimple_location (stmt
);
6060 push_gimplify_context ();
6062 block
= make_node (BLOCK
);
6063 bind
= gimple_build_bind (NULL
, NULL
, block
);
6064 gsi_replace (gsi_p
, bind
, true);
6065 gimple_bind_add_stmt (bind
, stmt
);
6067 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
6068 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
6069 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, integer_zero_node
);
6070 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
6072 gimplify_and_add (x
, &tseq
);
6073 gimple_bind_add_seq (bind
, tseq
);
6075 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6076 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
6077 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
6078 gimple_omp_set_body (stmt
, NULL
);
6080 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
6082 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
6084 pop_gimplify_context (bind
);
6086 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6087 BLOCK_VARS (block
) = ctx
->block_vars
;
6091 /* Expand code for an OpenMP taskgroup directive. */
6094 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6096 gimple
*stmt
= gsi_stmt (*gsi_p
);
6099 tree block
= make_node (BLOCK
);
6101 bind
= gimple_build_bind (NULL
, NULL
, block
);
6102 gsi_replace (gsi_p
, bind
, true);
6103 gimple_bind_add_stmt (bind
, stmt
);
6105 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
6107 gimple_bind_add_stmt (bind
, x
);
6109 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6110 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
6111 gimple_omp_set_body (stmt
, NULL
);
6113 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
6115 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6116 BLOCK_VARS (block
) = ctx
->block_vars
;
6120 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6123 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
6126 struct omp_for_data fd
;
6127 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
6130 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
6131 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
6132 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
6136 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
6137 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
6138 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
6139 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
6141 /* Merge depend clauses from multiple adjacent
6142 #pragma omp ordered depend(sink:...) constructs
6143 into one #pragma omp ordered depend(sink:...), so that
6144 we can optimize them together. */
6145 gimple_stmt_iterator gsi
= *gsi_p
;
6147 while (!gsi_end_p (gsi
))
6149 gimple
*stmt
= gsi_stmt (gsi
);
6150 if (is_gimple_debug (stmt
)
6151 || gimple_code (stmt
) == GIMPLE_NOP
)
6156 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
6158 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
6159 c
= gimple_omp_ordered_clauses (ord_stmt2
);
6161 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
6162 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
6165 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
6167 gsi_remove (&gsi
, true);
6171 /* Canonicalize sink dependence clauses into one folded clause if
6174 The basic algorithm is to create a sink vector whose first
6175 element is the GCD of all the first elements, and whose remaining
6176 elements are the minimum of the subsequent columns.
6178 We ignore dependence vectors whose first element is zero because
6179 such dependencies are known to be executed by the same thread.
6181 We take into account the direction of the loop, so a minimum
6182 becomes a maximum if the loop is iterating forwards. We also
6183 ignore sink clauses where the loop direction is unknown, or where
6184 the offsets are clearly invalid because they are not a multiple
6185 of the loop increment.
6189 #pragma omp for ordered(2)
6190 for (i=0; i < N; ++i)
6191 for (j=0; j < M; ++j)
6193 #pragma omp ordered \
6194 depend(sink:i-8,j-2) \
6195 depend(sink:i,j-1) \ // Completely ignored because i+0.
6196 depend(sink:i-4,j-3) \
6197 depend(sink:i-6,j-4)
6198 #pragma omp ordered depend(source)
6203 depend(sink:-gcd(8,4,6),-min(2,3,4))
6208 /* FIXME: Computing GCD's where the first element is zero is
6209 non-trivial in the presence of collapsed loops. Do this later. */
6210 if (fd
.collapse
> 1)
6213 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
6214 memset (folded_deps
, 0, sizeof (*folded_deps
) * (2 * len
- 1));
6215 tree folded_dep
= NULL_TREE
;
6216 /* TRUE if the first dimension's offset is negative. */
6217 bool neg_offset_p
= false;
6219 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
6221 while ((c
= *list_p
) != NULL
)
6223 bool remove
= false;
6225 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
6226 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
6227 goto next_ordered_clause
;
6230 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
6231 vec
&& TREE_CODE (vec
) == TREE_LIST
;
6232 vec
= TREE_CHAIN (vec
), ++i
)
6234 gcc_assert (i
< len
);
6236 /* omp_extract_for_data has canonicalized the condition. */
6237 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
6238 || fd
.loops
[i
].cond_code
== GT_EXPR
);
6239 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
6240 bool maybe_lexically_later
= true;
6242 /* While the committee makes up its mind, bail if we have any
6243 non-constant steps. */
6244 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
6245 goto lower_omp_ordered_ret
;
6247 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
6248 if (POINTER_TYPE_P (itype
))
6250 wide_int offset
= wide_int::from (TREE_PURPOSE (vec
),
6251 TYPE_PRECISION (itype
),
6254 /* Ignore invalid offsets that are not multiples of the step. */
6255 if (!wi::multiple_of_p
6256 (wi::abs (offset
), wi::abs ((wide_int
) fd
.loops
[i
].step
),
6259 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
6260 "ignoring sink clause with offset that is not "
6261 "a multiple of the loop step");
6263 goto next_ordered_clause
;
6266 /* Calculate the first dimension. The first dimension of
6267 the folded dependency vector is the GCD of the first
6268 elements, while ignoring any first elements whose offset
6272 /* Ignore dependence vectors whose first dimension is 0. */
6276 goto next_ordered_clause
;
6280 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
6282 error_at (OMP_CLAUSE_LOCATION (c
),
6283 "first offset must be in opposite direction "
6284 "of loop iterations");
6285 goto lower_omp_ordered_ret
;
6289 neg_offset_p
= forward
;
6290 /* Initialize the first time around. */
6291 if (folded_dep
== NULL_TREE
)
6294 folded_deps
[0] = offset
;
6297 folded_deps
[0] = wi::gcd (folded_deps
[0],
6301 /* Calculate minimum for the remaining dimensions. */
6304 folded_deps
[len
+ i
- 1] = offset
;
6305 if (folded_dep
== c
)
6306 folded_deps
[i
] = offset
;
6307 else if (maybe_lexically_later
6308 && !wi::eq_p (folded_deps
[i
], offset
))
6310 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
6314 for (j
= 1; j
<= i
; j
++)
6315 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
6318 maybe_lexically_later
= false;
6322 gcc_assert (i
== len
);
6326 next_ordered_clause
:
6328 *list_p
= OMP_CLAUSE_CHAIN (c
);
6330 list_p
= &OMP_CLAUSE_CHAIN (c
);
6336 folded_deps
[0] = -folded_deps
[0];
6338 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
6339 if (POINTER_TYPE_P (itype
))
6342 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
6343 = wide_int_to_tree (itype
, folded_deps
[0]);
6344 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
6345 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
6348 lower_omp_ordered_ret
:
6350 /* Ordered without clauses is #pragma omp threads, while we want
6351 a nop instead if we remove all clauses. */
6352 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
6353 gsi_replace (gsi_p
, gimple_build_nop (), true);
6357 /* Expand code for an OpenMP ordered directive. */
6360 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6363 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
6364 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
6367 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
6369 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6372 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
6373 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
6374 OMP_CLAUSE_THREADS
);
6376 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
6379 /* FIXME: This is needs to be moved to the expansion to verify various
6380 conditions only testable on cfg with dominators computed, and also
6381 all the depend clauses to be merged still might need to be available
6382 for the runtime checks. */
6384 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
6388 push_gimplify_context ();
6390 block
= make_node (BLOCK
);
6391 bind
= gimple_build_bind (NULL
, NULL
, block
);
6392 gsi_replace (gsi_p
, bind
, true);
6393 gimple_bind_add_stmt (bind
, stmt
);
6397 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
6398 build_int_cst (NULL_TREE
, threads
));
6399 cfun
->has_simduid_loops
= true;
6402 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
6404 gimple_bind_add_stmt (bind
, x
);
6406 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
6409 counter
= create_tmp_var (integer_type_node
);
6410 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
6411 gimple_call_set_lhs (g
, counter
);
6412 gimple_bind_add_stmt (bind
, g
);
6414 body
= create_artificial_label (UNKNOWN_LOCATION
);
6415 test
= create_artificial_label (UNKNOWN_LOCATION
);
6416 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
6418 tree simt_pred
= create_tmp_var (integer_type_node
);
6419 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
6420 gimple_call_set_lhs (g
, simt_pred
);
6421 gimple_bind_add_stmt (bind
, g
);
6423 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
6424 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
6425 gimple_bind_add_stmt (bind
, g
);
6427 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
6429 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6430 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
6431 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
6432 gimple_omp_set_body (stmt
, NULL
);
6436 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
6437 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
6438 gimple_bind_add_stmt (bind
, g
);
6440 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
6441 tree nonneg
= create_tmp_var (integer_type_node
);
6442 gimple_seq tseq
= NULL
;
6443 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
6444 gimple_bind_add_seq (bind
, tseq
);
6446 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
6447 gimple_call_set_lhs (g
, nonneg
);
6448 gimple_bind_add_stmt (bind
, g
);
6450 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6451 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
6452 gimple_bind_add_stmt (bind
, g
);
6454 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
6457 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
6458 build_int_cst (NULL_TREE
, threads
));
6460 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
6462 gimple_bind_add_stmt (bind
, x
);
6464 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
6466 pop_gimplify_context (bind
);
6468 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6469 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
6473 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6474 substitution of a couple of function calls. But in the NAMED case,
6475 requires that languages coordinate a symbol name. It is therefore
6476 best put here in common code. */
6478 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
6481 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6484 tree name
, lock
, unlock
;
6485 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
6487 location_t loc
= gimple_location (stmt
);
6490 name
= gimple_omp_critical_name (stmt
);
6495 if (!critical_name_mutexes
)
6496 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
6498 tree
*n
= critical_name_mutexes
->get (name
);
6503 decl
= create_tmp_var_raw (ptr_type_node
);
6505 new_str
= ACONCAT ((".gomp_critical_user_",
6506 IDENTIFIER_POINTER (name
), NULL
));
6507 DECL_NAME (decl
) = get_identifier (new_str
);
6508 TREE_PUBLIC (decl
) = 1;
6509 TREE_STATIC (decl
) = 1;
6510 DECL_COMMON (decl
) = 1;
6511 DECL_ARTIFICIAL (decl
) = 1;
6512 DECL_IGNORED_P (decl
) = 1;
6514 varpool_node::finalize_decl (decl
);
6516 critical_name_mutexes
->put (name
, decl
);
6521 /* If '#pragma omp critical' is inside offloaded region or
6522 inside function marked as offloadable, the symbol must be
6523 marked as offloadable too. */
6525 if (cgraph_node::get (current_function_decl
)->offloadable
)
6526 varpool_node::get_create (decl
)->offloadable
= 1;
6528 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
6529 if (is_gimple_omp_offloaded (octx
->stmt
))
6531 varpool_node::get_create (decl
)->offloadable
= 1;
6535 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
6536 lock
= build_call_expr_loc (loc
, lock
, 1,
6537 build_fold_addr_expr_loc (loc
, decl
));
6539 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
6540 unlock
= build_call_expr_loc (loc
, unlock
, 1,
6541 build_fold_addr_expr_loc (loc
, decl
));
6545 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
6546 lock
= build_call_expr_loc (loc
, lock
, 0);
6548 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
6549 unlock
= build_call_expr_loc (loc
, unlock
, 0);
6552 push_gimplify_context ();
6554 block
= make_node (BLOCK
);
6555 bind
= gimple_build_bind (NULL
, NULL
, block
);
6556 gsi_replace (gsi_p
, bind
, true);
6557 gimple_bind_add_stmt (bind
, stmt
);
6559 tbody
= gimple_bind_body (bind
);
6560 gimplify_and_add (lock
, &tbody
);
6561 gimple_bind_set_body (bind
, tbody
);
6563 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6564 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
6565 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
6566 gimple_omp_set_body (stmt
, NULL
);
6568 tbody
= gimple_bind_body (bind
);
6569 gimplify_and_add (unlock
, &tbody
);
6570 gimple_bind_set_body (bind
, tbody
);
6572 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
6574 pop_gimplify_context (bind
);
6575 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6576 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
6579 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6580 for a lastprivate clause. Given a loop control predicate of (V
6581 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6582 is appended to *DLIST, iterator initialization is appended to
6586 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
6587 gimple_seq
*dlist
, struct omp_context
*ctx
)
6589 tree clauses
, cond
, vinit
;
6590 enum tree_code cond_code
;
6593 cond_code
= fd
->loop
.cond_code
;
6594 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
6596 /* When possible, use a strict equality expression. This can let VRP
6597 type optimizations deduce the value and remove a copy. */
6598 if (tree_fits_shwi_p (fd
->loop
.step
))
6600 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
6601 if (step
== 1 || step
== -1)
6602 cond_code
= EQ_EXPR
;
6605 if (gimple_omp_for_kind (fd
->for_stmt
) == GF_OMP_FOR_KIND_GRID_LOOP
6606 || gimple_omp_for_grid_phony (fd
->for_stmt
))
6607 cond
= omp_grid_lastprivate_predicate (fd
);
6610 tree n2
= fd
->loop
.n2
;
6611 if (fd
->collapse
> 1
6612 && TREE_CODE (n2
) != INTEGER_CST
6613 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
6615 struct omp_context
*taskreg_ctx
= NULL
;
6616 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
6618 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
6619 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
6620 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
6622 if (gimple_omp_for_combined_into_p (gfor
))
6624 gcc_assert (ctx
->outer
->outer
6625 && is_parallel_ctx (ctx
->outer
->outer
));
6626 taskreg_ctx
= ctx
->outer
->outer
;
6630 struct omp_for_data outer_fd
;
6631 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
6632 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
6635 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
6636 taskreg_ctx
= ctx
->outer
->outer
;
6638 else if (is_taskreg_ctx (ctx
->outer
))
6639 taskreg_ctx
= ctx
->outer
;
6643 tree taskreg_clauses
6644 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
6645 tree innerc
= omp_find_clause (taskreg_clauses
,
6646 OMP_CLAUSE__LOOPTEMP_
);
6647 gcc_assert (innerc
);
6648 for (i
= 0; i
< fd
->collapse
; i
++)
6650 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
6651 OMP_CLAUSE__LOOPTEMP_
);
6652 gcc_assert (innerc
);
6654 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
6655 OMP_CLAUSE__LOOPTEMP_
);
6657 n2
= fold_convert (TREE_TYPE (n2
),
6658 lookup_decl (OMP_CLAUSE_DECL (innerc
),
6662 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
6665 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
6667 lower_lastprivate_clauses (clauses
, cond
, &stmts
, ctx
);
6668 if (!gimple_seq_empty_p (stmts
))
6670 gimple_seq_add_seq (&stmts
, *dlist
);
6673 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6674 vinit
= fd
->loop
.n1
;
6675 if (cond_code
== EQ_EXPR
6676 && tree_fits_shwi_p (fd
->loop
.n2
)
6677 && ! integer_zerop (fd
->loop
.n2
))
6678 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
6680 vinit
= unshare_expr (vinit
);
6682 /* Initialize the iterator variable, so that threads that don't execute
6683 any iterations don't execute the lastprivate clauses by accident. */
6684 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
6689 /* Lower code for an OMP loop directive. */
6692 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6695 struct omp_for_data fd
, *fdp
= NULL
;
6696 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
6698 gimple_seq omp_for_body
, body
, dlist
;
6699 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
6702 push_gimplify_context ();
6704 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
6706 block
= make_node (BLOCK
);
6707 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
6708 /* Replace at gsi right away, so that 'stmt' is no member
6709 of a sequence anymore as we're going to add to a different
6711 gsi_replace (gsi_p
, new_stmt
, true);
6713 /* Move declaration of temporaries in the loop body before we make
6715 omp_for_body
= gimple_omp_body (stmt
);
6716 if (!gimple_seq_empty_p (omp_for_body
)
6717 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
6720 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
6721 tree vars
= gimple_bind_vars (inner_bind
);
6722 gimple_bind_append_vars (new_stmt
, vars
);
6723 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6724 keep them on the inner_bind and it's block. */
6725 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
6726 if (gimple_bind_block (inner_bind
))
6727 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
6730 if (gimple_omp_for_combined_into_p (stmt
))
6732 omp_extract_for_data (stmt
, &fd
, NULL
);
6735 /* We need two temporaries with fd.loop.v type (istart/iend)
6736 and then (fd.collapse - 1) temporaries with the same
6737 type for count2 ... countN-1 vars if not constant. */
6739 tree type
= fd
.iter_type
;
6741 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
6742 count
+= fd
.collapse
- 1;
6744 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
6745 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
6746 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
6750 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
6751 OMP_CLAUSE__LOOPTEMP_
);
6752 for (i
= 0; i
< count
; i
++)
6757 gcc_assert (outerc
);
6758 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
6759 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
6760 OMP_CLAUSE__LOOPTEMP_
);
6764 temp
= create_tmp_var (type
);
6765 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
6767 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
6768 OMP_CLAUSE_DECL (*pc
) = temp
;
6769 pc
= &OMP_CLAUSE_CHAIN (*pc
);
6774 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6777 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
6779 gimple_seq_add_seq (&body
, gimple_omp_for_pre_body (stmt
));
6781 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6783 /* Lower the header expressions. At this point, we can assume that
6784 the header is of the form:
6786 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6788 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6789 using the .omp_data_s mapping, if needed. */
6790 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
6792 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
6793 if (!is_gimple_min_invariant (*rhs_p
))
6794 *rhs_p
= get_formal_tmp_var (*rhs_p
, &body
);
6796 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
6797 if (!is_gimple_min_invariant (*rhs_p
))
6798 *rhs_p
= get_formal_tmp_var (*rhs_p
, &body
);
6800 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
6801 if (!is_gimple_min_invariant (*rhs_p
))
6802 *rhs_p
= get_formal_tmp_var (*rhs_p
, &body
);
6805 /* Once lowered, extract the bounds and clauses. */
6806 omp_extract_for_data (stmt
, &fd
, NULL
);
6808 if (is_gimple_omp_oacc (ctx
->stmt
)
6809 && !ctx_in_oacc_kernels_region (ctx
))
6810 lower_oacc_head_tail (gimple_location (stmt
),
6811 gimple_omp_for_clauses (stmt
),
6812 &oacc_head
, &oacc_tail
, ctx
);
6814 /* Add OpenACC partitioning and reduction markers just before the loop. */
6816 gimple_seq_add_seq (&body
, oacc_head
);
6818 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, ctx
);
6820 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
6821 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
6822 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6823 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
6825 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6826 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
6827 OMP_CLAUSE_LINEAR_STEP (c
)
6828 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
6832 bool phony_loop
= (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
6833 && gimple_omp_for_grid_phony (stmt
));
6835 gimple_seq_add_stmt (&body
, stmt
);
6836 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
6839 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
6842 /* After the loop, add exit clauses. */
6843 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, ctx
);
6845 if (ctx
->cancellable
)
6846 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
6848 gimple_seq_add_seq (&body
, dlist
);
6850 body
= maybe_catch_exception (body
);
6854 /* Region exit marker goes at the end of the loop body. */
6855 gimple_seq_add_stmt (&body
, gimple_build_omp_return (fd
.have_nowait
));
6856 maybe_add_implicit_barrier_cancel (ctx
, &body
);
6859 /* Add OpenACC joining and reduction markers just after the loop. */
6861 gimple_seq_add_seq (&body
, oacc_tail
);
6863 pop_gimplify_context (new_stmt
);
6865 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
6866 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
6867 if (BLOCK_VARS (block
))
6868 TREE_USED (block
) = 1;
6870 gimple_bind_set_body (new_stmt
, body
);
6871 gimple_omp_set_body (stmt
, NULL
);
6872 gimple_omp_for_set_pre_body (stmt
, NULL
);
6875 /* Callback for walk_stmts. Check if the current statement only contains
6876 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
6879 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
6880 bool *handled_ops_p
,
6881 struct walk_stmt_info
*wi
)
6883 int *info
= (int *) wi
->info
;
6884 gimple
*stmt
= gsi_stmt (*gsi_p
);
6886 *handled_ops_p
= true;
6887 switch (gimple_code (stmt
))
6891 case GIMPLE_OMP_FOR
:
6892 case GIMPLE_OMP_SECTIONS
:
6893 *info
= *info
== 0 ? 1 : -1;
6902 struct omp_taskcopy_context
6904 /* This field must be at the beginning, as we do "inheritance": Some
6905 callback functions for tree-inline.c (e.g., omp_copy_decl)
6906 receive a copy_body_data pointer that is up-casted to an
6907 omp_context pointer. */
6913 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
6915 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
6917 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
6918 return create_tmp_var (TREE_TYPE (var
));
6924 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
6926 tree name
, new_fields
= NULL
, type
, f
;
6928 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
6929 name
= DECL_NAME (TYPE_NAME (orig_type
));
6930 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
6931 TYPE_DECL
, name
, type
);
6932 TYPE_NAME (type
) = name
;
6934 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
6936 tree new_f
= copy_node (f
);
6937 DECL_CONTEXT (new_f
) = type
;
6938 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
6939 TREE_CHAIN (new_f
) = new_fields
;
6940 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
6941 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
6942 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
6945 tcctx
->cb
.decl_map
->put (f
, new_f
);
6947 TYPE_FIELDS (type
) = nreverse (new_fields
);
6952 /* Create task copyfn. */
6955 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
6957 struct function
*child_cfun
;
6958 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
6959 tree record_type
, srecord_type
, bind
, list
;
6960 bool record_needs_remap
= false, srecord_needs_remap
= false;
6962 struct omp_taskcopy_context tcctx
;
6963 location_t loc
= gimple_location (task_stmt
);
6965 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
6966 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
6967 gcc_assert (child_cfun
->cfg
== NULL
);
6968 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
6970 /* Reset DECL_CONTEXT on function arguments. */
6971 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
6972 DECL_CONTEXT (t
) = child_fn
;
6974 /* Populate the function. */
6975 push_gimplify_context ();
6976 push_cfun (child_cfun
);
6978 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
6979 TREE_SIDE_EFFECTS (bind
) = 1;
6981 DECL_SAVED_TREE (child_fn
) = bind
;
6982 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
6984 /* Remap src and dst argument types if needed. */
6985 record_type
= ctx
->record_type
;
6986 srecord_type
= ctx
->srecord_type
;
6987 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
6988 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
6990 record_needs_remap
= true;
6993 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
6994 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
6996 srecord_needs_remap
= true;
7000 if (record_needs_remap
|| srecord_needs_remap
)
7002 memset (&tcctx
, '\0', sizeof (tcctx
));
7003 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
7004 tcctx
.cb
.dst_fn
= child_fn
;
7005 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
7006 gcc_checking_assert (tcctx
.cb
.src_node
);
7007 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
7008 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
7009 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
7010 tcctx
.cb
.eh_lp_nr
= 0;
7011 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
7012 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
7015 if (record_needs_remap
)
7016 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
7017 if (srecord_needs_remap
)
7018 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
7021 tcctx
.cb
.decl_map
= NULL
;
7023 arg
= DECL_ARGUMENTS (child_fn
);
7024 TREE_TYPE (arg
) = build_pointer_type (record_type
);
7025 sarg
= DECL_CHAIN (arg
);
7026 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
7028 /* First pass: initialize temporaries used in record_type and srecord_type
7029 sizes and field offsets. */
7030 if (tcctx
.cb
.decl_map
)
7031 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
7032 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7036 decl
= OMP_CLAUSE_DECL (c
);
7037 p
= tcctx
.cb
.decl_map
->get (decl
);
7040 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
7041 sf
= (tree
) n
->value
;
7042 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7043 src
= build_simple_mem_ref_loc (loc
, sarg
);
7044 src
= omp_build_component_ref (src
, sf
);
7045 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
7046 append_to_statement_list (t
, &list
);
7049 /* Second pass: copy shared var pointers and copy construct non-VLA
7050 firstprivate vars. */
7051 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
7052 switch (OMP_CLAUSE_CODE (c
))
7055 case OMP_CLAUSE_SHARED
:
7056 decl
= OMP_CLAUSE_DECL (c
);
7057 key
= (splay_tree_key
) decl
;
7058 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7059 key
= (splay_tree_key
) &DECL_UID (decl
);
7060 n
= splay_tree_lookup (ctx
->field_map
, key
);
7063 f
= (tree
) n
->value
;
7064 if (tcctx
.cb
.decl_map
)
7065 f
= *tcctx
.cb
.decl_map
->get (f
);
7066 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
7067 sf
= (tree
) n
->value
;
7068 if (tcctx
.cb
.decl_map
)
7069 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7070 src
= build_simple_mem_ref_loc (loc
, sarg
);
7071 src
= omp_build_component_ref (src
, sf
);
7072 dst
= build_simple_mem_ref_loc (loc
, arg
);
7073 dst
= omp_build_component_ref (dst
, f
);
7074 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
7075 append_to_statement_list (t
, &list
);
7077 case OMP_CLAUSE_FIRSTPRIVATE
:
7078 decl
= OMP_CLAUSE_DECL (c
);
7079 if (is_variable_sized (decl
))
7081 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
7084 f
= (tree
) n
->value
;
7085 if (tcctx
.cb
.decl_map
)
7086 f
= *tcctx
.cb
.decl_map
->get (f
);
7087 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
7090 sf
= (tree
) n
->value
;
7091 if (tcctx
.cb
.decl_map
)
7092 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7093 src
= build_simple_mem_ref_loc (loc
, sarg
);
7094 src
= omp_build_component_ref (src
, sf
);
7095 if (use_pointer_for_field (decl
, NULL
) || omp_is_reference (decl
))
7096 src
= build_simple_mem_ref_loc (loc
, src
);
7100 dst
= build_simple_mem_ref_loc (loc
, arg
);
7101 dst
= omp_build_component_ref (dst
, f
);
7102 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
7103 append_to_statement_list (t
, &list
);
7105 case OMP_CLAUSE_PRIVATE
:
7106 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
7108 decl
= OMP_CLAUSE_DECL (c
);
7109 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
7110 f
= (tree
) n
->value
;
7111 if (tcctx
.cb
.decl_map
)
7112 f
= *tcctx
.cb
.decl_map
->get (f
);
7113 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
7116 sf
= (tree
) n
->value
;
7117 if (tcctx
.cb
.decl_map
)
7118 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7119 src
= build_simple_mem_ref_loc (loc
, sarg
);
7120 src
= omp_build_component_ref (src
, sf
);
7121 if (use_pointer_for_field (decl
, NULL
))
7122 src
= build_simple_mem_ref_loc (loc
, src
);
7126 dst
= build_simple_mem_ref_loc (loc
, arg
);
7127 dst
= omp_build_component_ref (dst
, f
);
7128 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
7129 append_to_statement_list (t
, &list
);
7135 /* Last pass: handle VLA firstprivates. */
7136 if (tcctx
.cb
.decl_map
)
7137 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
7138 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7142 decl
= OMP_CLAUSE_DECL (c
);
7143 if (!is_variable_sized (decl
))
7145 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
7148 f
= (tree
) n
->value
;
7149 f
= *tcctx
.cb
.decl_map
->get (f
);
7150 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
7151 ind
= DECL_VALUE_EXPR (decl
);
7152 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
7153 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
7154 n
= splay_tree_lookup (ctx
->sfield_map
,
7155 (splay_tree_key
) TREE_OPERAND (ind
, 0));
7156 sf
= (tree
) n
->value
;
7157 sf
= *tcctx
.cb
.decl_map
->get (sf
);
7158 src
= build_simple_mem_ref_loc (loc
, sarg
);
7159 src
= omp_build_component_ref (src
, sf
);
7160 src
= build_simple_mem_ref_loc (loc
, src
);
7161 dst
= build_simple_mem_ref_loc (loc
, arg
);
7162 dst
= omp_build_component_ref (dst
, f
);
7163 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
7164 append_to_statement_list (t
, &list
);
7165 n
= splay_tree_lookup (ctx
->field_map
,
7166 (splay_tree_key
) TREE_OPERAND (ind
, 0));
7167 df
= (tree
) n
->value
;
7168 df
= *tcctx
.cb
.decl_map
->get (df
);
7169 ptr
= build_simple_mem_ref_loc (loc
, arg
);
7170 ptr
= omp_build_component_ref (ptr
, df
);
7171 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
7172 build_fold_addr_expr_loc (loc
, dst
));
7173 append_to_statement_list (t
, &list
);
7176 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
7177 append_to_statement_list (t
, &list
);
7179 if (tcctx
.cb
.decl_map
)
7180 delete tcctx
.cb
.decl_map
;
7181 pop_gimplify_context (NULL
);
7182 BIND_EXPR_BODY (bind
) = list
;
7187 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
7191 size_t n_in
= 0, n_out
= 0, idx
= 2, i
;
7193 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
7194 gcc_assert (clauses
);
7195 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7196 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
7197 switch (OMP_CLAUSE_DEPEND_KIND (c
))
7199 case OMP_CLAUSE_DEPEND_IN
:
7202 case OMP_CLAUSE_DEPEND_OUT
:
7203 case OMP_CLAUSE_DEPEND_INOUT
:
7206 case OMP_CLAUSE_DEPEND_SOURCE
:
7207 case OMP_CLAUSE_DEPEND_SINK
:
7212 tree type
= build_array_type_nelts (ptr_type_node
, n_in
+ n_out
+ 2);
7213 tree array
= create_tmp_var (type
);
7214 TREE_ADDRESSABLE (array
) = 1;
7215 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
7217 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, n_in
+ n_out
));
7218 gimple_seq_add_stmt (iseq
, g
);
7219 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
7221 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, n_out
));
7222 gimple_seq_add_stmt (iseq
, g
);
7223 for (i
= 0; i
< 2; i
++)
7225 if ((i
? n_in
: n_out
) == 0)
7227 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7228 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
7229 && ((OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_IN
) ^ i
))
7231 tree t
= OMP_CLAUSE_DECL (c
);
7232 t
= fold_convert (ptr_type_node
, t
);
7233 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
7234 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
7235 NULL_TREE
, NULL_TREE
);
7236 g
= gimple_build_assign (r
, t
);
7237 gimple_seq_add_stmt (iseq
, g
);
7240 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
7241 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
7242 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
7244 tree clobber
= build_constructor (type
, NULL
);
7245 TREE_THIS_VOLATILE (clobber
) = 1;
7246 g
= gimple_build_assign (array
, clobber
);
7247 gimple_seq_add_stmt (oseq
, g
);
7250 /* Lower the OpenMP parallel or task directive in the current statement
7251 in GSI_P. CTX holds context information for the directive. */
7254 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7258 gimple
*stmt
= gsi_stmt (*gsi_p
);
7259 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
7260 gimple_seq par_body
, olist
, ilist
, par_olist
, par_rlist
, par_ilist
, new_body
;
7261 location_t loc
= gimple_location (stmt
);
7263 clauses
= gimple_omp_taskreg_clauses (stmt
);
7265 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
7266 par_body
= gimple_bind_body (par_bind
);
7267 child_fn
= ctx
->cb
.dst_fn
;
7268 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
7269 && !gimple_omp_parallel_combined_p (stmt
))
7271 struct walk_stmt_info wi
;
7274 memset (&wi
, 0, sizeof (wi
));
7277 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
7279 gimple_omp_parallel_set_combined_p (stmt
, true);
7281 gimple_seq dep_ilist
= NULL
;
7282 gimple_seq dep_olist
= NULL
;
7283 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
7284 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
7286 push_gimplify_context ();
7287 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
7288 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
7289 &dep_ilist
, &dep_olist
);
7292 if (ctx
->srecord_type
)
7293 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
7295 push_gimplify_context ();
7300 bool phony_construct
= gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
7301 && gimple_omp_parallel_grid_phony (as_a
<gomp_parallel
*> (stmt
));
7302 if (phony_construct
&& ctx
->record_type
)
7304 gcc_checking_assert (!ctx
->receiver_decl
);
7305 ctx
->receiver_decl
= create_tmp_var
7306 (build_reference_type (ctx
->record_type
), ".omp_rec");
7308 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
7309 lower_omp (&par_body
, ctx
);
7310 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
)
7311 lower_reduction_clauses (clauses
, &par_rlist
, ctx
);
7313 /* Declare all the variables created by mapping and the variables
7314 declared in the scope of the parallel body. */
7315 record_vars_into (ctx
->block_vars
, child_fn
);
7316 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
7318 if (ctx
->record_type
)
7321 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
7322 : ctx
->record_type
, ".omp_data_o");
7323 DECL_NAMELESS (ctx
->sender_decl
) = 1;
7324 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
7325 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
7330 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
7331 lower_send_shared_vars (&ilist
, &olist
, ctx
);
7333 if (ctx
->record_type
)
7335 tree clobber
= build_constructor (TREE_TYPE (ctx
->sender_decl
), NULL
);
7336 TREE_THIS_VOLATILE (clobber
) = 1;
7337 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
7341 /* Once all the expansions are done, sequence all the different
7342 fragments inside gimple_omp_body. */
7346 if (ctx
->record_type
)
7348 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
7349 /* fixup_child_record_type might have changed receiver_decl's type. */
7350 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
7351 gimple_seq_add_stmt (&new_body
,
7352 gimple_build_assign (ctx
->receiver_decl
, t
));
7355 gimple_seq_add_seq (&new_body
, par_ilist
);
7356 gimple_seq_add_seq (&new_body
, par_body
);
7357 gimple_seq_add_seq (&new_body
, par_rlist
);
7358 if (ctx
->cancellable
)
7359 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
7360 gimple_seq_add_seq (&new_body
, par_olist
);
7361 new_body
= maybe_catch_exception (new_body
);
7362 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
7363 gimple_seq_add_stmt (&new_body
,
7364 gimple_build_omp_continue (integer_zero_node
,
7365 integer_zero_node
));
7366 if (!phony_construct
)
7368 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
7369 gimple_omp_set_body (stmt
, new_body
);
7372 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
7373 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
7374 gimple_bind_add_seq (bind
, ilist
);
7375 if (!phony_construct
)
7376 gimple_bind_add_stmt (bind
, stmt
);
7378 gimple_bind_add_seq (bind
, new_body
);
7379 gimple_bind_add_seq (bind
, olist
);
7381 pop_gimplify_context (NULL
);
7385 gimple_bind_add_seq (dep_bind
, dep_ilist
);
7386 gimple_bind_add_stmt (dep_bind
, bind
);
7387 gimple_bind_add_seq (dep_bind
, dep_olist
);
7388 pop_gimplify_context (dep_bind
);
7392 /* Lower the GIMPLE_OMP_TARGET in the current statement
7393 in GSI_P. CTX holds context information for the directive. */
7396 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7399 tree child_fn
, t
, c
;
7400 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
7401 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
7402 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
7403 location_t loc
= gimple_location (stmt
);
7404 bool offloaded
, data_region
;
7405 unsigned int map_cnt
= 0;
7407 offloaded
= is_gimple_omp_offloaded (stmt
);
7408 switch (gimple_omp_target_kind (stmt
))
7410 case GF_OMP_TARGET_KIND_REGION
:
7411 case GF_OMP_TARGET_KIND_UPDATE
:
7412 case GF_OMP_TARGET_KIND_ENTER_DATA
:
7413 case GF_OMP_TARGET_KIND_EXIT_DATA
:
7414 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
7415 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
7416 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
7417 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
7418 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
7419 data_region
= false;
7421 case GF_OMP_TARGET_KIND_DATA
:
7422 case GF_OMP_TARGET_KIND_OACC_DATA
:
7423 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
7430 clauses
= gimple_omp_target_clauses (stmt
);
7432 gimple_seq dep_ilist
= NULL
;
7433 gimple_seq dep_olist
= NULL
;
7434 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
7436 push_gimplify_context ();
7437 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
7438 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
7439 &dep_ilist
, &dep_olist
);
7446 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
7447 tgt_body
= gimple_bind_body (tgt_bind
);
7449 else if (data_region
)
7450 tgt_body
= gimple_omp_body (stmt
);
7451 child_fn
= ctx
->cb
.dst_fn
;
7453 push_gimplify_context ();
7456 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7457 switch (OMP_CLAUSE_CODE (c
))
7463 case OMP_CLAUSE_MAP
:
7465 /* First check what we're prepared to handle in the following. */
7466 switch (OMP_CLAUSE_MAP_KIND (c
))
7468 case GOMP_MAP_ALLOC
:
7471 case GOMP_MAP_TOFROM
:
7472 case GOMP_MAP_POINTER
:
7473 case GOMP_MAP_TO_PSET
:
7474 case GOMP_MAP_DELETE
:
7475 case GOMP_MAP_RELEASE
:
7476 case GOMP_MAP_ALWAYS_TO
:
7477 case GOMP_MAP_ALWAYS_FROM
:
7478 case GOMP_MAP_ALWAYS_TOFROM
:
7479 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
7480 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
7481 case GOMP_MAP_STRUCT
:
7482 case GOMP_MAP_ALWAYS_POINTER
:
7484 case GOMP_MAP_FORCE_ALLOC
:
7485 case GOMP_MAP_FORCE_TO
:
7486 case GOMP_MAP_FORCE_FROM
:
7487 case GOMP_MAP_FORCE_TOFROM
:
7488 case GOMP_MAP_FORCE_PRESENT
:
7489 case GOMP_MAP_FORCE_DEVICEPTR
:
7490 case GOMP_MAP_DEVICE_RESIDENT
:
7492 gcc_assert (is_gimple_omp_oacc (stmt
));
7500 case OMP_CLAUSE_FROM
:
7502 var
= OMP_CLAUSE_DECL (c
);
7505 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
7506 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
7507 && (OMP_CLAUSE_MAP_KIND (c
)
7508 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
7514 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
7516 tree var2
= DECL_VALUE_EXPR (var
);
7517 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
7518 var2
= TREE_OPERAND (var2
, 0);
7519 gcc_assert (DECL_P (var2
));
7524 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7525 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7526 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
7528 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
7530 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
7531 && varpool_node::get_create (var
)->offloadable
)
7534 tree type
= build_pointer_type (TREE_TYPE (var
));
7535 tree new_var
= lookup_decl (var
, ctx
);
7536 x
= create_tmp_var_raw (type
, get_name (new_var
));
7537 gimple_add_tmp_var (x
);
7538 x
= build_simple_mem_ref (x
);
7539 SET_DECL_VALUE_EXPR (new_var
, x
);
7540 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7545 if (!maybe_lookup_field (var
, ctx
))
7548 /* Don't remap oacc parallel reduction variables, because the
7549 intermediate result must be local to each gang. */
7550 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7551 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
7553 x
= build_receiver_ref (var
, true, ctx
);
7554 tree new_var
= lookup_decl (var
, ctx
);
7556 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7557 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
7558 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
7559 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
7560 x
= build_simple_mem_ref (x
);
7561 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7563 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
7564 if (omp_is_reference (new_var
))
7566 /* Create a local object to hold the instance
7568 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
7569 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
7570 tree inst
= create_tmp_var (type
, id
);
7571 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
7572 x
= build_fold_addr_expr (inst
);
7574 gimplify_assign (new_var
, x
, &fplist
);
7576 else if (DECL_P (new_var
))
7578 SET_DECL_VALUE_EXPR (new_var
, x
);
7579 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7587 case OMP_CLAUSE_FIRSTPRIVATE
:
7588 if (is_oacc_parallel (ctx
))
7589 goto oacc_firstprivate
;
7591 var
= OMP_CLAUSE_DECL (c
);
7592 if (!omp_is_reference (var
)
7593 && !is_gimple_reg_type (TREE_TYPE (var
)))
7595 tree new_var
= lookup_decl (var
, ctx
);
7596 if (is_variable_sized (var
))
7598 tree pvar
= DECL_VALUE_EXPR (var
);
7599 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
7600 pvar
= TREE_OPERAND (pvar
, 0);
7601 gcc_assert (DECL_P (pvar
));
7602 tree new_pvar
= lookup_decl (pvar
, ctx
);
7603 x
= build_fold_indirect_ref (new_pvar
);
7604 TREE_THIS_NOTRAP (x
) = 1;
7607 x
= build_receiver_ref (var
, true, ctx
);
7608 SET_DECL_VALUE_EXPR (new_var
, x
);
7609 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7613 case OMP_CLAUSE_PRIVATE
:
7614 if (is_gimple_omp_oacc (ctx
->stmt
))
7616 var
= OMP_CLAUSE_DECL (c
);
7617 if (is_variable_sized (var
))
7619 tree new_var
= lookup_decl (var
, ctx
);
7620 tree pvar
= DECL_VALUE_EXPR (var
);
7621 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
7622 pvar
= TREE_OPERAND (pvar
, 0);
7623 gcc_assert (DECL_P (pvar
));
7624 tree new_pvar
= lookup_decl (pvar
, ctx
);
7625 x
= build_fold_indirect_ref (new_pvar
);
7626 TREE_THIS_NOTRAP (x
) = 1;
7627 SET_DECL_VALUE_EXPR (new_var
, x
);
7628 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7632 case OMP_CLAUSE_USE_DEVICE_PTR
:
7633 case OMP_CLAUSE_IS_DEVICE_PTR
:
7634 var
= OMP_CLAUSE_DECL (c
);
7636 if (is_variable_sized (var
))
7638 tree new_var
= lookup_decl (var
, ctx
);
7639 tree pvar
= DECL_VALUE_EXPR (var
);
7640 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
7641 pvar
= TREE_OPERAND (pvar
, 0);
7642 gcc_assert (DECL_P (pvar
));
7643 tree new_pvar
= lookup_decl (pvar
, ctx
);
7644 x
= build_fold_indirect_ref (new_pvar
);
7645 TREE_THIS_NOTRAP (x
) = 1;
7646 SET_DECL_VALUE_EXPR (new_var
, x
);
7647 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7649 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
7651 tree new_var
= lookup_decl (var
, ctx
);
7652 tree type
= build_pointer_type (TREE_TYPE (var
));
7653 x
= create_tmp_var_raw (type
, get_name (new_var
));
7654 gimple_add_tmp_var (x
);
7655 x
= build_simple_mem_ref (x
);
7656 SET_DECL_VALUE_EXPR (new_var
, x
);
7657 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7661 tree new_var
= lookup_decl (var
, ctx
);
7662 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
7663 gimple_add_tmp_var (x
);
7664 SET_DECL_VALUE_EXPR (new_var
, x
);
7665 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7672 target_nesting_level
++;
7673 lower_omp (&tgt_body
, ctx
);
7674 target_nesting_level
--;
7676 else if (data_region
)
7677 lower_omp (&tgt_body
, ctx
);
7681 /* Declare all the variables created by mapping and the variables
7682 declared in the scope of the target body. */
7683 record_vars_into (ctx
->block_vars
, child_fn
);
7684 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
7689 if (ctx
->record_type
)
7692 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
7693 DECL_NAMELESS (ctx
->sender_decl
) = 1;
7694 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
7695 t
= make_tree_vec (3);
7696 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
7698 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
7700 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
7701 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
7702 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
7703 tree tkind_type
= short_unsigned_type_node
;
7704 int talign_shift
= 8;
7706 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
7708 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
7709 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
7710 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
7711 gimple_omp_target_set_data_arg (stmt
, t
);
7713 vec
<constructor_elt
, va_gc
> *vsize
;
7714 vec
<constructor_elt
, va_gc
> *vkind
;
7715 vec_alloc (vsize
, map_cnt
);
7716 vec_alloc (vkind
, map_cnt
);
7717 unsigned int map_idx
= 0;
7719 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7720 switch (OMP_CLAUSE_CODE (c
))
7722 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
7723 unsigned int talign
;
7728 case OMP_CLAUSE_MAP
:
7730 case OMP_CLAUSE_FROM
:
7731 oacc_firstprivate_map
:
7733 ovar
= OMP_CLAUSE_DECL (c
);
7734 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7735 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7736 || (OMP_CLAUSE_MAP_KIND (c
)
7737 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
7741 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7742 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
7744 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
7745 == get_base_address (ovar
));
7746 nc
= OMP_CLAUSE_CHAIN (c
);
7747 ovar
= OMP_CLAUSE_DECL (nc
);
7751 tree x
= build_sender_ref (ovar
, ctx
);
7753 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
7754 gimplify_assign (x
, v
, &ilist
);
7760 if (DECL_SIZE (ovar
)
7761 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
7763 tree ovar2
= DECL_VALUE_EXPR (ovar
);
7764 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
7765 ovar2
= TREE_OPERAND (ovar2
, 0);
7766 gcc_assert (DECL_P (ovar2
));
7769 if (!maybe_lookup_field (ovar
, ctx
))
7773 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
7774 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
7775 talign
= DECL_ALIGN_UNIT (ovar
);
7778 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
7779 x
= build_sender_ref (ovar
, ctx
);
7781 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7782 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
7783 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
7784 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
7786 gcc_assert (offloaded
);
7788 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
7789 mark_addressable (avar
);
7790 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
7791 talign
= DECL_ALIGN_UNIT (avar
);
7792 avar
= build_fold_addr_expr (avar
);
7793 gimplify_assign (x
, avar
, &ilist
);
7795 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7797 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
7798 if (!omp_is_reference (var
))
7800 if (is_gimple_reg (var
)
7801 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
7802 TREE_NO_WARNING (var
) = 1;
7803 var
= build_fold_addr_expr (var
);
7806 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
7807 gimplify_assign (x
, var
, &ilist
);
7809 else if (is_gimple_reg (var
))
7811 gcc_assert (offloaded
);
7812 tree avar
= create_tmp_var (TREE_TYPE (var
));
7813 mark_addressable (avar
);
7814 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
7815 if (GOMP_MAP_COPY_TO_P (map_kind
)
7816 || map_kind
== GOMP_MAP_POINTER
7817 || map_kind
== GOMP_MAP_TO_PSET
7818 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
7820 /* If we need to initialize a temporary
7821 with VAR because it is not addressable, and
7822 the variable hasn't been initialized yet, then
7823 we'll get a warning for the store to avar.
7824 Don't warn in that case, the mapping might
7826 TREE_NO_WARNING (var
) = 1;
7827 gimplify_assign (avar
, var
, &ilist
);
7829 avar
= build_fold_addr_expr (avar
);
7830 gimplify_assign (x
, avar
, &ilist
);
7831 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
7832 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
7833 && !TYPE_READONLY (TREE_TYPE (var
)))
7835 x
= unshare_expr (x
);
7836 x
= build_simple_mem_ref (x
);
7837 gimplify_assign (var
, x
, &olist
);
7842 var
= build_fold_addr_expr (var
);
7843 gimplify_assign (x
, var
, &ilist
);
7847 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7849 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
7850 s
= TREE_TYPE (ovar
);
7851 if (TREE_CODE (s
) == REFERENCE_TYPE
)
7853 s
= TYPE_SIZE_UNIT (s
);
7856 s
= OMP_CLAUSE_SIZE (c
);
7858 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
7859 s
= fold_convert (size_type_node
, s
);
7860 purpose
= size_int (map_idx
++);
7861 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
7862 if (TREE_CODE (s
) != INTEGER_CST
)
7863 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
7865 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
7866 switch (OMP_CLAUSE_CODE (c
))
7868 case OMP_CLAUSE_MAP
:
7869 tkind
= OMP_CLAUSE_MAP_KIND (c
);
7871 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
7874 case GOMP_MAP_ALLOC
:
7877 case GOMP_MAP_TOFROM
:
7878 case GOMP_MAP_ALWAYS_TO
:
7879 case GOMP_MAP_ALWAYS_FROM
:
7880 case GOMP_MAP_ALWAYS_TOFROM
:
7881 case GOMP_MAP_RELEASE
:
7882 case GOMP_MAP_FORCE_TO
:
7883 case GOMP_MAP_FORCE_FROM
:
7884 case GOMP_MAP_FORCE_TOFROM
:
7885 case GOMP_MAP_FORCE_PRESENT
:
7886 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
7888 case GOMP_MAP_DELETE
:
7889 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
7893 if (tkind_zero
!= tkind
)
7895 if (integer_zerop (s
))
7897 else if (integer_nonzerop (s
))
7901 case OMP_CLAUSE_FIRSTPRIVATE
:
7902 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
7903 tkind
= GOMP_MAP_TO
;
7907 tkind
= GOMP_MAP_TO
;
7910 case OMP_CLAUSE_FROM
:
7911 tkind
= GOMP_MAP_FROM
;
7917 gcc_checking_assert (tkind
7918 < (HOST_WIDE_INT_C (1U) << talign_shift
));
7919 gcc_checking_assert (tkind_zero
7920 < (HOST_WIDE_INT_C (1U) << talign_shift
));
7921 talign
= ceil_log2 (talign
);
7922 tkind
|= talign
<< talign_shift
;
7923 tkind_zero
|= talign
<< talign_shift
;
7924 gcc_checking_assert (tkind
7925 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
7926 gcc_checking_assert (tkind_zero
7927 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
7928 if (tkind
== tkind_zero
)
7929 x
= build_int_cstu (tkind_type
, tkind
);
7932 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
7933 x
= build3 (COND_EXPR
, tkind_type
,
7934 fold_build2 (EQ_EXPR
, boolean_type_node
,
7935 unshare_expr (s
), size_zero_node
),
7936 build_int_cstu (tkind_type
, tkind_zero
),
7937 build_int_cstu (tkind_type
, tkind
));
7939 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
7944 case OMP_CLAUSE_FIRSTPRIVATE
:
7945 if (is_oacc_parallel (ctx
))
7946 goto oacc_firstprivate_map
;
7947 ovar
= OMP_CLAUSE_DECL (c
);
7948 if (omp_is_reference (ovar
))
7949 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
7951 talign
= DECL_ALIGN_UNIT (ovar
);
7952 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
7953 x
= build_sender_ref (ovar
, ctx
);
7954 tkind
= GOMP_MAP_FIRSTPRIVATE
;
7955 type
= TREE_TYPE (ovar
);
7956 if (omp_is_reference (ovar
))
7957 type
= TREE_TYPE (type
);
7958 if ((INTEGRAL_TYPE_P (type
)
7959 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
7960 || TREE_CODE (type
) == POINTER_TYPE
)
7962 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
7964 if (omp_is_reference (var
))
7965 t
= build_simple_mem_ref (var
);
7966 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
7967 TREE_NO_WARNING (var
) = 1;
7968 if (TREE_CODE (type
) != POINTER_TYPE
)
7969 t
= fold_convert (pointer_sized_int_node
, t
);
7970 t
= fold_convert (TREE_TYPE (x
), t
);
7971 gimplify_assign (x
, t
, &ilist
);
7973 else if (omp_is_reference (var
))
7974 gimplify_assign (x
, var
, &ilist
);
7975 else if (is_gimple_reg (var
))
7977 tree avar
= create_tmp_var (TREE_TYPE (var
));
7978 mark_addressable (avar
);
7979 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
7980 TREE_NO_WARNING (var
) = 1;
7981 gimplify_assign (avar
, var
, &ilist
);
7982 avar
= build_fold_addr_expr (avar
);
7983 gimplify_assign (x
, avar
, &ilist
);
7987 var
= build_fold_addr_expr (var
);
7988 gimplify_assign (x
, var
, &ilist
);
7990 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
7992 else if (omp_is_reference (ovar
))
7993 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
7995 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
7996 s
= fold_convert (size_type_node
, s
);
7997 purpose
= size_int (map_idx
++);
7998 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
7999 if (TREE_CODE (s
) != INTEGER_CST
)
8000 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
8002 gcc_checking_assert (tkind
8003 < (HOST_WIDE_INT_C (1U) << talign_shift
));
8004 talign
= ceil_log2 (talign
);
8005 tkind
|= talign
<< talign_shift
;
8006 gcc_checking_assert (tkind
8007 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
8008 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
8009 build_int_cstu (tkind_type
, tkind
));
8012 case OMP_CLAUSE_USE_DEVICE_PTR
:
8013 case OMP_CLAUSE_IS_DEVICE_PTR
:
8014 ovar
= OMP_CLAUSE_DECL (c
);
8015 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
8016 x
= build_sender_ref (ovar
, ctx
);
8017 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
8018 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
8020 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
8021 type
= TREE_TYPE (ovar
);
8022 if (TREE_CODE (type
) == ARRAY_TYPE
)
8023 var
= build_fold_addr_expr (var
);
8026 if (omp_is_reference (ovar
))
8028 type
= TREE_TYPE (type
);
8029 if (TREE_CODE (type
) != ARRAY_TYPE
)
8030 var
= build_simple_mem_ref (var
);
8031 var
= fold_convert (TREE_TYPE (x
), var
);
8034 gimplify_assign (x
, var
, &ilist
);
8036 purpose
= size_int (map_idx
++);
8037 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
8038 gcc_checking_assert (tkind
8039 < (HOST_WIDE_INT_C (1U) << talign_shift
));
8040 gcc_checking_assert (tkind
8041 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
8042 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
8043 build_int_cstu (tkind_type
, tkind
));
8047 gcc_assert (map_idx
== map_cnt
);
8049 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
8050 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
8051 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
8052 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
8053 for (int i
= 1; i
<= 2; i
++)
8054 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
8056 gimple_seq initlist
= NULL
;
8057 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
8058 TREE_VEC_ELT (t
, i
)),
8059 &initlist
, true, NULL_TREE
);
8060 gimple_seq_add_seq (&ilist
, initlist
);
8062 tree clobber
= build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, i
)),
8064 TREE_THIS_VOLATILE (clobber
) = 1;
8065 gimple_seq_add_stmt (&olist
,
8066 gimple_build_assign (TREE_VEC_ELT (t
, i
),
8070 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
8071 TREE_THIS_VOLATILE (clobber
) = 1;
8072 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
8076 /* Once all the expansions are done, sequence all the different
8077 fragments inside gimple_omp_body. */
8082 && ctx
->record_type
)
8084 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
8085 /* fixup_child_record_type might have changed receiver_decl's type. */
8086 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
8087 gimple_seq_add_stmt (&new_body
,
8088 gimple_build_assign (ctx
->receiver_decl
, t
));
8090 gimple_seq_add_seq (&new_body
, fplist
);
8092 if (offloaded
|| data_region
)
8094 tree prev
= NULL_TREE
;
8095 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8096 switch (OMP_CLAUSE_CODE (c
))
8101 case OMP_CLAUSE_FIRSTPRIVATE
:
8102 if (is_gimple_omp_oacc (ctx
->stmt
))
8104 var
= OMP_CLAUSE_DECL (c
);
8105 if (omp_is_reference (var
)
8106 || is_gimple_reg_type (TREE_TYPE (var
)))
8108 tree new_var
= lookup_decl (var
, ctx
);
8110 type
= TREE_TYPE (var
);
8111 if (omp_is_reference (var
))
8112 type
= TREE_TYPE (type
);
8113 if ((INTEGRAL_TYPE_P (type
)
8114 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
8115 || TREE_CODE (type
) == POINTER_TYPE
)
8117 x
= build_receiver_ref (var
, false, ctx
);
8118 if (TREE_CODE (type
) != POINTER_TYPE
)
8119 x
= fold_convert (pointer_sized_int_node
, x
);
8120 x
= fold_convert (type
, x
);
8121 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
8123 if (omp_is_reference (var
))
8125 tree v
= create_tmp_var_raw (type
, get_name (var
));
8126 gimple_add_tmp_var (v
);
8127 TREE_ADDRESSABLE (v
) = 1;
8128 gimple_seq_add_stmt (&new_body
,
8129 gimple_build_assign (v
, x
));
8130 x
= build_fold_addr_expr (v
);
8132 gimple_seq_add_stmt (&new_body
,
8133 gimple_build_assign (new_var
, x
));
8137 x
= build_receiver_ref (var
, !omp_is_reference (var
), ctx
);
8138 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
8140 gimple_seq_add_stmt (&new_body
,
8141 gimple_build_assign (new_var
, x
));
8144 else if (is_variable_sized (var
))
8146 tree pvar
= DECL_VALUE_EXPR (var
);
8147 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
8148 pvar
= TREE_OPERAND (pvar
, 0);
8149 gcc_assert (DECL_P (pvar
));
8150 tree new_var
= lookup_decl (pvar
, ctx
);
8151 x
= build_receiver_ref (var
, false, ctx
);
8152 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8153 gimple_seq_add_stmt (&new_body
,
8154 gimple_build_assign (new_var
, x
));
8157 case OMP_CLAUSE_PRIVATE
:
8158 if (is_gimple_omp_oacc (ctx
->stmt
))
8160 var
= OMP_CLAUSE_DECL (c
);
8161 if (omp_is_reference (var
))
8163 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8164 tree new_var
= lookup_decl (var
, ctx
);
8165 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
8166 if (TREE_CONSTANT (x
))
8168 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
8170 gimple_add_tmp_var (x
);
8171 TREE_ADDRESSABLE (x
) = 1;
8172 x
= build_fold_addr_expr_loc (clause_loc
, x
);
8177 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
8178 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8179 gimple_seq_add_stmt (&new_body
,
8180 gimple_build_assign (new_var
, x
));
8183 case OMP_CLAUSE_USE_DEVICE_PTR
:
8184 case OMP_CLAUSE_IS_DEVICE_PTR
:
8185 var
= OMP_CLAUSE_DECL (c
);
8186 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
8187 x
= build_sender_ref (var
, ctx
);
8189 x
= build_receiver_ref (var
, false, ctx
);
8190 if (is_variable_sized (var
))
8192 tree pvar
= DECL_VALUE_EXPR (var
);
8193 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
8194 pvar
= TREE_OPERAND (pvar
, 0);
8195 gcc_assert (DECL_P (pvar
));
8196 tree new_var
= lookup_decl (pvar
, ctx
);
8197 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8198 gimple_seq_add_stmt (&new_body
,
8199 gimple_build_assign (new_var
, x
));
8201 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
8203 tree new_var
= lookup_decl (var
, ctx
);
8204 new_var
= DECL_VALUE_EXPR (new_var
);
8205 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
8206 new_var
= TREE_OPERAND (new_var
, 0);
8207 gcc_assert (DECL_P (new_var
));
8208 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8209 gimple_seq_add_stmt (&new_body
,
8210 gimple_build_assign (new_var
, x
));
8214 tree type
= TREE_TYPE (var
);
8215 tree new_var
= lookup_decl (var
, ctx
);
8216 if (omp_is_reference (var
))
8218 type
= TREE_TYPE (type
);
8219 if (TREE_CODE (type
) != ARRAY_TYPE
)
8221 tree v
= create_tmp_var_raw (type
, get_name (var
));
8222 gimple_add_tmp_var (v
);
8223 TREE_ADDRESSABLE (v
) = 1;
8224 x
= fold_convert (type
, x
);
8225 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
8227 gimple_seq_add_stmt (&new_body
,
8228 gimple_build_assign (v
, x
));
8229 x
= build_fold_addr_expr (v
);
8232 new_var
= DECL_VALUE_EXPR (new_var
);
8233 x
= fold_convert (TREE_TYPE (new_var
), x
);
8234 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8235 gimple_seq_add_stmt (&new_body
,
8236 gimple_build_assign (new_var
, x
));
8240 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
8241 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
8242 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8243 or references to VLAs. */
8244 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8245 switch (OMP_CLAUSE_CODE (c
))
8250 case OMP_CLAUSE_MAP
:
8251 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8252 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
8254 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8255 HOST_WIDE_INT offset
= 0;
8257 var
= OMP_CLAUSE_DECL (c
);
8259 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
8260 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
8262 && varpool_node::get_create (var
)->offloadable
)
8264 if (TREE_CODE (var
) == INDIRECT_REF
8265 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
8266 var
= TREE_OPERAND (var
, 0);
8267 if (TREE_CODE (var
) == COMPONENT_REF
)
8269 var
= get_addr_base_and_unit_offset (var
, &offset
);
8270 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
8272 else if (DECL_SIZE (var
)
8273 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
8275 tree var2
= DECL_VALUE_EXPR (var
);
8276 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
8277 var2
= TREE_OPERAND (var2
, 0);
8278 gcc_assert (DECL_P (var2
));
8281 tree new_var
= lookup_decl (var
, ctx
), x
;
8282 tree type
= TREE_TYPE (new_var
);
8284 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
8285 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
8288 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
8290 new_var
= build2 (MEM_REF
, type
,
8291 build_fold_addr_expr (new_var
),
8292 build_int_cst (build_pointer_type (type
),
8295 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
8297 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
8298 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
8299 new_var
= build2 (MEM_REF
, type
,
8300 build_fold_addr_expr (new_var
),
8301 build_int_cst (build_pointer_type (type
),
8305 is_ref
= omp_is_reference (var
);
8306 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
8308 bool ref_to_array
= false;
8311 type
= TREE_TYPE (type
);
8312 if (TREE_CODE (type
) == ARRAY_TYPE
)
8314 type
= build_pointer_type (type
);
8315 ref_to_array
= true;
8318 else if (TREE_CODE (type
) == ARRAY_TYPE
)
8320 tree decl2
= DECL_VALUE_EXPR (new_var
);
8321 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
8322 decl2
= TREE_OPERAND (decl2
, 0);
8323 gcc_assert (DECL_P (decl2
));
8325 type
= TREE_TYPE (new_var
);
8327 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
8328 x
= fold_convert_loc (clause_loc
, type
, x
);
8329 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
8331 tree bias
= OMP_CLAUSE_SIZE (c
);
8333 bias
= lookup_decl (bias
, ctx
);
8334 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
8335 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
8337 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
8338 TREE_TYPE (x
), x
, bias
);
8341 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
8342 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8343 if (is_ref
&& !ref_to_array
)
8345 tree t
= create_tmp_var_raw (type
, get_name (var
));
8346 gimple_add_tmp_var (t
);
8347 TREE_ADDRESSABLE (t
) = 1;
8348 gimple_seq_add_stmt (&new_body
,
8349 gimple_build_assign (t
, x
));
8350 x
= build_fold_addr_expr_loc (clause_loc
, t
);
8352 gimple_seq_add_stmt (&new_body
,
8353 gimple_build_assign (new_var
, x
));
8356 else if (OMP_CLAUSE_CHAIN (c
)
8357 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
8359 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
8360 == GOMP_MAP_FIRSTPRIVATE_POINTER
8361 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
8362 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
8365 case OMP_CLAUSE_PRIVATE
:
8366 var
= OMP_CLAUSE_DECL (c
);
8367 if (is_variable_sized (var
))
8369 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8370 tree new_var
= lookup_decl (var
, ctx
);
8371 tree pvar
= DECL_VALUE_EXPR (var
);
8372 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
8373 pvar
= TREE_OPERAND (pvar
, 0);
8374 gcc_assert (DECL_P (pvar
));
8375 tree new_pvar
= lookup_decl (pvar
, ctx
);
8376 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
8377 tree al
= size_int (DECL_ALIGN (var
));
8378 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
8379 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
8380 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
8381 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8382 gimple_seq_add_stmt (&new_body
,
8383 gimple_build_assign (new_pvar
, x
));
8385 else if (omp_is_reference (var
) && !is_gimple_omp_oacc (ctx
->stmt
))
8387 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8388 tree new_var
= lookup_decl (var
, ctx
);
8389 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
8390 if (TREE_CONSTANT (x
))
8395 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
8396 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
8397 tree al
= size_int (TYPE_ALIGN (rtype
));
8398 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
8401 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
8402 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
8403 gimple_seq_add_stmt (&new_body
,
8404 gimple_build_assign (new_var
, x
));
8409 gimple_seq fork_seq
= NULL
;
8410 gimple_seq join_seq
= NULL
;
8412 if (is_oacc_parallel (ctx
))
8414 /* If there are reductions on the offloaded region itself, treat
8415 them as a dummy GANG loop. */
8416 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
8418 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
8419 false, NULL
, NULL
, &fork_seq
, &join_seq
, ctx
);
8422 gimple_seq_add_seq (&new_body
, fork_seq
);
8423 gimple_seq_add_seq (&new_body
, tgt_body
);
8424 gimple_seq_add_seq (&new_body
, join_seq
);
8427 new_body
= maybe_catch_exception (new_body
);
8429 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
8430 gimple_omp_set_body (stmt
, new_body
);
8433 bind
= gimple_build_bind (NULL
, NULL
,
8434 tgt_bind
? gimple_bind_block (tgt_bind
)
8436 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
8437 gimple_bind_add_seq (bind
, ilist
);
8438 gimple_bind_add_stmt (bind
, stmt
);
8439 gimple_bind_add_seq (bind
, olist
);
8441 pop_gimplify_context (NULL
);
8445 gimple_bind_add_seq (dep_bind
, dep_ilist
);
8446 gimple_bind_add_stmt (dep_bind
, bind
);
8447 gimple_bind_add_seq (dep_bind
, dep_olist
);
8448 pop_gimplify_context (dep_bind
);
/* Expand code for an OpenMP teams directive.  Replaces the GIMPLE_OMP_TEAMS
   statement at *GSI_P with a GIMPLE_BIND that evaluates the num_teams and
   thread_limit clauses, lowers the data-sharing clauses of the construct,
   and (unless this is a phony gridified teams region) emits a call to
   GOMP_TEAMS followed by the lowered body and an OMP return.  CTX is the
   lowering context for the construct.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  /* The bind replaces the teams statement in place; the teams statement is
     re-added inside the bind body below when a runtime call is needed.  */
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Evaluate the num_teams clause, defaulting to 0 ("implementation
     defined") when the clause is absent.  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise for thread_limit; 0 again means "implementation defined".  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      /* Real (non-gridified) teams regions call the libgomp runtime
	 entry point GOMP_teams (num_teams, thread_limit).  */
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
/* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.
   Lowers the nested body in CTX and appends a non-cancellable OMP return
   so later expansion sees a well-formed region.  */

static void
lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
		       gimple_build_omp_return (false));
}
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  /* Variables recorded as shared across task constructs must be
     regimplified as well, since their access form changed.  */
  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* Context of the statement being regimplified.  */
  omp_context *ctx;
  /* Saved (DECL_VALUE_EXPR, decl) pairs to be restored afterwards;
     pushed in pairs by lower_omp_regimplify_operands_p.  */
  vec<tree> *decls;
};
/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  /* Save the original DECL_VALUE_EXPR and the decl so the caller
	     can restore them once regimplification is done, then install
	     a remapped value expr referring to the privatized copy O.  */
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      /* Temporarily remap DECL_VALUE_EXPRs of member-access dummy vars;
	 the saved originals accumulate in DECLS.  */
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  /* Restore the saved DECL_VALUE_EXPRs; DECLS holds (value-expr, decl)
     pairs, popped in reverse order.  */
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
/* Lower a single statement at *GSI_P inside OMP context CTX (NULL when
   outside any OMP construct but task_shared_vars is set).  Dispatches on
   the GIMPLE code: OMP constructs are lowered by their dedicated helpers,
   container statements (bind/try/catch/...) recurse into their bodies, and
   other statements are regimplified when they mention variables whose
   access form changed (DECL_VALUE_EXPR or task-shared vars).  Calls to
   GOMP_barrier/GOMP_cancel/GOMP_cancellation_point inside cancellable
   regions are rewritten to their cancellable forms with a conditional
   branch to the region's cancel label.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    /* Cancellation is attached to the enclosing sections/parallel
	       construct, not to an individual section.  */
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* In a non-cancellable region a cancellation point is a
		   no-op; a barrier is left untouched.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* The cancellable variants return a flag; branch to the
	       region's cancel label when it is set.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      /* FALLTHRU */
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
/* Lower every statement in *BODY within OMP context CTX (NULL at the
   outermost level), then fold statements inside offloading/taskreg
   regions that gimplification deliberately left unfolded.  */

static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statments inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  /* lower_omp_1 sets input_location from the statements; restore it.  */
  input_location = saved_location;
}
/* Main entry point.  Scans the current function for OMP constructs,
   finishes the per-construct contexts, and lowers all regions.
   Returns 0 (no TODO flags).  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* HSA offloading: turn suitable target constructs into gridified
     kernels before scanning.  */
  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  return 0;
}
namespace {

/* Pass descriptor for the "omplower" GIMPLE pass; provides
   PROP_gimple_lomp and PROP_gimple_lomp_dev.  */
const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OPENMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper; all the work is done by execute_lower_omp.  */
class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

/* Factory function used by the pass manager.  */
gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
8917 /* The following is a utility to diagnose structured block violations.
8918 It is not part of the "omplower" pass, as that's invoked too late. It
8919 should be invoked by the respective front ends after gimplification. */
/* Maps each LABEL_DECL to the innermost OMP construct enclosing it;
   built by diagnose_sb_1, queried by diagnose_sb_2.  */
static splay_tree all_labels;
/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  BRANCH_CTX/LABEL_CTX are the innermost
   OMP constructs enclosing the branch and its destination label (NULL if
   outside any construct); an offending branch is replaced with a NOP.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  /* Pick the wording for the diagnostic based on which front-end
     family owns the offending construct.  */
  const char* kind = NULL;

  if (flag_cilkplus)
    {
      if ((branch_ctx
	   && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
	   && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
	  || (label_ctx
	      && gimple_code (label_ctx) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
	kind = "Cilk Plus";
    }
  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing and error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  WI->info carries the innermost enclosing
   OMP construct while walking; every label seen is inserted into
   all_labels mapped to that construct.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  WI->info again carries the innermost
   enclosing OMP construct; each branch target label is looked up in
   all_labels (built by pass 1) and diagnose_sb_0 reports any mismatch.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
	{
	  /* Both outgoing edges of a condition are checked.  */
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  tree lab = gimple_cond_true_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	  lab = gimple_cond_false_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	}
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos have non-LABEL_DECL destinations; skip them.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    /* One diagnostic per switch statement is enough.  */
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return inside any OMP construct leaves it; NULL label context
	 forces the mismatch check.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
9172 diagnose_omp_structured_block_errors (void)
9174 struct walk_stmt_info wi
;
9175 gimple_seq body
= gimple_body (current_function_decl
);
9177 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
9179 memset (&wi
, 0, sizeof (wi
));
9180 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
9182 memset (&wi
, 0, sizeof (wi
));
9183 wi
.want_locations
= true;
9184 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
9186 gimple_set_body (current_function_decl
, body
);
9188 splay_tree_delete (all_labels
);
9196 const pass_data pass_data_diagnose_omp_blocks
=
9198 GIMPLE_PASS
, /* type */
9199 "*diagnose_omp_blocks", /* name */
9200 OPTGROUP_OPENMP
, /* optinfo_flags */
9201 TV_NONE
, /* tv_id */
9202 PROP_gimple_any
, /* properties_required */
9203 0, /* properties_provided */
9204 0, /* properties_destroyed */
9205 0, /* todo_flags_start */
9206 0, /* todo_flags_finish */
9209 class pass_diagnose_omp_blocks
: public gimple_opt_pass
9212 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
9213 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
9216 /* opt_pass methods: */
9217 virtual bool gate (function
*)
9219 return flag_cilkplus
|| flag_openacc
|| flag_openmp
;
9221 virtual unsigned int execute (function
*)
9223 return diagnose_omp_structured_block_errors ();
9226 }; // class pass_diagnose_omp_blocks
9231 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
9233 return new pass_diagnose_omp_blocks (ctxt
);
9237 #include "gt-omp-low.h"