/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
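
/* An illustrative sketch of the two phases (not part of the pass; the
   names below are invented for the example):

	int a;
	#pragma omp parallel shared (a)
	  body;

   The scan phase records A in a per-construct communication record,
   conceptually

	struct .omp_data_s { int a; };	(or a pointer to A, if it must
					 be passed by reference)

   and the lowering phase rewrites the body so that a child function
   receives that record through its .omp_data_i parameter.  The region
   itself is split off and handed to the runtime (e.g. GOMP_parallel in
   libgomp) later, by pass_expand_omp.  */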
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gomp_for *simt_stmt;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}
/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}
/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that the variable must have been
   entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}
static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
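
/* Worked example of the decision above (hypothetical user code):

	int x;  int buf[64];
	#pragma omp parallel shared (x, buf)

   BUF has aggregate type, so use_pointer_for_field returns true
   immediately and only its address travels through the communication
   record.  Plain X remains eligible for copy-in/copy-out as long as it
   is not static, not addressable, not shared in an enclosing parallel,
   and the context is not a task, per the checks above.  */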
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is so only because the task needs to
     take its address.  But we don't need to take the address of
     privatizations from it.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}
static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
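
/* A note on the MASK bits accepted by install_var_field, inferred from
   the assertions and callers in this file:
     bit 0 (1) - install the field into ctx->record_type / field_map;
     bit 1 (2) - install it into ctx->srecord_type / sfield_map;
     bit 2 (4) - pass the variable as a pointer to a pointer (arrays);
     bit 3 (8) - key the maps by &DECL_UID (var) rather than VAR itself.
   E.g. the common install_var_field (decl, by_ref, 3, ctx) installs the
   field into both record types.  */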
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context's data structures.  Called through the splay
   tree value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
/* Create a new name for omp child function.  Returns an identifier.  If
   IS_CILK_FOR is true then the suffix for the child function is
   "_cilk_for_fn".  */

static tree
create_omp_child_function_name (bool task_copy, bool is_cilk_for)
{
  if (is_cilk_for)
    return clone_function_name (current_function_decl, "_cilk_for_fn");
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}

/* Returns the type of the induction variable for the child function for
   _Cilk_for and the types for _high and _low variables based on TYPE.  */

static tree
cilk_for_check_loop_diff_type (tree type)
{
  if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
    {
      if (TYPE_UNSIGNED (type))
	return uint32_type_node;
      else
	return integer_type_node;
    }
  else
    {
      if (TYPE_UNSIGNED (type))
	return uint64_type_node;
      else
	return long_long_integer_type_node;
    }
}
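
/* For instance, following the code above: a 16-bit signed induction
   type yields integer_type_node, unsigned int yields uint32_type_node,
   and a 64-bit unsigned type yields uint64_type_node for the
   __low/__high parameters.  */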
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  tree cilk_for_count
    = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
      ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
  tree cilk_var_type = NULL_TREE;

  name = create_omp_child_function_name (task_copy,
					 cilk_for_count != NULL_TREE);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else if (cilk_for_count)
    {
      type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
      cilk_var_type = cilk_for_check_loop_diff_type (type);
      type = build_function_type_list (void_type_node, ptr_type_node,
				       cilk_var_type, cilk_var_type, NULL_TREE);
    }
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* _Cilk_for's child function requires two extra parameters called
     __low and __high that are set by the Cilk runtime when it calls this
     function.  */
  if (cilk_for_count)
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__high"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;

      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__low"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

static tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}
/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
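
/* Worked example of the count computed above: for a combined

	#pragma omp parallel for collapse(2) lastprivate(x)

   whose total iteration count is not a compile-time constant, COUNT is
   2 (istart/iend) + 1 (count2) + 1 (total iterations for lastprivate),
   so four _LOOPTEMP_ clauses are prepended to the parallel's clause
   list.  */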
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
1917 /* If any decls have been made addressable during scan_omp,
1918 adjust their fields if needed, and layout record types
1919 of parallel/task constructs. */
1922 finish_taskreg_scan (omp_context
*ctx
)
1924 if (ctx
->record_type
== NULL_TREE
)
1927 /* If any task_shared_vars were needed, verify all
1928 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1929 statements if use_pointer_for_field hasn't changed
1930 because of that. If it did, update field types now. */
1931 if (task_shared_vars
)
1935 for (c
= gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  These are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree c1 = gimple_omp_task_clauses (ctx->stmt);
	  c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
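
/* For instance, for a deferred task the two values stored above become the
   ARG_SIZE and ARG_ALIGN arguments of the runtime call (a rough sketch of
   the libgomp interface):

     GOMP_task (child_fn, &.omp_data_o, cpyfn, arg_size, arg_align, ...);

   so libgomp can allocate a correctly sized and aligned copy of the
   argument block whenever the task body does not run immediately.  */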

/* Find the enclosing offload context.  */

static omp_context *
enclosing_target_ctx (omp_context *ctx)
{
  for (; ctx; ctx = ctx->outer)
    if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
      break;

  return ctx;
}

/* Return true if ctx is part of an oacc kernels region.  */

static bool
ctx_in_oacc_kernels_region (omp_context *ctx)
{
  for (; ctx != NULL; ctx = ctx->outer)
    {
      gimple *stmt = ctx->stmt;
      if (gimple_code (stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
	return true;
    }

  return false;
}

/* Check the parallelism clauses inside a kernels region.
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
  if (!stmt)
    {
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
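
/* As an example of what the mask accumulation above diagnoses (a sketch):

     #pragma acc kernels
     {
       #pragma acc loop gang
       for (i = 0; i < n; i++)
	 {
	   #pragma acc loop gang
	   for (j = 0; j < m; j++)
	     ...
	 }
     }

   The gang bit is already set in OUTER_MASK when the inner loop is
   checked, so the inner loop triggers "inner loop uses same OpenACC
   parallelism as containing loop".  */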

/* Scan a GIMPLE_OMP_FOR.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      if (!tgt || is_oacc_parallel (tgt))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    char const *check = NULL;

	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		check = "gang";
		break;

	      case OMP_CLAUSE_WORKER:
		check = "worker";
		break;

	      case OMP_CLAUSE_VECTOR:
		check = "vector";
		break;

	      default:
		break;
	      }

	    if (check && OMP_CLAUSE_OPERAND (c, 0))
	      error_at (gimple_location (stmt),
			"argument not permitted on %qs clause in"
			" OpenACC %<parallel%>", check);
	  }

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	  check_oacc_kernel_gwv (stmt, ctx);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
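
/* E.g. inside an OpenACC parallel region (a sketch):

     #pragma acc parallel
     #pragma acc loop gang(num:2)
     for (...)
       ...

   the gang argument is diagnosed above, since the gang/worker/vector
   clauses take no argument there (num_gangs and friends belong on the
   enclosing parallel construct), while inside a kernels region such an
   argument is accepted.  */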

/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);

  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
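
/* The bind built above has roughly this shape (a sketch):

     D.cond = GOMP_USE_SIMT ();
     if (D.cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop, carrying an extra _simt_ clause>
	   goto lab3;
     lab2: <original simd loop>
     lab3:

   A later pass folds the IFN_GOMP_USE_SIMT call to a constant for the
   actual target, keeping one version of the loop and discarding the
   other.  */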

/* Scan an OpenMP sections directive.  */

static void
scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;

  ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
}

/* Scan an OpenMP single directive.  */

static void
scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}
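
/* The .omp_copy_s record built above only survives if copyprivate
   clauses added fields to it; it describes the block of data that
   GOMP_single_copy_start/GOMP_single_copy_end pass from the thread
   that executed the single region to all the others.  */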

/* Return true if the CLAUSES of an omp target guarantee that the base pointers
   used in the corresponding offloaded function are restrict.  */

static bool
omp_target_base_pointers_restrict_p (tree clauses)
{
  /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
     used by OpenACC.  */
  if (flag_openacc == 0)
    return false;

  /* I.  Basic example:

       void foo (void)
       {
	 unsigned int a[2], b[2];

	 #pragma acc kernels \
	   copyout (a) \
	   copyout (b)
	 {
	   a[0] = 0;
	   b[0] = 1;
	 }
       }

     After gimplification, we have:

       #pragma omp target oacc_kernels \
	 map(force_from:a [len: 8]) \
	 map(force_from:b [len: 8])
       {
	 a[0] = 0;
	 b[0] = 1;
       }

     Because both mappings have the force prefix, we know that they will be
     allocated when calling the corresponding offloaded function, which means we
     can mark the base pointers for a and b in the offloaded function as
     restrict.  */

  tree c;
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
	return false;

      switch (OMP_CLAUSE_MAP_KIND (c))
	{
	case GOMP_MAP_FORCE_ALLOC:
	case GOMP_MAP_FORCE_TO:
	case GOMP_MAP_FORCE_FROM:
	case GOMP_MAP_FORCE_TOFROM:
	  break;
	default:
	  return false;
	}
    }

  return true;
}

/* Scan a GIMPLE_OMP_TARGET.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  bool base_pointers_restrict = false;
  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);

      base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
      if (base_pointers_restrict
	  && dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Base pointers in offloaded function are restrict\n");
    }

  scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }
}

/* Scan an OpenMP teams directive.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
}

/* Check nesting restrictions.  */

static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
    /* GRID_BODY is an artificial construct, nesting rules will be checked in
       the original copy of its contents.  */
    return true;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (!(is_gimple_omp (stmt)
	&& is_gimple_omp_oacc (stmt))
      /* Except for atomic codes that we share with OpenMP.  */
      && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	{
	  c = NULL_TREE;
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<for simd%> region");
		      return false;
		    }
		  return true;
		}
	    }
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than %<#pragma omp ordered simd%>"
		    " may not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
		   && (gimple_omp_for_kind (stmt)
		       != GF_OMP_FOR_KIND_GRID_LOOP)))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%> or %<parallel%> regions are "
			"allowed to be strictly nested inside %<teams%> "
			"region");
	      return false;
	    }
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		    ok = true;
		    break;

		  default:
		    break;
		  }

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "#pragma omp cancel"
	      : "#pragma omp cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "#pragma omp parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "#pragma omp for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<#pragma omp cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<#pragma omp cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "#pragma omp sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<#pragma omp cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<#pragma omp cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
		bad = "#pragma omp task";
	      else
		{
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<critical%>, %<ordered%>, "
			  "%<master%>, explicit %<task%> or %<taskloop%> "
			  "region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<critical%>, %<ordered%>, "
		      "%<master%>, explicit %<task%> or %<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%<master%> region may not be closely nested inside "
		      "of work-sharing, explicit %<task%> or %<taskloop%> "
		      "region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    {
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }
	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
	      || kind == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree oclause;
	      /* Look for containing ordered(N) loop.  */
	      if (ctx == NULL
		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || (oclause
			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside an %<ordered%> "
			    "loop");
		  return false;
		}
	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside a loop with "
			    "%<ordered%> clause with a parameter");
		  return false;
		}
	    }
	  else
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"invalid depend kind in omp %<ordered%> %<depend%>");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region");
	      return false;
	    }
	  return true;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED) == NULL)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
		= dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL
	  || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	  || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
	{
	  error_at (gimple_location (stmt),
		    "%<teams%> construct not closely nested inside of "
		    "%<target%> construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
	      stmt_name = "enter/exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		warning_at (gimple_location (stmt), 0,
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
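
/* An example of a violation rejected above (a sketch):

     #pragma omp critical
     {
       #pragma omp barrier
     }

   By this point the barrier is the BUILT_IN_GOMP_BARRIER call emitted
   during gimplification, which is why GIMPLE_CALL is checked along the
   same path as the work-sharing constructs.  */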

/* Helper function scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, t);
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}

/* Return true if FNDECL is a setjmp or a longjmp.  */

static bool
setjmp_or_longjmp_p (const_tree fndecl)
{
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
    return true;

  tree declname = DECL_NAME (fndecl);
  if (!declname)
    return false;
  const char *name = IDENTIFIER_POINTER (declname);
  return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
}

/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  if (setjmp_or_longjmp_p (fndecl)
	      && ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside simd construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	}
    }
  if (remove)
    {
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ())
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_GRID_BODY:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
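
/* setjmp/longjmp are rejected inside simd constructs above because the
   loop body may be vectorized, or outlined for SIMT execution, and a
   non-local jump out of (or back into) one particular lane has no
   usable semantics there.  */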

/* Scan all the statements starting at the current statement.  CTX
   contains context information about the OMP directives and
   clauses found during the scan.  */

static void
scan_omp (gimple_seq *body_p, omp_context *ctx)
{
  location_t saved_location;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  saved_location = input_location;
  walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
  input_location = saved_location;
}

/* Re-gimplification and code generation routines.  */

/* If a context was created for STMT when it was scanned, return it.  */

static omp_context *
maybe_lookup_ctx (gimple *stmt)
{
  splay_tree_node n;
  n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
  return n ? (omp_context *) n->value : NULL;
}

/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context.  Suppose that we have the
   following parallel nesting (variable UIDs shown for clarity):

	iD.1562 = 0;
	#omp parallel shared(iD.1562)		-> outer parallel
	  iD.1562 = iD.1562 + 1;

	  #omp parallel shared (iD.1562)	-> inner parallel
	     iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

	outer parallel		.omp_data_s.1.i -> iD.1562
	inner parallel		.omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it.  So, in
   this case we would have:

	iD.1562 = 0;
	.omp_data_o.1.i = iD.1562;
	#omp parallel shared(iD.1562)		-> outer parallel
	  .omp_data_i.1 = &.omp_data_o.1
	  .omp_data_i.1->i = .omp_data_i.1->i + 1;

	  .omp_data_o.2.i = iD.1562;		-> **
	  #omp parallel shared(iD.1562)		-> inner parallel
	    .omp_data_i.2 = &.omp_data_o.2
	    .omp_data_i.2->i = .omp_data_i.2->i - 1;

	    ** This is a problem.  The symbol iD.1562 cannot be referenced
	       inside the body of the outer parallel region.  But since we are
	       emitting this copy operation while expanding the inner parallel
	       directive, we need to access the CTX structure of the outer
	       parallel directive to get the correct mapping:

		  .omp_data_o.2.i = .omp_data_i.1->i

   Since there may be other workshare or parallel directives enclosing
   the parallel directive, it may be necessary to walk up the context
   parent chain.  This is not a problem in general because nested
   parallelism happens only rarely.  */

static tree
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  gcc_assert (!ctx->is_nested || t || is_global_var (decl));

  return t ? t : decl;
}

/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
   in outer contexts.  */

static tree
maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t = NULL;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  return t ? t : decl;
}

/* Construct the initialization value for reduction operation OP.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
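
/* The identity values produced above are the usual ones, e.g.:

     +, -, |, ^, ||, !=	  0
     *, &&, ==		  1
     &			  ~0
     max		  minimum of TYPE (or -Inf when infinities are honored)
     min		  maximum of TYPE (or +Inf when infinities are honored)

   so that combining a partial result with the identity leaves the
   partial result unchanged.  */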

/* Construct the initialization value for reduction CLAUSE.  */

tree
omp_reduction_init (tree clause, tree type)
{
  return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
				OMP_CLAUSE_REDUCTION_CODE (clause), type);
}

/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  machine_mode mode, vmode;
  int vs = targetm.vectorize.autovectorize_vector_sizes ();
  if (vs)
    vs = 1 << floor_log2 (vs);
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
	 mode != VOIDmode;
	 mode = GET_MODE_WIDER_MODE (mode))
      {
	vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	while (vs
	       && GET_MODE_SIZE (vmode) < vs
	       && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
	  vmode = GET_MODE_2XWIDER_MODE (vmode);

	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type (type, GET_MODE_SIZE (vmode)
					/ GET_MODE_SIZE (mode));
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
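
/* For example, on a target whose preferred SIMD modes are 256 bits wide,
   the search above typically settles on 32 as the implementation defined
   alignment for an aligned clause without an explicit argument (the
   exact value depends on what the target hooks report).  */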

/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

struct omplow_simd_context {
  tree idx;
  tree lane;
  vec<tree, va_heap> simt_eargs;
  gimple_seq simt_dlist;
  int max_vf;
  bool is_simt;
};

/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
{
  if (sctx->max_vf == 0)
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (sctx->max_vf > 1)
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c
	      && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
		  || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
	    sctx->max_vf = 1;
	  else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
					  sctx->max_vf) == -1)
	    sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
	}
      if (sctx->max_vf > 1)
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (sctx->max_vf == 1)
    return false;

  if (sctx->is_simt)
    {
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_constructor (type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
    }
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
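
/* In the non-SIMT case each privatized DECL is backed by an "omp simd
   array" of MAX_VF elements, roughly:

     T D.simdarr[max_vf];
     ... D.simdarr[idx]   (IVAR, used inside the loop body)
     ... D.simdarr[lane]  (LVAR, used outside the loop proper)

   which the vectorizer can later replace by one vector element per
   lane.  */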

/* Helper function of lower_rec_input_clauses.  For a reference
   in simd reduction, add an underlying variable it will reference.  */

static void
handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
{
  tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
  if (TREE_CONSTANT (z))
    {
      z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
			      get_name (new_vard));
      gimple_add_tmp_var (z);
      TREE_ADDRESSABLE (z) = 1;
      z = build_fold_addr_expr_loc (loc, z);
      gimplify_assign (new_vard, z, ilist);
    }
}

/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
   from the receiver (aka child) side and initializers for REFERENCE_TYPE
   private variables.  Initialization statements go in ILIST, while calls
   to destructors go in DLIST.  */

static void
lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
			 omp_context *ctx, struct omp_for_data *fd)
{
  tree c, dtor, copyin_seq, x, ptr;
  bool copyin_by_ref = false;
  bool lastprivate_firstprivate = false;
  bool reduction_omp_orig_ref = false;
  int pass;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
  omplow_simd_context sctx = omplow_simd_context ();
  tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
  tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
  gimple_seq llist[3] = { };

  copyin_seq = NULL;
  sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);

  /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
     with data sharing clauses referencing variable sized vars.  That
     is unnecessarily hard to support and very unlikely to result in
     vectorized code anyway.  */
  if (is_simd)
    for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
	    sctx.max_vf = 1;
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LASTPRIVATE:
	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
	    sctx.max_vf = 1;
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
	    sctx.max_vf = 1;
	  break;
	default:
	  continue;
	}

  /* Add a placeholder for simduid.  */
  if (sctx.is_simt && sctx.max_vf != 1)
    sctx.simt_eargs.safe_push (NULL_TREE);

  /* Do all the fixed sized types in the first pass, and the variable sized
     types in the second pass.  This makes sure that the scalar arguments to
     the variable sized types are processed before we use them in the
     variable sized operations.  */
  for (pass = 0; pass < 2; ++pass)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	{
	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
	  tree var, new_var;
	  bool by_ref;
	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);

	  switch (c_kind)
	    {
	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
		continue;
	      break;
	    case OMP_CLAUSE_SHARED:
	      /* Ignore shared directives in teams construct.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
		continue;
	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
		{
		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
			      || is_global_var (OMP_CLAUSE_DECL (c)));
		  continue;
		}
	      /* FALLTHRU */
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_COPYIN:
	      break;
	    case OMP_CLAUSE_LINEAR:
	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		lastprivate_firstprivate = true;
	      break;
	    case OMP_CLAUSE_REDUCTION:
	      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		reduction_omp_orig_ref = true;
	      break;
	    case OMP_CLAUSE__LOOPTEMP_:
	      /* Handle _looptemp_ clauses only on parallel/task.  */
	      if (fd)
		continue;
	      break;
	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		{
		  lastprivate_firstprivate = true;
		  if (pass != 0 || is_taskloop_ctx (ctx))
		    continue;
		}
	      /* Even without corresponding firstprivate, if
		 decl is Fortran allocatable, it needs outer var
		 reference.  */
	      else if (pass == 0
		       && lang_hooks.decls.omp_private_outer_ref
							(OMP_CLAUSE_DECL (c)))
		lastprivate_firstprivate = true;
	      break;
	    case OMP_CLAUSE_ALIGNED:
	      if (pass == 0)
		continue;
	      var = OMP_CLAUSE_DECL (c);
	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
		  && !is_global_var (var))
		{
		  new_var = maybe_lookup_decl (var, ctx);
		  if (new_var == NULL_TREE)
		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
		  tree alarg = omp_clause_aligned_alignment (c);
		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
		  gimplify_and_add (x, ilist);
		}
	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		       && is_global_var (var))
		{
		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
		  new_var = lookup_decl (var, ctx);
		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  t = build_fold_addr_expr_loc (clause_loc, t);
		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
		  tree alarg = omp_clause_aligned_alignment (c);
		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
		  t = fold_convert_loc (clause_loc, ptype, t);
		  x = create_tmp_var (ptype);
		  t = build2 (MODIFY_EXPR, ptype, x, t);
		  gimplify_and_add (t, ilist);
		  t = build_simple_mem_ref_loc (clause_loc, x);
		  SET_DECL_VALUE_EXPR (new_var, t);
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      continue;
	    default:
	      continue;
	    }

	  new_var = var = OMP_CLAUSE_DECL (c);
	  if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
	    {
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == INDIRECT_REF
		  || TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      new_var = var;
	    }
	  if (c_kind != OMP_CLAUSE_COPYIN)
	    new_var = lookup_decl (var, ctx);

	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
	    {
	      if (pass != 0)
		continue;
	    }
	  /* C/C++ array section reductions.  */
	  else if (c_kind == OMP_CLAUSE_REDUCTION
		   && var != OMP_CLAUSE_DECL (c))
	    {
	      if (pass == 0)
		continue;

	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (orig_var, 1);
		  b = maybe_lookup_decl (b, ctx);
		  if (b == NULL)
		    {
		      b = TREE_OPERAND (orig_var, 1);
		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		    }
		  if (integer_zerop (bias))
		    bias = b;
		  else
		    {
		      bias = fold_convert_loc (clause_loc,
					       TREE_TYPE (b), bias);
		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					      TREE_TYPE (b), b, bias);
		    }
		  orig_var = TREE_OPERAND (orig_var, 0);
		}
	      if (TREE_CODE (orig_var) == INDIRECT_REF
		  || TREE_CODE (orig_var) == ADDR_EXPR)
		orig_var = TREE_OPERAND (orig_var, 0);
	      tree d = OMP_CLAUSE_DECL (c);
	      tree type = TREE_TYPE (d);
	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      const char *name = get_name (orig_var);
	      if (TREE_CONSTANT (v))
		{
		  x = create_tmp_var_raw (type, name);
		  gimple_add_tmp_var (x);
		  TREE_ADDRESSABLE (x) = 1;
		  x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else
		{
		  tree atmp
		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  tree t = maybe_lookup_decl (v, ctx);
		  if (t)
		    v = t;
		  else
		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
				       TREE_TYPE (v), v,
				       build_int_cst (TREE_TYPE (v), 1));
		  t = fold_build2_loc (clause_loc, MULT_EXPR,
				       TREE_TYPE (v), t,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
		  x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
		}

	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      x = fold_convert_loc (clause_loc, ptype, x);
	      tree y = create_tmp_var (ptype, name);
	      gimplify_assign (y, x, ilist);
	      x = y;
	      tree yb = y;

	      if (!integer_zerop (bias))
		{
		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
					   bias);
		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
					 x);
		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
					pointer_sized_int_node, yb, bias);
		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
		  yb = create_tmp_var (ptype, name);
		  gimplify_assign (yb, x, ilist);
		  x = yb;
		}

	      d = TREE_OPERAND (d, 0);
	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
		d = TREE_OPERAND (d, 0);
	      if (TREE_CODE (d) == ADDR_EXPR)
		{
		  if (orig_var != var)
		    {
		      gcc_assert (is_variable_sized (orig_var));
		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
					    x);
		      gimplify_assign (new_var, x, ilist);
		      tree new_orig_var = lookup_decl (orig_var, ctx);
		      tree t = build_fold_indirect_ref (new_var);
		      DECL_IGNORED_P (new_var) = 0;
		      TREE_THIS_NOTRAP (t) = 1;
		      SET_DECL_VALUE_EXPR (new_orig_var, t);
		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
		    }
		  else
		    {
		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
				  build_int_cst (ptype, 0));
		      SET_DECL_VALUE_EXPR (new_var, x);
		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		    }
		}
	      else
		{
		  gcc_assert (orig_var == var);
		  if (TREE_CODE (d) == INDIRECT_REF)
		    {
		      x = create_tmp_var (ptype, name);
		      TREE_ADDRESSABLE (x) = 1;
		      gimplify_assign (x, yb, ilist);
		      x = build_fold_addr_expr_loc (clause_loc, x);
		    }
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  gimplify_assign (new_var, x, ilist);
		}
	      tree y1 = create_tmp_var (ptype, NULL);
	      gimplify_assign (y1, y, ilist);
	      tree i2 = NULL_TREE, y2 = NULL_TREE;
	      tree body2 = NULL_TREE, end2 = NULL_TREE;
	      tree y3 = NULL_TREE, y4 = NULL_TREE;
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
		{
		  y2 = create_tmp_var (ptype, NULL);
		  gimplify_assign (y2, y, ilist);
		  tree ref = build_outer_var_ref (var, ctx);
		  /* For ref build_outer_var_ref already performs this.  */
		  if (TREE_CODE (d) == INDIRECT_REF)
		    gcc_assert (omp_is_reference (var));
		  else if (TREE_CODE (d) == ADDR_EXPR)
		    ref = build_fold_addr_expr (ref);
		  else if (omp_is_reference (var))
		    ref = build_fold_addr_expr (ref);
		  ref = fold_convert_loc (clause_loc, ptype, ref);
		  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
		      && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		    {
		      y3 = create_tmp_var (ptype, NULL);
		      gimplify_assign (y3, unshare_expr (ref), ilist);
		    }
		  if (is_simd)
		    {
		      y4 = create_tmp_var (ptype, NULL);
		      gimplify_assign (y4, ref, dlist);
		    }
		}
	      tree i = create_tmp_var (TREE_TYPE (v), NULL);
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      tree end = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
	      if (y2)
		{
		  i2 = create_tmp_var (TREE_TYPE (v), NULL);
		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
		  body2 = create_artificial_label (UNKNOWN_LOCATION);
		  end2 = create_artificial_label (UNKNOWN_LOCATION);
		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
		}
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  SET_DECL_VALUE_EXPR (decl_placeholder,
				       build_simple_mem_ref (y1));
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  SET_DECL_VALUE_EXPR (placeholder,
				       y3 ? build_simple_mem_ref (y3)
				       : error_mark_node);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  x = lang_hooks.decls.omp_clause_default_ctor
				(c, build_simple_mem_ref (y1),
				 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
		  if (x)
		    gimplify_and_add (x, ilist);
		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		    {
		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (ilist, tseq);
		    }
		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		  if (is_simd)
		    {
		      SET_DECL_VALUE_EXPR (decl_placeholder,
					   build_simple_mem_ref (y2));
		      SET_DECL_VALUE_EXPR (placeholder,
					   build_simple_mem_ref (y4));
		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (dlist, tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    }
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
		  x = lang_hooks.decls.omp_clause_dtor
					(c, build_simple_mem_ref (y2));
		  if (x)
		    {
		      gimple_seq tseq = NULL;

		      dtor = x;
		      gimplify_stmt (&dtor, &tseq);
		      gimple_seq_add_seq (dlist, tseq);
		    }
		}
	      else
		{
		  x = omp_reduction_init (c, TREE_TYPE (type));
		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);

		  /* reduction(-:var) sums up the partial results, so it
		     acts identically to reduction(+:var).  */
		  if (code == MINUS_EXPR)
		    code = PLUS_EXPR;

		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
		  if (is_simd)
		    {
		      x = build2 (code, TREE_TYPE (type),
				  build_simple_mem_ref (y4),
				  build_simple_mem_ref (y2));
		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
		    }
		}
	      gimple *g
		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (ilist, g);
	      if (y3)
		{
		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  gimple_seq_add_stmt (ilist, g);
		}
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (ilist, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
	      gimple_seq_add_stmt (ilist, g);
	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
	      if (y2)
		{
		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  gimple_seq_add_stmt (dlist, g);
		  if (y4)
		    {
		      g = gimple_build_assign
					(y4, POINTER_PLUS_EXPR, y4,
					 TYPE_SIZE_UNIT (TREE_TYPE (type)));
		      gimple_seq_add_stmt (dlist, g);
		    }
		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
					   build_int_cst (TREE_TYPE (i2), 1));
		  gimple_seq_add_stmt (dlist, g);
		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
		  gimple_seq_add_stmt (dlist, g);
		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
		}
	      continue;
	    }
	  else if (is_variable_sized (var))
	    {
	      /* For variable sized types, we need to allocate the
		 actual storage here.  Call alloca and store the
		 result in the pointer decl that we created elsewhere.  */
	      if (pass == 0)
		continue;

	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
		{
		  gcall *stmt;
		  tree tmp, atmp;

		  ptr = DECL_VALUE_EXPR (new_var);
		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
		  ptr = TREE_OPERAND (ptr, 0);
		  gcc_assert (DECL_P (ptr));
		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));

		  /* void *tmp = __builtin_alloca */
		  atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  stmt = gimple_build_call (atmp, 2, x,
					    size_int (DECL_ALIGN (var)));
		  tmp = create_tmp_var_raw (ptr_type_node);
		  gimple_add_tmp_var (tmp);
		  gimple_call_set_lhs (stmt, tmp);

		  gimple_seq_add_stmt (ilist, stmt);

		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
		  gimplify_assign (ptr, x, ilist);
		}
	    }
	  else if (omp_is_reference (var))
	    {
	      /* For references that are being privatized for Fortran,
		 allocate new backing storage for the new pointer
		 variable.  This allows us to avoid changing all the
		 code that expects a pointer to something that expects
		 a direct variable.  */
	      if (pass == 0)
		continue;

	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
		{
		  x = build_receiver_ref (var, false, ctx);
		  x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else if (TREE_CONSTANT (x))
		{
		  /* For reduction in SIMD loop, defer adding the
		     initialization of the reference, because if we decide
		     to use SIMD array for it, the initialization could cause
		     expansion ICE.  */
		  if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
		    x = NULL_TREE;
		  else
		    {
		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					      get_name (var));
		      gimple_add_tmp_var (x);
		      TREE_ADDRESSABLE (x) = 1;
		      x = build_fold_addr_expr_loc (clause_loc, x);
		    }
		}
	      else
		{
		  tree atmp
		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
		  tree al = size_int (TYPE_ALIGN (rtype));
		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		}

	      if (x)
		{
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  gimplify_assign (new_var, x, ilist);
		}

	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	    }
	  else if (c_kind == OMP_CLAUSE_REDUCTION
		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      if (pass == 0)
		continue;
	    }
	  else if (pass != 0)
	    continue;

	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	      /* Ignore shared directives in teams construct.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
		continue;
	      /* Shared global vars are just accessed directly.  */
	      if (is_global_var (new_var))
		break;
	      /* For taskloop firstprivate/lastprivate, represented
		 as firstprivate and shared clause on the task, new_var
		 is the firstprivate var.  */
	      if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
		break;
	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
		 needs to be delayed until after fixup_child_record_type so
		 that we get the correct type during the dereference.  */
	      by_ref = use_pointer_for_field (var, ctx);
	      x = build_receiver_ref (var, by_ref, ctx);
	      SET_DECL_VALUE_EXPR (new_var, x);
	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;

	      /* ??? If VAR is not passed by reference, and the variable
		 hasn't been initialized yet, then we'll get a warning for
		 the store into the omp_data_s structure.  Ideally, we'd be
		 able to notice this and not store anything at all, but
		 we're generating code too early.  Suppress the warning.  */
	      if (!by_ref)
		TREE_NO_WARNING (var) = 1;
	      break;

	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		break;
	      /* FALLTHRU */

	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
		x = build_outer_var_ref (var, ctx);
	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
		{
		  if (is_task_ctx (ctx))
		    x = build_receiver_ref (var, false, ctx);
		  else
		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
		}
	      else
		x = NULL;
	    do_private:
	      tree nx;
	      nx = lang_hooks.decls.omp_clause_default_ctor
						(c, unshare_expr (new_var), x);
	      if (is_simd)
		{
		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
		  if ((TREE_ADDRESSABLE (new_var) || nx || y
		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar))
		    {
		      if (nx)
			x = lang_hooks.decls.omp_clause_default_ctor
						(c, unshare_expr (ivar), x);
		      if (x)
			gimplify_and_add (x, &llist[0]);
		      if (y)
			{
			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
			  if (y)
			    {
			      gimple_seq tseq = NULL;

			      dtor = y;
			      gimplify_stmt (&dtor, &tseq);
			      gimple_seq_add_seq (&llist[1], tseq);
			    }
			}
		      break;
		    }
		}
	      if (nx)
		gimplify_and_add (nx, ilist);
	      /* FALLTHRU */

	    do_dtor:
	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;

		  dtor = x;
		  gimplify_stmt (&dtor, &tseq);
		  gimple_seq_add_seq (dlist, tseq);
		}
	      break;

	    case OMP_CLAUSE_LINEAR:
	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		goto do_firstprivate;
	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		x = NULL;
	      else
		x = build_outer_var_ref (var, ctx);
	      goto do_private;

	    case OMP_CLAUSE_FIRSTPRIVATE:
	      if (is_task_ctx (ctx))
		{
		  if (omp_is_reference (var) || is_variable_sized (var))
		    goto do_dtor;
		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
									  ctx))
			   || use_pointer_for_field (var, NULL))
		    {
		      x = build_receiver_ref (var, false, ctx);
		      SET_DECL_VALUE_EXPR (new_var, x);
		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		      goto do_dtor;
		    }
		}
	    do_firstprivate:
	      x = build_outer_var_ref (var, ctx);
	      if (is_simd)
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		      && gimple_omp_for_combined_into_p (ctx->stmt))
		    {
		      tree t = OMP_CLAUSE_LINEAR_STEP (c);
		      tree stept = TREE_TYPE (t);
		      tree ct = omp_find_clause (clauses,
						 OMP_CLAUSE__LOOPTEMP_);
		      gcc_assert (ct);
		      tree l = OMP_CLAUSE_DECL (ct);
		      tree n1 = fd->loop.n1;
		      tree step = fd->loop.step;
		      tree itype = TREE_TYPE (l);
		      if (POINTER_TYPE_P (itype))
			itype = signed_type_for (itype);
		      l = fold_build2 (MINUS_EXPR, itype, l, n1);
		      if (TYPE_UNSIGNED (itype)
			  && fd->loop.cond_code == GT_EXPR)
			l = fold_build2 (TRUNC_DIV_EXPR, itype,
					 fold_build1 (NEGATE_EXPR, itype, l),
					 fold_build1 (NEGATE_EXPR,
						      itype, step));
		      else
			l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
		      t = fold_build2 (MULT_EXPR, stept,
				       fold_convert (stept, l), t);

		      if (OMP_CLAUSE_LINEAR_ARRAY (c))
			{
			  x = lang_hooks.decls.omp_clause_linear_ctor
							(c, new_var, x, t);
			  gimplify_and_add (x, ilist);
			  goto do_dtor;
			}

		      if (POINTER_TYPE_P (TREE_TYPE (x)))
			x = fold_build2 (POINTER_PLUS_EXPR,
					 TREE_TYPE (x), x, t);
		      else
			x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
		    }

		  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
		       || TREE_ADDRESSABLE (new_var))
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar))
		    {
		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
			{
			  tree iv = create_tmp_var (TREE_TYPE (new_var));
			  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
			  gimplify_and_add (x, ilist);
			  gimple_stmt_iterator gsi
			    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
			  gassign *g
			    = gimple_build_assign (unshare_expr (lvar), iv);
			  gsi_insert_before_without_update (&gsi, g,
							    GSI_SAME_STMT);
			  tree t = OMP_CLAUSE_LINEAR_STEP (c);
			  enum tree_code code = PLUS_EXPR;
			  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
			    code = POINTER_PLUS_EXPR;
			  g = gimple_build_assign (iv, code, iv, t);
			  gsi_insert_before_without_update (&gsi, g,
							    GSI_SAME_STMT);
			  break;
			}
		      x = lang_hooks.decls.omp_clause_copy_ctor
						(c, unshare_expr (ivar), x);
		      gimplify_and_add (x, &llist[0]);
		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
		      if (x)
			{
			  gimple_seq tseq = NULL
4009 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
4013 else if (is_variable_sized (var
))
4015 /* For variable sized types, we need to allocate the
4016 actual storage here. Call alloca and store the
4017 result in the pointer decl that we created elsewhere. */
4021 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
4026 ptr
= DECL_VALUE_EXPR (new_var
);
4027 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
4028 ptr
= TREE_OPERAND (ptr
, 0);
4029 gcc_assert (DECL_P (ptr
));
4030 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
4032 /* void *tmp = __builtin_alloca */
4033 atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4034 stmt
= gimple_build_call (atmp
, 2, x
,
4035 size_int (DECL_ALIGN (var
)));
4036 tmp
= create_tmp_var_raw (ptr_type_node
);
4037 gimple_add_tmp_var (tmp
);
4038 gimple_call_set_lhs (stmt
, tmp
);
4040 gimple_seq_add_stmt (ilist
, stmt
);
4042 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
4043 gimplify_assign (ptr
, x
, ilist
);
4046 else if (omp_is_reference (var
))
4048 /* For references that are being privatized for Fortran,
4049 allocate new backing storage for the new pointer
4050 variable. This allows us to avoid changing all the
4051 code that expects a pointer to something that expects
4052 a direct variable. */
4056 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
4057 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
4059 x
= build_receiver_ref (var
, false, ctx
);
4060 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4062 else if (TREE_CONSTANT (x
))
4064 /* For reduction in SIMD loop, defer adding the
4065 initialization of the reference, because if we decide
4066 to use SIMD array for it, the initilization could cause
4068 if (c_kind
== OMP_CLAUSE_REDUCTION
&& is_simd
)
4072 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
4074 gimple_add_tmp_var (x
);
4075 TREE_ADDRESSABLE (x
) = 1;
4076 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4082 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4083 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
4084 tree al
= size_int (TYPE_ALIGN (rtype
));
4085 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
4090 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4091 gimplify_assign (new_var
, x
, ilist
);
4094 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
4096 else if (c_kind
== OMP_CLAUSE_REDUCTION
4097 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4105 switch (OMP_CLAUSE_CODE (c
))
4107 case OMP_CLAUSE_SHARED
:
4108 /* Ignore shared directives in teams construct. */
4109 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
4111 /* Shared global vars are just accessed directly. */
4112 if (is_global_var (new_var
))
4114 /* For taskloop firstprivate/lastprivate, represented
4115 as firstprivate and shared clause on the task, new_var
4116 is the firstprivate var. */
4117 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
4119 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4120 needs to be delayed until after fixup_child_record_type so
4121 that we get the correct type during the dereference. */
4122 by_ref
= use_pointer_for_field (var
, ctx
);
4123 x
= build_receiver_ref (var
, by_ref
, ctx
);
4124 SET_DECL_VALUE_EXPR (new_var
, x
);
4125 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4127 /* ??? If VAR is not passed by reference, and the variable
4128 hasn't been initialized yet, then we'll get a warning for
4129 the store into the omp_data_s structure. Ideally, we'd be
4130 able to notice this and not store anything at all, but
4131 we're generating code too early. Suppress the warning. */
4133 TREE_NO_WARNING (var
) = 1;
	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		break;
	      /* FALLTHRU */

	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
		x = build_outer_var_ref (var, ctx);
	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
		{
		  if (is_task_ctx (ctx))
		    x = build_receiver_ref (var, false, ctx);
		  else
		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
		}
	      else
		x = NULL;
	    do_private:
	      tree nx;
	      nx = lang_hooks.decls.omp_clause_default_ctor
					    (c, unshare_expr (new_var), x);
	      if (is_simd)
		{
		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
		  if ((TREE_ADDRESSABLE (new_var) || nx || y
		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar))
		    {
		      if (nx)
			x = lang_hooks.decls.omp_clause_default_ctor
					      (c, unshare_expr (ivar), x);
		      if (nx && x)
			gimplify_and_add (x, &llist[0]);
		      if (y)
			{
			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
			  if (y)
			    {
			      gimple_seq tseq = NULL;

			      dtor = y;
			      gimplify_stmt (&dtor, &tseq);
			      gimple_seq_add_seq (&llist[1], tseq);
			    }
			}
		      break;
		    }
		}
	      if (nx)
		gimplify_and_add (nx, ilist);
	      /* FALLTHRU */

	    do_dtor:
	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;

		  dtor = x;
		  gimplify_stmt (&dtor, &tseq);
		  gimple_seq_add_seq (dlist, tseq);
		}
	      break;
	    case OMP_CLAUSE_LINEAR:
	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		goto do_firstprivate;
	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		x = NULL;
	      else
		x = build_outer_var_ref (var, ctx);
	      goto do_private;
	    case OMP_CLAUSE_FIRSTPRIVATE:
	      if (is_task_ctx (ctx))
		{
		  if (omp_is_reference (var) || is_variable_sized (var))
		    goto do_dtor;
		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
									  ctx))
			   || use_pointer_for_field (var, NULL))
		    {
		      x = build_receiver_ref (var, false, ctx);
		      SET_DECL_VALUE_EXPR (new_var, x);
		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		      goto do_dtor;
		    }
		}
	    do_firstprivate:
	      x = build_outer_var_ref (var, ctx);
	      if (is_simd)
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		      && gimple_omp_for_combined_into_p (ctx->stmt))
		    {
		      tree t = OMP_CLAUSE_LINEAR_STEP (c);
		      tree stept = TREE_TYPE (t);
		      tree ct = omp_find_clause (clauses,
						 OMP_CLAUSE__LOOPTEMP_);
		      gcc_assert (ct);
		      tree l = OMP_CLAUSE_DECL (ct);
		      tree n1 = fd->loop.n1;
		      tree step = fd->loop.step;
		      tree itype = TREE_TYPE (l);
		      if (POINTER_TYPE_P (itype))
			itype = signed_type_for (itype);
		      l = fold_build2 (MINUS_EXPR, itype, l, n1);
		      if (TYPE_UNSIGNED (itype)
			  && fd->loop.cond_code == GT_EXPR)
			l = fold_build2 (TRUNC_DIV_EXPR, itype,
					 fold_build1 (NEGATE_EXPR, itype, l),
					 fold_build1 (NEGATE_EXPR, itype,
						      step));
		      else
			l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
		      t = fold_build2 (MULT_EXPR, stept,
				       fold_convert (stept, l), t);

		      if (OMP_CLAUSE_LINEAR_ARRAY (c))
			{
			  x = lang_hooks.decls.omp_clause_linear_ctor
							(c, new_var, x, t);
			  gimplify_and_add (x, ilist);
			  goto do_dtor;
			}

		      if (POINTER_TYPE_P (TREE_TYPE (x)))
			x = fold_build2 (POINTER_PLUS_EXPR,
					 TREE_TYPE (x), x, t);
		      else
			x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
		    }
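		  /* Worked example of the adjustment above (illustrative
		     only): for "linear(x:3)" in a combined construct whose
		     inner chunk starts at the _looptemp_ value L, with
		     outer bounds N1 and STEP, the iterations already done
		     are l = (L - N1) / STEP, so the privatized X must start
		     at x + l * 3.  E.g. N1 = 0, STEP = 2, L = 8 gives l = 4
		     and a starting offset of 12.  */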
		  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
		       || TREE_ADDRESSABLE (new_var))
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar))
		    {
		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
			{
			  tree iv = create_tmp_var (TREE_TYPE (new_var));
			  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
			  gimplify_and_add (x, ilist);
			  gimple_stmt_iterator gsi
			    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
			  gassign *g
			    = gimple_build_assign (unshare_expr (lvar), iv);
			  gsi_insert_before_without_update (&gsi, g,
							    GSI_SAME_STMT);
			  tree t = OMP_CLAUSE_LINEAR_STEP (c);
			  enum tree_code code = PLUS_EXPR;
			  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
			    code = POINTER_PLUS_EXPR;
			  g = gimple_build_assign (iv, code, iv, t);
			  gsi_insert_before_without_update (&gsi, g,
							    GSI_SAME_STMT);
			  break;
			}
		      x = lang_hooks.decls.omp_clause_copy_ctor
					      (c, unshare_expr (ivar), x);
		      gimplify_and_add (x, &llist[0]);
		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
		      if (x)
			{
			  gimple_seq tseq = NULL;

			  dtor = x;
			  gimplify_stmt (&dtor, &tseq);
			  gimple_seq_add_seq (&llist[1], tseq);
			}
		      break;
		    }
		}
	      x = lang_hooks.decls.omp_clause_copy_ctor
					    (c, unshare_expr (new_var), x);
	      gimplify_and_add (x, ilist);
	      goto do_dtor;
	    case OMP_CLAUSE__LOOPTEMP_:
	      gcc_assert (is_taskreg_ctx (ctx));
	      x = build_outer_var_ref (var, ctx);
	      x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
	      gimplify_and_add (x, ilist);
	      break;
	    case OMP_CLAUSE_COPYIN:
	      by_ref = use_pointer_for_field (var, NULL);
	      x = build_receiver_ref (var, by_ref, ctx);
	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
	      append_to_statement_list (x, &copyin_seq);
	      copyin_by_ref |= by_ref;
	      break;
	    case OMP_CLAUSE_REDUCTION:
	      /* OpenACC reductions are initialized using the
		 GOACC_REDUCTION internal function.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		break;
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  gimple_seq tseq;
		  x = build_outer_var_ref (var, ctx);

		  if (omp_is_reference (var)
		      && !useless_type_conversion_p (TREE_TYPE (placeholder),
						     TREE_TYPE (x)))
		    x = build_fold_addr_expr_loc (clause_loc, x);
		  SET_DECL_VALUE_EXPR (placeholder, x);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  tree new_vard = new_var;
		  if (omp_is_reference (var))
		    {
		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
		      new_vard = TREE_OPERAND (new_var, 0);
		      gcc_assert (DECL_P (new_vard));
		    }
		  if (is_simd
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar))
		    {
		      if (new_vard == new_var)
			{
			  gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
			  SET_DECL_VALUE_EXPR (new_var, ivar);
			}
		      else
			{
			  SET_DECL_VALUE_EXPR (new_vard,
					       build_fold_addr_expr (ivar));
			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
			}
		      x = lang_hooks.decls.omp_clause_default_ctor
				(c, unshare_expr (ivar),
				 build_outer_var_ref (var, ctx));
		      if (x)
			gimplify_and_add (x, &llist[0]);
		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
			{
			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			  lower_omp (&tseq, ctx);
			  gimple_seq_add_seq (&llist[0], tseq);
			}
		      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (&llist[1], tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		      if (new_vard == new_var)
			SET_DECL_VALUE_EXPR (new_var, lvar);
		      else
			SET_DECL_VALUE_EXPR (new_vard,
					     build_fold_addr_expr (lvar));
		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
		      if (x)
			{
			  tseq = NULL;
			  dtor = x;
			  gimplify_stmt (&dtor, &tseq);
			  gimple_seq_add_seq (&llist[1], tseq);
			}
		      break;
		    }
		  /* If this is a reference to constant size reduction var
		     with placeholder, we haven't emitted the initializer
		     for it because it is undesirable if SIMD arrays are used.
		     But if they aren't used, we need to emit the deferred
		     initialization now.  */
		  else if (omp_is_reference (var) && is_simd)
		    handle_simd_reference (clause_loc, new_vard, ilist);
		  x = lang_hooks.decls.omp_clause_default_ctor
				(c, unshare_expr (new_var),
				 build_outer_var_ref (var, ctx));
		  if (x)
		    gimplify_and_add (x, ilist);
		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		    {
		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (ilist, tseq);
		    }
		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		  if (is_simd)
		    {
		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (dlist, tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    }
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		  goto do_dtor;
		}
	      else
		{
		  x = omp_reduction_init (c, TREE_TYPE (new_var));
		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);

		  /* reduction(-:var) sums up the partial results, so it
		     acts identically to reduction(+:var).  */
		  if (code == MINUS_EXPR)
		    code = PLUS_EXPR;

		  tree new_vard = new_var;
		  if (is_simd && omp_is_reference (var))
		    {
		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
		      new_vard = TREE_OPERAND (new_var, 0);
		      gcc_assert (DECL_P (new_vard));
		    }
		  if (is_simd
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar))
		    {
		      tree ref = build_outer_var_ref (var, ctx);

		      gimplify_assign (unshare_expr (ivar), x, &llist[0]);

		      if (sctx.is_simt)
			{
			  if (!simt_lane)
			    simt_lane = create_tmp_var (unsigned_type_node);
			  x = build_call_expr_internal_loc
			    (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
			     TREE_TYPE (ivar), 2, ivar, simt_lane);
			  x = build2 (code, TREE_TYPE (ivar), ivar, x);
			  gimplify_assign (ivar, x, &llist[2]);
			}
		      x = build2 (code, TREE_TYPE (ref), ref, ivar);
		      ref = build_outer_var_ref (var, ctx);
		      gimplify_assign (ref, x, &llist[1]);

		      if (new_vard != new_var)
			{
			  SET_DECL_VALUE_EXPR (new_vard,
					       build_fold_addr_expr (lvar));
			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
			}
		    }
		  else
		    {
		      if (omp_is_reference (var) && is_simd)
			handle_simd_reference (clause_loc, new_vard, ilist);
		      gimplify_assign (new_var, x, ilist);
		      if (is_simd)
			{
			  tree ref = build_outer_var_ref (var, ctx);

			  x = build2 (code, TREE_TYPE (ref), ref, new_var);
			  ref = build_outer_var_ref (var, ctx);
			  gimplify_assign (ref, x, dlist);
			}
		    }
		}
	      break;

	    default:
	      gcc_unreachable ();
	    }
	}
    }
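  /* Illustrative user-level example (not compiler code): for

       #pragma omp parallel for reduction(-:sum)
       for (i = 0; i < n; i++) sum -= a[i];

     each thread's partial result is combined into SUM with '+', exactly
     as for reduction(+:sum); hence the MINUS_EXPR to PLUS_EXPR mapping
     in the reduction handling above.  */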
  if (sctx.max_vf == 1)
    sctx.is_simt = false;

  if (sctx.lane || sctx.is_simt)
    {
      uid = create_tmp_var (ptr_type_node, "simduid");
      /* Don't want uninit warnings on simduid, it is always uninitialized,
	 but we use it not for the value, but for the DECL_UID only.  */
      TREE_NO_WARNING (uid) = 1;
      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
      OMP_CLAUSE__SIMDUID__DECL (c) = uid;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
      gimple_omp_for_set_clauses (ctx->stmt, c);
    }
  /* Emit calls denoting privatized variables and initializing a pointer to
     structure that holds private variables as fields after ompdevlow pass.  */
  if (sctx.is_simt)
    {
      sctx.simt_eargs[0] = uid;
      gimple *g
	= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
      gimple_call_set_lhs (g, uid);
      gimple_seq_add_stmt (ilist, g);
      sctx.simt_eargs.release ();

      simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
      gimple_call_set_lhs (g, simtrec);
      gimple_seq_add_stmt (ilist, g);
    }
  if (sctx.lane)
    {
      gimple *g
	= gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
      gimple_call_set_lhs (g, sctx.lane);
      gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
      gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (sctx.lane, INTEGER_CST,
			       build_int_cst (unsigned_type_node, 0));
      gimple_seq_add_stmt (ilist, g);
      /* Emit reductions across SIMT lanes in log_2(simt_vf) steps.  */
      if (llist[2])
	{
	  tree simt_vf = create_tmp_var (unsigned_type_node);
	  g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
	  gimple_call_set_lhs (g, simt_vf);
	  gimple_seq_add_stmt (dlist, g);

	  tree t = build_int_cst (unsigned_type_node, 1);
	  g = gimple_build_assign (simt_lane, INTEGER_CST, t);
	  gimple_seq_add_stmt (dlist, g);

	  t = build_int_cst (unsigned_type_node, 0);
	  g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
	  gimple_seq_add_stmt (dlist, g);

	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree header = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (dlist, gimple_build_goto (header));
	  gimple_seq_add_stmt (dlist, gimple_build_label (body));

	  gimple_seq_add_seq (dlist, llist[2]);

	  g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane,
				   integer_one_node);
	  gimple_seq_add_stmt (dlist, g);

	  gimple_seq_add_stmt (dlist, gimple_build_label (header));
	  g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
	  gimple_seq_add_stmt (dlist, g);

	  gimple_seq_add_stmt (dlist, gimple_build_label (end));
	}
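      /* Illustrative sketch of the loop built above (an assumption about
	 the generated shape, shown as C):

	   simt_lane = 1;
	   goto header;
	 body:
	   ivar = ivar OP .GOMP_SIMT_XCHG_BFLY (ivar, simt_lane);
	   simt_lane <<= 1;
	 header:
	   if (simt_lane < simt_vf) goto body; else goto end;
	 end:

	 i.e. a log2 (simt_vf)-step butterfly that combines partial
	 results across SIMT lanes.  */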
      for (int i = 0; i < 2; i++)
	if (llist[i])
	  {
	    tree vf = create_tmp_var (unsigned_type_node);
	    g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
	    gimple_call_set_lhs (g, vf);
	    gimple_seq *seq = i == 0 ? ilist : dlist;
	    gimple_seq_add_stmt (seq, g);
	    tree t = build_int_cst (unsigned_type_node, 0);
	    g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
	    gimple_seq_add_stmt (seq, g);
	    tree body = create_artificial_label (UNKNOWN_LOCATION);
	    tree header = create_artificial_label (UNKNOWN_LOCATION);
	    tree end = create_artificial_label (UNKNOWN_LOCATION);
	    gimple_seq_add_stmt (seq, gimple_build_goto (header));
	    gimple_seq_add_stmt (seq, gimple_build_label (body));
	    gimple_seq_add_seq (seq, llist[i]);
	    t = build_int_cst (unsigned_type_node, 1);
	    g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
	    gimple_seq_add_stmt (seq, g);
	    gimple_seq_add_stmt (seq, gimple_build_label (header));
	    g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
	    gimple_seq_add_stmt (seq, g);
	    gimple_seq_add_stmt (seq, gimple_build_label (end));
	  }
    }
  if (sctx.is_simt)
    {
      gimple_seq_add_seq (dlist, sctx.simt_dlist);
      gcall *g
	= gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
      gimple_seq_add_stmt (dlist, g);
    }
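  /* Illustrative shape of each loop emitted just above (an assumption
     about the generated GIMPLE, shown as C):

       vf = .GOMP_SIMD_VF (simduid);
       for (idx = 0; idx < vf; idx++)
	 ...;   (per-lane constructors for i == 0, destructors for i == 1)

     so every element of each "omp simd array" is constructed before the
     SIMD loop and destructed after it.  */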
  /* The copyin sequence is not to be executed by the main thread, since
     that would result in self-copies.  Perhaps not visible to scalars,
     but it certainly is to C++ operator=.  */
  if (copyin_seq)
    {
      x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
			   0);
      x = build2 (NE_EXPR, boolean_type_node, x,
		  build_int_cst (TREE_TYPE (x), 0));
      x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
      gimplify_and_add (x, ilist);
    }
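  /* Illustrative expansion of the guard above (assumption, shown as C):

       if (omp_get_thread_num () != 0)
	 copyin_seq;   (copy the master's threadprivate values in)

     so thread 0, whose copies are the source, skips the self-copy.  */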
  /* If any copyin variable is passed by reference, we must ensure the
     master thread doesn't modify it before it is copied over in all
     threads.  Similarly for variables in both firstprivate and
     lastprivate clauses we need to ensure the lastprivate copying
     happens after firstprivate copying in all threads.  And similarly
     for UDRs if initializer expression refers to omp_orig.  */
  if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
    {
      /* Don't add any barrier for #pragma omp simd or
	 #pragma omp distribute.  */
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
	  || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
	gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
    }
  /* If max_vf is non-zero, then we can use only a vectorization factor
     up to the max_vf we chose.  So stick it into the safelen clause.  */
  if (sctx.max_vf)
    {
      tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				OMP_CLAUSE_SAFELEN);
      if (c == NULL_TREE
	  || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
	      && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
				   sctx.max_vf) == 1))
	{
	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
						       sctx.max_vf);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
	  gimple_omp_for_set_clauses (ctx->stmt, c);
	}
    }
}
/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  */

static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }
= false;
4694 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4695 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
4697 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
4698 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
4700 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
  if (predicate)
    {
      gcond *stmt;
      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	{
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	}
      else
	{
	  arm1 = predicate;
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	}
      if (maybe_simt)
	{
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
						 1, simtcond);
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (g, c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
				    label_true, label);
	}
      else
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }
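  /* Illustrative sketch of the SIMT guard above (assumption about the
     emitted shape, shown as C):

       simtcond = (int) PRED;
       any = .GOMP_SIMT_VOTE_ANY (simtcond);
       if (any != 0) goto label_true; else goto label;

     every lane takes the branch if the predicate holds in any lane;
     GOMP_SIMT_LAST_LANE / GOMP_SIMT_XCHG_IDX below then pick out the
     values of the one lane that executed the last iteration.  */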
  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	    {
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx->outer);
	    }
	  else
	    {
	      new_var = lookup_decl (var, ctx);
	      /* Avoid uninitialized warnings for lastprivate and
		 for linear iterators.  */
	      if (predicate
		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
		TREE_NO_WARNING (new_var) = 1;
	    }
	  if (simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (!maybe_simt
		  && TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node);
		      gcall *g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		}
	      else if (maybe_simt && VAR_P (val)
		       && lookup_attribute ("omp simt private",
					    DECL_ATTRIBUTES (val)))
		{
		  if (simtlast == NULL)
		    {
		      simtlast = create_tmp_var (unsigned_type_node);
		      gcall *g = gimple_build_call_internal
			(IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		      gimple_call_set_lhs (g, simtlast);
		      gimple_seq_add_stmt (stmt_list, g);
		    }
		  x = build_call_expr_internal_loc
		    (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		     TREE_TYPE (val), 2, val, simtlast);
		  new_var = unshare_expr (new_var);
		  gimplify_assign (new_var, x, stmt_list);
		  new_var = unshare_expr (new_var);
		}
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
	    }

	  x = NULL_TREE;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
	    {
	      gcc_checking_assert (is_taskloop_ctx (ctx));
	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
							  ctx->outer->outer);
	      if (is_global_var (ovar))
		x = ovar;
	    }
	  if (!x)
	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
	  if (omp_is_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, stmt_list);
	}
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
}
/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
   (which might be a placeholder).  INNER is true if this is an inner
   axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
   join markers.  Generate the before-loop forking sequence in
   FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
   general form of these sequences is

     GOACC_REDUCTION_SETUP
     GOACC_FORK
     GOACC_REDUCTION_INIT
     ...
     GOACC_REDUCTION_FINI
     GOACC_JOIN
     GOACC_REDUCTION_TEARDOWN.  */

static void
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *join, gimple_seq *fork_seq,
		       gimple_seq *join_seq, omp_context *ctx)
{
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	  rcode = PLUS_EXPR;
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	if (!var)
	  var = orig;

	incoming = outgoing = var;
	if (!inner)
	  {
	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
	      {
		enum gimple_code type = gimple_code (probe->stmt);
		tree cls;

		switch (type)
		  {
		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);
		    break;

		  case GIMPLE_OMP_TARGET:
		    if (gimple_omp_target_kind (probe->stmt)
			!= GF_OMP_TARGET_KIND_OACC_PARALLEL)
		      goto do_lookup;

		    cls = gimple_omp_target_clauses (probe->stmt);
		    break;

		  default:
		    goto do_lookup;
		  }

		outer = probe;
		for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		    {
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		    }
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))
		    {
		      is_private = true;
		      goto do_lookup;
		    }
	      }

	  do_lookup:
	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
	      {
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_is_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		outgoing = var;
		incoming = omp_reduction_init_op (loc, rcode, type);
	      }
	    else
	      {
		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		tree t = NULL_TREE;
		omp_context *c = ctx->outer;
		while (c && !t)
		  {
		    t = maybe_lookup_decl (orig, c);
		    c = c->outer;
		  }
		incoming = outgoing = (t ? t : orig);
	      }

	  has_outer_reduction:;
	  }
	if (!ref_to_res)
	  ref_to_res = integer_zero_node;

	if (omp_is_reference (orig))
	  {
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	    if (!inner)
	      {
		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
	      }

	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);
	  }
	else
	  v1 = v2 = v3 = var;
	/* Determine position in reduction buffer, which may be used
	   by target.  */
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (var));
	unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);
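	/* Worked example of the align-up above (illustrative): with
	   offset == 6 and align == 4,
	     (6 + 4 - 1) & ~(4 - 1) == 9 & ~3 == 8,
	   so this variable's slot starts at byte 8, and OFFSET then
	   advances by the mode size.  */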
	if (!init_code)
	  {
	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);
	  }

	tree setup_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  incoming, level, op, off);
	tree init_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  v1, level, op, off);
	tree fini_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  v2, level, op, off);
	tree teardown_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, v3, level, op, off);

	gimplify_assign (v1, setup_call, &before_fork);
	gimplify_assign (v2, init_call, &after_fork);
	gimplify_assign (v3, fini_call, &before_join);
	gimplify_assign (outgoing, teardown_call, &after_join);
      }
  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
  if (fork)
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
  if (join)
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
}
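/* Illustrative emitted sequence (an assumption about the shape, shown
   as pseudo-GIMPLE) for "reduction(+:sum)" on one axis:

     v1 = GOACC_REDUCTION (SETUP, ref_to_res, sum, level, +, off);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, +, off);
     ... loop body updating the private copy ...
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, +, off);
     GOACC_JOIN
     sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, +, off);  */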
/* Generate code to implement the REDUCTION clauses.  */

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;
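  /* Illustrative contrast (an assumption about the emitted shape): a
     lone scalar "reduction(+:s)" becomes a single atomic update,

       #pragma omp atomic
       s += s_private;

     while multiple clauses, UDRs or array sections are instead merged
     between GOMP_atomic_start () and GOMP_atomic_end () calls.  */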
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
	continue;

      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	{
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == INDIRECT_REF
	      || TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;
      if (count == 1)
	{
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v), NULL);
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_is_reference (var) && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_is_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype, NULL);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype, NULL);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      x = build2 (code, TREE_TYPE (out), out, priv);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
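	  /* Illustrative shape of the merge loop built above (assumption,
	     shown as C) for "reduction(+:a[0:n])":

	       i = 0;
	     body:
	       *out += *priv;
	       priv++; out++; i++;
	       if (i <= max) goto body;
	     end:

	     i.e. the private array is folded element-wise into the outer
	     array.  */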
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_is_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }
  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
/* Generate code to implement the COPYPRIVATE clauses.  */

static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
			   omp_context *ctx)
{
  tree c;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
	{
	  x = build_fold_addr_expr_loc (clause_loc, new_var);
	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
	}
      gimplify_assign (ref, x, slist);

      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
	{
	  ref = fold_convert_loc (clause_loc,
				  build_pointer_type (TREE_TYPE (new_var)),
				  ref);
	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
	}
      if (omp_is_reference (var))
	{
	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
	  ref = build_simple_mem_ref_loc (clause_loc, ref);
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	}
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	  && TREE_CODE (val) == MEM_REF)
	{
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var))
	continue;

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    TREE_NO_WARNING (var) = 1;
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    do_out = !(by_ref || omp_is_reference (val));
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  x = build_sender_ref (ovar, ctx);
	  var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
		 or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */

static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels = MAX (levels, 1U);
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels = MAX (levels, 2U);
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels = MAX (levels, 3U);
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel (tgt))
    tag |= OLF_INDEPENDENT;

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning.  */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
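/* Illustrative marker emitted above (an assumption about the printed
   form) for "#pragma acc loop gang vector":

     ddvar = .UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag);

   where LEVELS would be 3 (gang and vector span levels 1 and 3) and TAG
   carries OLF_DIM_GANG | OLF_DIM_VECTOR, plus OLF_INDEPENDENT inside an
   OpenACC parallel region.  */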
/* Emit an OpenACC loop head or tail marker to SEQ.  LEVEL is the
   partitioning level of the enclosed region.  */

static void
lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
			tree tofollow, gimple_seq *seq)
{
  int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
		     : IFN_UNIQUE_OACC_TAIL_MARK);
  tree marker = build_int_cst (integer_type_node, marker_kind);
  int nargs = 2 + (tofollow != NULL_TREE);
  gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
					    marker, ddvar, tofollow);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);
}
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  */

static void
lower_oacc_head_tail (location_t loc, tree clauses,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
   catch handler and return it.  This prevents programs from violating the
   structured block semantics with throws.  */

static gimple_seq
maybe_catch_exception (gimple_seq body)
{
  gimple *g;
  tree decl;

  if (!flag_exceptions)
    return body;

  if (lang_hooks.eh_protect_cleanup_actions != NULL)
    decl = lang_hooks.eh_protect_cleanup_actions ();
  else
    decl = builtin_decl_explicit (BUILT_IN_TRAP);

  g = gimple_build_eh_must_not_throw (decl);
  g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
			GIMPLE_TRY_CATCH);

  return gimple_seq_alloc_with_stmt (g);
}
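/* Illustrative result (an assumption about the shape, shown as C++-ish
   pseudo-code):

     try { BODY }
     catch (...) MUST_NOT_THROW { __builtin_trap (); }

   so an exception escaping the structured block terminates the program
   instead of unwinding past the OMP region boundary.  */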
/* Routines to lower OMP directives into OMP-GIMPLE.  */

/* If ctx is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
   and conditional branch to parallel's cancel_label to handle
   cancellation in the implicit barrier.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
{
  gimple *omp_return = gimple_seq_last_stmt (*body);
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  if (ctx->outer
      && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
      && ctx->outer->cancellable)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
      tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
      tree lhs = create_tmp_var (c_bool_type);
      gimple_omp_return_set_lhs (omp_return, lhs);
      tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
      gimple *g = gimple_build_cond (NE_EXPR, lhs,
				     fold_convert (c_bool_type,
						   boolean_false_node),
				     ctx->outer->cancel_label, fallthru_label);
      gimple_seq_add_stmt (body, g);
      gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
    }
}
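/* Illustrative effect (an assumption about the expanded shape, shown
   as C): the implicit barrier at the end of the worksharing region
   becomes roughly

     cancelled = GOMP_barrier_cancel ();
     if (cancelled != false) goto cancel_label;
   fallthru_label:

   so threads leave the region early once the parallel has been
   cancelled.  */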
/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;
  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &l, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  control = create_tmp_var (unsigned_type_node, ".section");
  t = gimple_build_omp_continue (control, control);
  gimple_omp_sections_set_control (stmt, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  maybe_add_implicit_barrier_cancel (ctx, &new_body);

  gimple_bind_set_body (new_stmt, new_body);
}
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, without a copyprivate clause:

	if (GOMP_single_start ())
	  BODY;
	[ GOMP_barrier (); ]	-> unless 'nowait' is present.

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
{
  location_t loc = gimple_location (single_stmt);
  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *call, *cond;
  tree lhs, decl;

  decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
  lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
  call = gimple_build_call (decl, 0);
  gimple_call_set_lhs (call, lhs);
  gimple_seq_add_stmt (pre_p, call);

  cond = gimple_build_cond (EQ_EXPR, lhs,
			    fold_convert_loc (loc, TREE_TYPE (lhs),
					      boolean_true_node),
			    tlabel, flabel);
  gimple_seq_add_stmt (pre_p, cond);
  gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
  gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
}
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

	{
	  if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	    {
	      BODY;
	      copyout.a = a;
	      copyout.b = b;
	      copyout.c = c;
	      GOMP_single_copy_end (&copyout);
	    }
	  else
	    {
	      a = copyout_p->a;
	      b = copyout_p->b;
	      c = copyout_p->c;
	    }
	  GOMP_barrier ();
	}

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			     &copyin_seq, ctx);

  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
/* Expand code for an OpenMP single directive.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
  if (ctx->record_type)
    {
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_constructor (ctx->record_type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}

/* Expand code for an OpenMP master directive.  */

static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, lab = NULL, x, bfn_decl;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tseq;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  x = build_call_expr_loc (loc, bfn_decl, 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  tseq = NULL;
  gimplify_and_add (x, &tseq);
  gimple_bind_add_seq (bind, tseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_label (lab));

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
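
/* A sketch of the lowered form of 'master' (illustrative only):

	if (omp_get_thread_num () != 0) goto lab;
	<master body>;
     lab:;

   No barrier call is emitted here; the GIMPLE_OMP_RETURN above is built
   with nowait set, matching the absence of an implied barrier at the end
   of a master region.  */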

/* Expand code for an OpenMP taskgroup directive.  */

static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  gcall *x;
  gbind *bind;
  tree block = make_node (BLOCK);

  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
			 0);
  gimple_bind_add_stmt (bind, x);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
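
/* A sketch of the lowered form of 'taskgroup' (illustrative only):

	GOMP_taskgroup_start ();
	<taskgroup body>;
	<GIMPLE_OMP_RETURN>;

   The matching GOMP_taskgroup_end call is not emitted by this function;
   it is generated when the region's GIMPLE_OMP_RETURN marker is expanded
   in a later pass.  */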

/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
   */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
  memset (folded_deps, 0, sizeof (*folded_deps) * (2 * len - 1));
  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (TREE_PURPOSE (vec),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p
	      (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
	       UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is equivalent to #pragma omp ordered threads,
     while we want a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
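
/* In the example in the comment above, gcd (8, 4, 6) == 2 and
   min (2, 3, 4) == 2, which is why the two spellings of the folded
   clause given there, depend(sink:-gcd(8,4,6),-min(2,3,4)) and
   depend(sink:-2,-2), denote the same clause.  */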

/* Expand code for an OpenMP ordered directive.  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
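
/* A sketch of the lowered form of a plain (non-SIMD) ordered region
   (illustrative only):

	GOMP_ordered_start ();
	<ordered body>;
	GOMP_ordered_end ();

   For SIMD loops the start/end calls are the IFN_GOMP_SIMD_ORDERED_*
   internal functions instead, and under SIMT the extra per-lane loop
   built above serializes the body across lanes.  */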

/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

static GTY(()) hash_map<tree, tree> *critical_name_mutexes;

static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
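
/* A sketch of the lowered form (illustrative only):

	GOMP_critical_start ();		// unnamed case
	<critical body>;
	GOMP_critical_end ();

   For 'critical (name)' the *_name_* entry points are called instead,
   passing the address of the .gomp_critical_user_<name> mutex created
   above.  */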

/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
      || gimple_omp_for_grid_phony (fd->for_stmt))
    cond = omp_grid_lastprivate_predicate (fd);
  else
    {
      tree n2 = fd->loop.n2;
      if (fd->collapse > 1
	  && TREE_CODE (n2) != INTEGER_CST
	  && gimple_omp_for_combined_into_p (fd->for_stmt))
	{
	  struct omp_context *taskreg_ctx = NULL;
	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	    {
	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
		{
		  if (gimple_omp_for_combined_into_p (gfor))
		    {
		      gcc_assert (ctx->outer->outer
				  && is_parallel_ctx (ctx->outer->outer));
		      taskreg_ctx = ctx->outer->outer;
		    }
		  else
		    {
		      struct omp_for_data outer_fd;
		      omp_extract_for_data (gfor, &outer_fd, NULL);
		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		    }
		}
	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
		taskreg_ctx = ctx->outer->outer;
	    }
	  else if (is_taskreg_ctx (ctx->outer))
	    taskreg_ctx = ctx->outer;
	  if (taskreg_ctx)
	    {
	      int i;
	      tree taskreg_clauses
		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	      tree innerc = omp_find_clause (taskreg_clauses,
					     OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	      for (i = 0; i < fd->collapse; i++)
		{
		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					    OMP_CLAUSE__LOOPTEMP_);
		  gcc_assert (innerc);
		}
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      if (innerc)
		n2 = fold_convert (TREE_TYPE (n2),
				   lookup_decl (OMP_CLAUSE_DECL (innerc),
						taskreg_ctx));
	    }
	}
      cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
    }

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
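
/* For instance, given a canonical unit-stride loop

	#pragma omp for lastprivate (x)
	for (i = 0; i < N; i++)
	  ...

   cond_code is strengthened to EQ_EXPR above, so the lastprivate
   copy-out is gated on (i == N), which only holds in the thread that
   executed the final iteration.  (An illustrative sketch; the exact
   predicate also depends on the loop direction and step.)  */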

/* Lower code for an OMP loop directive.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and it's block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &body);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &body);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
    }

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt),
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);

  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
		     && gimple_omp_for_grid_phony (stmt));
  if (!phony_loop)
    gimple_seq_add_stmt (&body, stmt);
  gimple_seq_add_seq (&body, gimple_omp_body (stmt));

  if (!phony_loop)
    gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							   fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  body = maybe_catch_exception (body);

  if (!phony_loop)
    {
      /* Region exit marker goes at the end of the loop body.  */
      gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
      maybe_add_implicit_barrier_cancel (ctx, &body);
    }

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
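
/* After lowering, the bind body assembled above is laid out roughly as
   follows (a sketch of the ordering, not literal output):

	<ilist from lower_rec_input_clauses>
	<lowered pre-body and VAL1/VAL2/VAL3 temporaries>
	GIMPLE_OMP_FOR
	<loop body>
	GIMPLE_OMP_CONTINUE
	<reduction clauses>
	<dlist: lastprivate/destructor code>
	GIMPLE_OMP_RETURN

   The OMP_FOR/CONTINUE/RETURN markers only become real control flow and
   libgomp calls later, in pass_expand_omp.  */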

/* Callback for walk_stmts.  Check if the current statement only contains
   GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */

static tree
check_combined_parallel (gimple_stmt_iterator *gsi_p,
			 bool *handled_ops_p,
			 struct walk_stmt_info *wi)
{
  int *info = (int *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
      *info = *info == 0 ? 1 : -1;
      break;
    default:
      *info = -1;
      break;
    }
  return NULL;
}
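
/* The int pointed to by WI->INFO encodes what has been seen so far:
   0 = nothing yet, 1 = exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS,
   -1 = anything else.  The caller treats only a final value of 1 as a
   combined parallel workshare.  */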

struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  omp_context *ctx;
};

static tree
task_copyfn_copy_decl (tree var, copy_body_data *cb)
{
  struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;

  if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
    return create_tmp_var (TREE_TYPE (var));
  else
    return var;
}

static tree
task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
{
  tree name, new_fields = NULL, type, f;

  type = lang_hooks.types.make_type (RECORD_TYPE);
  name = DECL_NAME (TYPE_NAME (orig_type));
  name = build_decl (gimple_location (tcctx->ctx->stmt),
		     TYPE_DECL, name, type);
  TYPE_NAME (type) = name;

  for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
    {
      tree new_f = copy_node (f);
      DECL_CONTEXT (new_f) = type;
      TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
      TREE_CHAIN (new_f) = new_fields;
      walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		 &tcctx->cb, NULL);
      new_fields = new_f;
      tcctx->cb.decl_map->put (f, new_f);
    }
  TYPE_FIELDS (type) = nreverse (new_fields);
  layout_type (type);
  return type;
}

/* Create task copyfn.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    tcctx.cb.decl_map = NULL;

  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
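
/* In effect the generated copy function has the shape (a sketch; ARG
   and SARG are the two DECL_ARGUMENTS retyped above):

	void copyfn (struct <record_type> *arg,
		     struct <srecord_type> *sarg)
	{
	  <copy/construct fields of *arg from fields of *sarg>;
	  return;
	}
*/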

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  size_t n_in = 0, n_out = 0, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_IN:
	  n_in++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  n_out++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  gcc_unreachable ();
	}
  tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
  gimple_seq_add_stmt (iseq, g);
  r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
	      NULL_TREE);
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
  gimple_seq_add_stmt (iseq, g);
  for (i = 0; i < 2; i++)
    {
      if ((i ? n_in : n_out) == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
	  {
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  tree clobber = build_constructor (type, NULL);
  TREE_THIS_VOLATILE (clobber) = 1;
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
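
/* The dependence array built above is laid out as follows (matching the
   stores emitted into *ISEQ):

	array[0]	  : n_in + n_out, the total number of dependences
	array[1]	  : n_out, the number of out/inout dependences
	array[2 ..]	  : addresses of the out/inout dependences
	array[2+n_out ..] : addresses of the in dependences

   Its address is then handed to the runtime through the new
   OMP_CLAUSE_DEPEND prepended to *PCLAUSES.  */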

/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  par_bind
    = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
  par_body = gimple_bind_body (par_bind);
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  push_gimplify_context ();

  par_olist = NULL;
  par_ilist = NULL;
  par_rlist = NULL;
  bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
    && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
  if (phony_construct && ctx->record_type)
    {
      gcc_checking_assert (!ctx->receiver_decl);
      ctx->receiver_decl = create_tmp_var
	(build_reference_type (ctx->record_type), ".omp_rec");
    }
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
    lower_reduction_clauses (clauses, &par_rlist, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  olist = NULL;
  ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  if (!phony_construct)
    {
      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }

  bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  if (!phony_construct)
    gimple_bind_add_stmt (bind, stmt);
  else
    gimple_bind_add_seq (bind, new_body);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
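
/* Note that lowering leaves the GIMPLE_OMP_PARALLEL/GIMPLE_OMP_TASK
   statement itself in the IL (except for phony grid constructs); the
   actual call into libgomp that starts the team or queues the task is
   only emitted later, when pass_expand_omp outlines the region into
   CHILD_FN.  */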
7488 /* Lower the GIMPLE_OMP_TARGET in the current statement
7489 in GSI_P. CTX holds context information for the directive. */
7492 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7495 tree child_fn
, t
, c
;
7496 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
7497 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
7498 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
7499 location_t loc
= gimple_location (stmt
);
7500 bool offloaded
, data_region
;
7501 unsigned int map_cnt
= 0;
7503 offloaded
= is_gimple_omp_offloaded (stmt
);
7504 switch (gimple_omp_target_kind (stmt
))
7506 case GF_OMP_TARGET_KIND_REGION
:
7507 case GF_OMP_TARGET_KIND_UPDATE
:
7508 case GF_OMP_TARGET_KIND_ENTER_DATA
:
7509 case GF_OMP_TARGET_KIND_EXIT_DATA
:
7510 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
7511 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
7512 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
7513 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
7514 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
7515 data_region
= false;
7517 case GF_OMP_TARGET_KIND_DATA
:
7518 case GF_OMP_TARGET_KIND_OACC_DATA
:
7519 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
7526 clauses
= gimple_omp_target_clauses (stmt
);
7528 gimple_seq dep_ilist
= NULL
;
7529 gimple_seq dep_olist
= NULL
;
7530 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
7532 push_gimplify_context ();
7533 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
7534 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
7535 &dep_ilist
, &dep_olist
);
7542 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
7543 tgt_body
= gimple_bind_body (tgt_bind
);
7545 else if (data_region
)
7546 tgt_body
= gimple_omp_body (stmt
);
7547 child_fn
= ctx
->cb
.dst_fn
;
7549 push_gimplify_context ();
7552 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7553 switch (OMP_CLAUSE_CODE (c
))
7559 case OMP_CLAUSE_MAP
:
7561 /* First check what we're prepared to handle in the following. */
7562 switch (OMP_CLAUSE_MAP_KIND (c
))
7564 case GOMP_MAP_ALLOC
:
7567 case GOMP_MAP_TOFROM
:
7568 case GOMP_MAP_POINTER
:
7569 case GOMP_MAP_TO_PSET
:
7570 case GOMP_MAP_DELETE
:
7571 case GOMP_MAP_RELEASE
:
7572 case GOMP_MAP_ALWAYS_TO
:
7573 case GOMP_MAP_ALWAYS_FROM
:
7574 case GOMP_MAP_ALWAYS_TOFROM
:
7575 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
7576 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
7577 case GOMP_MAP_STRUCT
:
7578 case GOMP_MAP_ALWAYS_POINTER
:
7580 case GOMP_MAP_FORCE_ALLOC
:
7581 case GOMP_MAP_FORCE_TO
:
7582 case GOMP_MAP_FORCE_FROM
:
7583 case GOMP_MAP_FORCE_TOFROM
:
7584 case GOMP_MAP_FORCE_PRESENT
:
7585 case GOMP_MAP_FORCE_DEVICEPTR
:
7586 case GOMP_MAP_DEVICE_RESIDENT
:
7588 gcc_assert (is_gimple_omp_oacc (stmt
));
7596 case OMP_CLAUSE_FROM
:
7598 var
= OMP_CLAUSE_DECL (c
);
7601 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
7602 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
7603 && (OMP_CLAUSE_MAP_KIND (c
)
7604 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
7610 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
7612 tree var2
= DECL_VALUE_EXPR (var
);
7613 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
7614 var2
= TREE_OPERAND (var2
, 0);
7615 gcc_assert (DECL_P (var2
));
7620 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7621 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7622 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
7624 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
7626 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
7627 && varpool_node::get_create (var
)->offloadable
)
7630 tree type
= build_pointer_type (TREE_TYPE (var
));
7631 tree new_var
= lookup_decl (var
, ctx
);
7632 x
= create_tmp_var_raw (type
, get_name (new_var
));
7633 gimple_add_tmp_var (x
);
7634 x
= build_simple_mem_ref (x
);
7635 SET_DECL_VALUE_EXPR (new_var
, x
);
7636 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7641 if (!maybe_lookup_field (var
, ctx
))
7644 /* Don't remap oacc parallel reduction variables, because the
7645 intermediate result must be local to each gang. */
7646 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7647 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
7649 x
= build_receiver_ref (var
, true, ctx
);
7650 tree new_var
= lookup_decl (var
, ctx
);
7652 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7653 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
7654 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
7655 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
7656 x
= build_simple_mem_ref (x
);
7657 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7659 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
7660 if (omp_is_reference (new_var
))
7662 /* Create a local object to hold the instance
7664 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
7665 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
7666 tree inst
= create_tmp_var (type
, id
);
7667 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
7668 x
= build_fold_addr_expr (inst
);
7670 gimplify_assign (new_var
, x
, &fplist
);
7672 else if (DECL_P (new_var
))
7674 SET_DECL_VALUE_EXPR (new_var
, x
);
7675 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7683 case OMP_CLAUSE_FIRSTPRIVATE
:
7684 if (is_oacc_parallel (ctx
))
7685 goto oacc_firstprivate
;
7687 var
= OMP_CLAUSE_DECL (c
);
7688 if (!omp_is_reference (var
)
7689 && !is_gimple_reg_type (TREE_TYPE (var
)))
7691 tree new_var
= lookup_decl (var
, ctx
);
7692 if (is_variable_sized (var
))
7694 tree pvar
= DECL_VALUE_EXPR (var
);
7695 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
7696 pvar
= TREE_OPERAND (pvar
, 0);
7697 gcc_assert (DECL_P (pvar
));
7698 tree new_pvar
= lookup_decl (pvar
, ctx
);
7699 x
= build_fold_indirect_ref (new_pvar
);
7700 TREE_THIS_NOTRAP (x
) = 1;
7703 x
= build_receiver_ref (var
, true, ctx
);
7704 SET_DECL_VALUE_EXPR (new_var
, x
);
7705 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7709 case OMP_CLAUSE_PRIVATE
:
7710 if (is_gimple_omp_oacc (ctx
->stmt
))
7712 var
= OMP_CLAUSE_DECL (c
);
7713 if (is_variable_sized (var
))
7715 tree new_var
= lookup_decl (var
, ctx
);
7716 tree pvar
= DECL_VALUE_EXPR (var
);
7717 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
7718 pvar
= TREE_OPERAND (pvar
, 0);
7719 gcc_assert (DECL_P (pvar
));
7720 tree new_pvar
= lookup_decl (pvar
, ctx
);
7721 x
= build_fold_indirect_ref (new_pvar
);
7722 TREE_THIS_NOTRAP (x
) = 1;
7723 SET_DECL_VALUE_EXPR (new_var
, x
);
7724 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7728 case OMP_CLAUSE_USE_DEVICE_PTR
:
7729 case OMP_CLAUSE_IS_DEVICE_PTR
:
7730 var
= OMP_CLAUSE_DECL (c
);
7732 if (is_variable_sized (var
))
7734 tree new_var
= lookup_decl (var
, ctx
);
7735 tree pvar
= DECL_VALUE_EXPR (var
);
7736 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
7737 pvar
= TREE_OPERAND (pvar
, 0);
7738 gcc_assert (DECL_P (pvar
));
7739 tree new_pvar
= lookup_decl (pvar
, ctx
);
7740 x
= build_fold_indirect_ref (new_pvar
);
7741 TREE_THIS_NOTRAP (x
) = 1;
7742 SET_DECL_VALUE_EXPR (new_var
, x
);
7743 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7745 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
7747 tree new_var
= lookup_decl (var
, ctx
);
7748 tree type
= build_pointer_type (TREE_TYPE (var
));
7749 x
= create_tmp_var_raw (type
, get_name (new_var
));
7750 gimple_add_tmp_var (x
);
7751 x
= build_simple_mem_ref (x
);
7752 SET_DECL_VALUE_EXPR (new_var
, x
);
7753 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7757 tree new_var
= lookup_decl (var
, ctx
);
7758 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
7759 gimple_add_tmp_var (x
);
7760 SET_DECL_VALUE_EXPR (new_var
, x
);
7761 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
7768 target_nesting_level
++;
7769 lower_omp (&tgt_body
, ctx
);
7770 target_nesting_level
--;
7772 else if (data_region
)
7773 lower_omp (&tgt_body
, ctx
);
7777 /* Declare all the variables created by mapping and the variables
7778 declared in the scope of the target body. */
7779 record_vars_into (ctx
->block_vars
, child_fn
);
7780 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
7785 if (ctx
->record_type
)
7788 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
7789 DECL_NAMELESS (ctx
->sender_decl
) = 1;
7790 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
7791 t
= make_tree_vec (3);
7792 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
7794 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
7796 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
7797 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
7798 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
7799 tree tkind_type
= short_unsigned_type_node
;
7800 int talign_shift
= 8;
7802 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
7804 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
7805 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
7806 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
7807 gimple_omp_target_set_data_arg (stmt
, t
);
7809 vec
<constructor_elt
, va_gc
> *vsize
;
7810 vec
<constructor_elt
, va_gc
> *vkind
;
7811 vec_alloc (vsize
, map_cnt
);
7812 vec_alloc (vkind
, map_cnt
);
7813 unsigned int map_idx
= 0;
7815 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7816 switch (OMP_CLAUSE_CODE (c
))
7818 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
7819 unsigned int talign
;
7824 case OMP_CLAUSE_MAP
:
7826 case OMP_CLAUSE_FROM
:
7827 oacc_firstprivate_map
:
7829 ovar
= OMP_CLAUSE_DECL (c
);
7830 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7831 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7832 || (OMP_CLAUSE_MAP_KIND (c
)
7833 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
7837 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7838 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
7840 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
7841 == get_base_address (ovar
));
7842 nc
= OMP_CLAUSE_CHAIN (c
);
7843 ovar
= OMP_CLAUSE_DECL (nc
);
7847 tree x
= build_sender_ref (ovar
, ctx
);
7849 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
7850 gimplify_assign (x
, v
, &ilist
);
7856 if (DECL_SIZE (ovar
)
7857 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
7859 tree ovar2
= DECL_VALUE_EXPR (ovar
);
7860 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
7861 ovar2
= TREE_OPERAND (ovar2
, 0);
7862 gcc_assert (DECL_P (ovar2
));
7865 if (!maybe_lookup_field (ovar
, ctx
))
7869 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
7870 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
7871 talign
= DECL_ALIGN_UNIT (ovar
);
7874 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
7875 x
= build_sender_ref (ovar
, ctx
);
7877 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
7878 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
7879 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
7880 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
7882 gcc_assert (offloaded
);
7884 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
7885 mark_addressable (avar
);
7886 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
7887 talign
= DECL_ALIGN_UNIT (avar
);
7888 avar
= build_fold_addr_expr (avar
);
7889 gimplify_assign (x
, avar
, &ilist
);
7891 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7893 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
7894 if (!omp_is_reference (var
))
7896 if (is_gimple_reg (var
)
7897 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
7898 TREE_NO_WARNING (var
) = 1;
7899 var
= build_fold_addr_expr (var
);
7902 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
7903 gimplify_assign (x
, var
, &ilist
);
7905 else if (is_gimple_reg (var
))
7907 gcc_assert (offloaded
);
7908 tree avar
= create_tmp_var (TREE_TYPE (var
));
7909 mark_addressable (avar
);
7910 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
7911 if (GOMP_MAP_COPY_TO_P (map_kind
)
7912 || map_kind
== GOMP_MAP_POINTER
7913 || map_kind
== GOMP_MAP_TO_PSET
7914 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
7916 /* If we need to initialize a temporary
7917 with VAR because it is not addressable, and
7918 the variable hasn't been initialized yet, then
7919 we'll get a warning for the store to avar.
7920 Don't warn in that case, the mapping might
7922 TREE_NO_WARNING (var
) = 1;
7923 gimplify_assign (avar
, var
, &ilist
);
7925 avar
= build_fold_addr_expr (avar
);
7926 gimplify_assign (x
, avar
, &ilist
);
7927 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
7928 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
7929 && !TYPE_READONLY (TREE_TYPE (var
)))
7931 x
= unshare_expr (x
);
7932 x
= build_simple_mem_ref (x
);
7933 gimplify_assign (var
, x
, &olist
);
7938 var
= build_fold_addr_expr (var
);
7939 gimplify_assign (x
, var
, &ilist
);
7943 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
7945 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
7946 s
= TREE_TYPE (ovar
);
7947 if (TREE_CODE (s
) == REFERENCE_TYPE
)
7949 s
= TYPE_SIZE_UNIT (s
);
7952 s
= OMP_CLAUSE_SIZE (c
);
7954 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
7955 s
= fold_convert (size_type_node
, s
);
7956 purpose
= size_int (map_idx
++);
7957 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
7958 if (TREE_CODE (s
) != INTEGER_CST
)
7959 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
7961 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
7962 switch (OMP_CLAUSE_CODE (c
))
7964 case OMP_CLAUSE_MAP
:
7965 tkind
= OMP_CLAUSE_MAP_KIND (c
);
7967 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
7970 case GOMP_MAP_ALLOC
:
7973 case GOMP_MAP_TOFROM
:
7974 case GOMP_MAP_ALWAYS_TO
:
7975 case GOMP_MAP_ALWAYS_FROM
:
7976 case GOMP_MAP_ALWAYS_TOFROM
:
7977 case GOMP_MAP_RELEASE
:
7978 case GOMP_MAP_FORCE_TO
:
7979 case GOMP_MAP_FORCE_FROM
:
7980 case GOMP_MAP_FORCE_TOFROM
:
7981 case GOMP_MAP_FORCE_PRESENT
:
7982 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
7984 case GOMP_MAP_DELETE
:
7985 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
7989 if (tkind_zero
!= tkind
)
7991 if (integer_zerop (s
))
7993 else if (integer_nonzerop (s
))
7997 case OMP_CLAUSE_FIRSTPRIVATE
:
7998 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
7999 tkind
= GOMP_MAP_TO
;
8003 tkind
= GOMP_MAP_TO
;
8006 case OMP_CLAUSE_FROM
:
8007 tkind
= GOMP_MAP_FROM
;
8013 gcc_checking_assert (tkind
8014 < (HOST_WIDE_INT_C (1U) << talign_shift
));
8015 gcc_checking_assert (tkind_zero
8016 < (HOST_WIDE_INT_C (1U) << talign_shift
));
8017 talign
= ceil_log2 (talign
);
8018 tkind
|= talign
<< talign_shift
;
8019 tkind_zero
|= talign
<< talign_shift
;
8020 gcc_checking_assert (tkind
8021 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
8022 gcc_checking_assert (tkind_zero
8023 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
8024 if (tkind
== tkind_zero
)
8025 x
= build_int_cstu (tkind_type
, tkind
);
8028 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
8029 x
= build3 (COND_EXPR
, tkind_type
,
8030 fold_build2 (EQ_EXPR
, boolean_type_node
,
8031 unshare_expr (s
), size_zero_node
),
8032 build_int_cstu (tkind_type
, tkind_zero
),
8033 build_int_cstu (tkind_type
, tkind
));
8035 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    if (is_oacc_parallel (ctx))
	      goto oacc_firstprivate_map;
	    ovar = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (ovar))
	      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
	    else
	      talign = DECL_ALIGN_UNIT (ovar);
	    var = lookup_decl_in_outer_ctx (ovar, ctx);
	    x = build_sender_ref (ovar, ctx);
	    tkind = GOMP_MAP_FIRSTPRIVATE;
	    type = TREE_TYPE (ovar);
	    if (omp_is_reference (ovar))
	      type = TREE_TYPE (type);
	    if ((INTEGRAL_TYPE_P (type)
		 && TYPE_PRECISION (type) <= POINTER_SIZE)
		|| TREE_CODE (type) == POINTER_TYPE)
	      {
		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
		tree t = var;
		if (omp_is_reference (var))
		  t = build_simple_mem_ref (var);
		else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
		  TREE_NO_WARNING (var) = 1;
		if (TREE_CODE (type) != POINTER_TYPE)
		  t = fold_convert (pointer_sized_int_node, t);
		t = fold_convert (TREE_TYPE (x), t);
		gimplify_assign (x, t, &ilist);
	      }
	    else if (omp_is_reference (var))
	      gimplify_assign (x, var, &ilist);
	    else if (is_gimple_reg (var))
	      {
		tree avar = create_tmp_var (TREE_TYPE (var));
		mark_addressable (avar);
		if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
		  TREE_NO_WARNING (var) = 1;
		gimplify_assign (avar, var, &ilist);
		avar = build_fold_addr_expr (avar);
		gimplify_assign (x, avar, &ilist);
	      }
	    else
	      {
		var = build_fold_addr_expr (var);
		gimplify_assign (x, var, &ilist);
	      }
	    if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
	      s = size_int (0);
	    else if (omp_is_reference (ovar))
	      s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
	    else
	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
	    s = fold_convert (size_type_node, s);
	    purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	    if (TREE_CODE (s) != INTEGER_CST)
	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;

	    gcc_checking_assert (tkind
				 < (HOST_WIDE_INT_C (1U) << talign_shift));
	    talign = ceil_log2 (talign);
	    tkind |= talign << talign_shift;
	    gcc_checking_assert (tkind
				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				    build_int_cstu (tkind_type, tkind));
	    break;
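	    /* Hedged sketch: for 'firstprivate(x)' with 'int x', the test
	       above chooses GOMP_MAP_FIRSTPRIVATE_INT, so X's value is
	       widened to a pointer-sized integer and passed directly in
	       the sender slot with a zero size entry; no separate host
	       buffer is addressed.  */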
	  case OMP_CLAUSE_USE_DEVICE_PTR:
	  case OMP_CLAUSE_IS_DEVICE_PTR:
	    ovar = OMP_CLAUSE_DECL (c);
	    var = lookup_decl_in_outer_ctx (ovar, ctx);
	    x = build_sender_ref (ovar, ctx);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	      tkind = GOMP_MAP_USE_DEVICE_PTR;
	    else
	      tkind = GOMP_MAP_FIRSTPRIVATE_INT;
	    type = TREE_TYPE (ovar);
	    if (TREE_CODE (type) == ARRAY_TYPE)
	      var = build_fold_addr_expr (var);
	    else
	      {
		if (omp_is_reference (ovar))
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) != ARRAY_TYPE)
		      var = build_simple_mem_ref (var);
		    var = fold_convert (TREE_TYPE (x), var);
		  }
	      }
	    gimplify_assign (x, var, &ilist);
	    s = size_int (0);
	    purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	    gcc_checking_assert (tkind
				 < (HOST_WIDE_INT_C (1U) << talign_shift));
	    gcc_checking_assert (tkind
				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				    build_int_cstu (tkind_type, tkind));
	    break;
	  }
      gcc_assert (map_idx == map_cnt);

      DECL_INITIAL (TREE_VEC_ELT (t, 1))
	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
      DECL_INITIAL (TREE_VEC_ELT (t, 2))
	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
      for (int i = 1; i <= 2; i++)
	if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
	  {
	    gimple_seq initlist = NULL;
	    force_gimple_operand (build1 (DECL_EXPR, void_type_node,
					  TREE_VEC_ELT (t, i)),
				  &initlist, true, NULL_TREE);
	    gimple_seq_add_seq (&ilist, initlist);

	    tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
					      NULL);
	    TREE_THIS_VOLATILE (clobber) = 1;
	    gimple_seq_add_stmt (&olist,
				 gimple_build_assign (TREE_VEC_ELT (t, i),
						      clobber));
	  }

      tree clobber = build_constructor (ctx->record_type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }
  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */
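  /* Roughly, the order assembled below is: the receiver_decl setup for
     offloaded regions with a record type, the firstprivate sequence
     FPLIST, then for offloaded or data regions the OpenACC reduction
     FORK_SEQ, the region body TGT_BODY, the matching JOIN_SEQ, and a
     closing GIMPLE_OMP_RETURN.  */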
  new_body = NULL;

  if (offloaded
      && ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }
  gimple_seq_add_seq (&new_body, fplist);
  if (offloaded || data_region)
    {
      tree prev = NULL_TREE;
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var;
	  default:
	    break;
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (var)
		|| is_gimple_reg_type (TREE_TYPE (var)))
	      {
		tree new_var = lookup_decl (var, ctx);
		tree type;
		type = TREE_TYPE (var);
		if (omp_is_reference (var))
		  type = TREE_TYPE (type);
		if ((INTEGRAL_TYPE_P (type)
		     && TYPE_PRECISION (type) <= POINTER_SIZE)
		    || TREE_CODE (type) == POINTER_TYPE)
		  {
		    x = build_receiver_ref (var, false, ctx);
		    if (TREE_CODE (type) != POINTER_TYPE)
		      x = fold_convert (pointer_sized_int_node, x);
		    x = fold_convert (type, x);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    if (omp_is_reference (var))
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
		      }
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
		else
		  {
		    x = build_receiver_ref (var, !omp_is_reference (var), ctx);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
	      }
	    else if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_var = lookup_decl (pvar, ctx);
		x = build_receiver_ref (var, false, ctx);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  {
		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					    get_name (var));
		    gimple_add_tmp_var (x);
		    TREE_ADDRESSABLE (x) = 1;
		    x = build_fold_addr_expr_loc (clause_loc, x);
		  }
		else
		  break;

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_USE_DEVICE_PTR:
	  case OMP_CLAUSE_IS_DEVICE_PTR:
	    var = OMP_CLAUSE_DECL (c);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	      x = build_sender_ref (var, ctx);
	    else
	      x = build_receiver_ref (var, false, ctx);
	    if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_var = lookup_decl (pvar, ctx);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      {
		tree new_var = lookup_decl (var, ctx);
		new_var = DECL_VALUE_EXPR (new_var);
		gcc_assert (TREE_CODE (new_var) == MEM_REF);
		new_var = TREE_OPERAND (new_var, 0);
		gcc_assert (DECL_P (new_var));
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    else
	      {
		tree type = TREE_TYPE (var);
		tree new_var = lookup_decl (var, ctx);
		if (omp_is_reference (var))
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) != ARRAY_TYPE)
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			x = fold_convert (type, x);
			gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				       fb_rvalue);
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
		      }
		  }
		new_var = DECL_VALUE_EXPR (new_var);
		x = fold_convert (TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  }
      /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second
	 pass, so that any firstprivate vars that OMP_CLAUSE_SIZE may
	 refer to have already been handled.  Likewise OMP_CLAUSE_PRIVATE
	 for VLAs or references to VLAs.  */
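      /* Hedged example (not from the source): for a pointer A and
	 'map(tofrom: a[2:n])', the front end chains a
	 GOMP_MAP_FIRSTPRIVATE_POINTER clause for A after the section map,
	 with OMP_CLAUSE_SIZE recording the bias to the section start; the
	 loop below then rebuilds the device-side A as the received
	 section address adjusted back by that bias.  */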
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var;
	  default:
	    break;
	  case OMP_CLAUSE_MAP:
	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		HOST_WIDE_INT offset = 0;
		gcc_assert (prev);
		var = OMP_CLAUSE_DECL (c);
		if (DECL_P (var)
		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
								      ctx))
		    && varpool_node::get_create (var)->offloadable)
		  break;
		if (TREE_CODE (var) == INDIRECT_REF
		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
		  var = TREE_OPERAND (var, 0);
		if (TREE_CODE (var) == COMPONENT_REF)
		  {
		    var = get_addr_base_and_unit_offset (var, &offset);
		    gcc_assert (var != NULL_TREE && DECL_P (var));
		  }
		else if (DECL_SIZE (var)
			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
		  {
		    tree var2 = DECL_VALUE_EXPR (var);
		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
		    var2 = TREE_OPERAND (var2, 0);
		    gcc_assert (DECL_P (var2));
		    var = var2;
		  }
		tree new_var = lookup_decl (var, ctx), x;
		tree type = TREE_TYPE (new_var);
		bool is_ref;
		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			== COMPONENT_REF))
		  {
		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
		    is_ref = true;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
		  {
		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else
		  is_ref = omp_is_reference (var);
		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		  is_ref = false;
		bool ref_to_array = false;
		if (is_ref)
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) == ARRAY_TYPE)
		      {
			type = build_pointer_type (type);
			ref_to_array = true;
		      }
		  }
		else if (TREE_CODE (type) == ARRAY_TYPE)
		  {
		    tree decl2 = DECL_VALUE_EXPR (new_var);
		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
		    decl2 = TREE_OPERAND (decl2, 0);
		    gcc_assert (DECL_P (decl2));
		    new_var = decl2;
		    type = TREE_TYPE (new_var);
		  }
		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
		x = fold_convert_loc (clause_loc, type, x);
		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
		  {
		    tree bias = OMP_CLAUSE_SIZE (c);
		    if (DECL_P (bias))
		      bias = lookup_decl (bias, ctx);
		    bias = fold_convert_loc (clause_loc, sizetype, bias);
		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
					    bias);
		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (x), x, bias);
		  }
		if (ref_to_array)
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		if (is_ref && !ref_to_array)
		  {
		    tree t = create_tmp_var_raw (type, get_name (var));
		    gimple_add_tmp_var (t);
		    TREE_ADDRESSABLE (t) = 1;
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (t, x));
		    x = build_fold_addr_expr_loc (clause_loc, t);
		  }
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
		prev = NULL_TREE;
	      }
	    else if (OMP_CLAUSE_CHAIN (c)
		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
			== OMP_CLAUSE_MAP
		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			 == GOMP_MAP_FIRSTPRIVATE_POINTER
			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	      prev = c;
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    var = OMP_CLAUSE_DECL (c);
	    if (is_variable_sized (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_pvar = lookup_decl (pvar, ctx);
		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		tree al = size_int (DECL_ALIGN (var));
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_pvar, x));
	      }
	    else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  break;
		else
		  {
		    tree atmp
		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
		    tree al = size_int (TYPE_ALIGN (rtype));
		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		  }

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
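		/* Illustrative note on the two branches above: e.g. for
		   'private(v)' with a VLA V, the device-side storage comes
		   from __builtin_alloca_with_align (size, align), and the
		   result is stored either into the pointer behind V's
		   DECL_VALUE_EXPR or, for a reference to a
		   non-constant-size type, into the reference itself.  */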
	      }
	    break;
	  }

      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      if (is_oacc_parallel (ctx))
	{
	  /* If there are reductions on the offloaded region itself, treat
	     them as a dummy GANG loop.  */
	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);

	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
				 false, NULL, NULL, &fork_seq, &join_seq, ctx);
	}

      gimple_seq_add_seq (&new_body, fork_seq);
      gimple_seq_add_seq (&new_body, tgt_body);
      gimple_seq_add_seq (&new_body, join_seq);

      if (offloaded)
	new_body = maybe_catch_exception (new_body);

      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }
  bind = gimple_build_bind (NULL, NULL,
			    tgt_bind ? gimple_bind_block (tgt_bind)
				     : NULL_TREE);
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
/* Expand code for an OpenMP teams directive.  */
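/* A rough sketch of the lowering performed here (illustrative, not a
   verbatim dump):

     #pragma omp teams num_teams (4) thread_limit (8)
       BODY;

   becomes

     __builtin_GOMP_teams (4, 8);
     BODY;

   with either argument defaulting to 0 (runtime's choice) when the
   corresponding clause is absent.  */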
static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
/* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */
static void
lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
		       gimple_build_omp_return (false));
}
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with task_shared_vars set.  */
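/* Illustrative case (assumed example): a VLA 'a' carries a
   DECL_VALUE_EXPR like '*a.1'; once 'a.1' is remapped to implement data
   sharing, any statement still mentioning 'a' must be regimplified so
   that it uses the replacement pointer.  */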
static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  omp_context *ctx;
  vec<tree> *decls;
};

/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      /* FALLTHRU */
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers; this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  return 0;
}
namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */
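/* Hedged example of what the two passes below reject:

     #pragma omp parallel
     {
       goto bad;
     }
     bad:;

   Jumping out of the construct this way is diagnosed with
   "invalid branch to/from OpenMP structured block", while a jump from
   outside to a label inside the construct gets
   "invalid entry to OpenMP structured block".  */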
static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if (flag_cilkplus)
    {
      if ((branch_ctx
	   && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
	   && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
	  || (label_ctx
	      && gimple_code (label_ctx) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
	kind = "Cilk Plus";
    }
  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}
namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_cilkplus || flag_openacc || flag_openmp;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}

#include "gt-omp-low.h"