1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2023 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.cc (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec
<tree
> task_reductions
;
122 /* A hash map from the reduction clauses to the registered array
124 hash_map
<tree
, unsigned> *task_reduction_map
;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
130 /* And a hash map from the allocate variables to their corresponding
132 hash_map
<tree
, tree
> *allocate_map
;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses
;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses
;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
151 /* True if this parallel directive is nested within another. */
154 /* True if this construct can be cancelled. */
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
159 bool combined_into_simd_safelen1
;
161 /* True if there is nested scan context with inclusive clause. */
164 /* True if there is nested scan context with exclusive clause. */
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase
;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent
;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p
;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec
<tree
> oacc_privatization_candidates
;
188 static splay_tree all_contexts
;
189 static int taskreg_nesting_level
;
190 static int target_nesting_level
;
191 static bitmap make_addressable_vars
;
192 static bitmap global_nonaddressable_vars
;
193 static vec
<omp_context
*> taskreg_contexts
;
194 static vec
<gomp_task
*> task_cpyfns
;
196 static void scan_omp (gimple_seq
*, omp_context
*);
197 static tree
scan_omp_1_op (tree
*, int *, void *);
198 static bool omp_maybe_offloaded_ctx (omp_context
*ctx
);
/* Case labels for GIMPLE statements whose sub-statements a walker must
   descend into rather than treating the statement as a leaf.  */
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_ASSUME: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
211 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
212 (This doesn't include OpenACC 'kernels' decomposed parts.) */
215 is_oacc_parallel_or_serial (omp_context
*ctx
)
217 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
218 return ((outer_type
== GIMPLE_OMP_TARGET
)
219 && ((gimple_omp_target_kind (ctx
->stmt
)
220 == GF_OMP_TARGET_KIND_OACC_PARALLEL
)
221 || (gimple_omp_target_kind (ctx
->stmt
)
222 == GF_OMP_TARGET_KIND_OACC_SERIAL
)));
225 /* Return whether CTX represents an OpenACC 'kernels' construct.
226 (This doesn't include OpenACC 'kernels' decomposed parts.) */
229 is_oacc_kernels (omp_context
*ctx
)
231 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
232 return ((outer_type
== GIMPLE_OMP_TARGET
)
233 && (gimple_omp_target_kind (ctx
->stmt
)
234 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
237 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
240 is_oacc_kernels_decomposed_part (omp_context
*ctx
)
242 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
243 return ((outer_type
== GIMPLE_OMP_TARGET
)
244 && ((gimple_omp_target_kind (ctx
->stmt
)
245 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
)
246 || (gimple_omp_target_kind (ctx
->stmt
)
247 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
)
248 || (gimple_omp_target_kind (ctx
->stmt
)
249 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
)));
252 /* Return true if STMT corresponds to an OpenMP target region. */
254 is_omp_target (gimple
*stmt
)
256 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
)
258 int kind
= gimple_omp_target_kind (stmt
);
259 return (kind
== GF_OMP_TARGET_KIND_REGION
260 || kind
== GF_OMP_TARGET_KIND_DATA
261 || kind
== GF_OMP_TARGET_KIND_ENTER_DATA
262 || kind
== GF_OMP_TARGET_KIND_EXIT_DATA
);
267 /* If DECL is the artificial dummy VAR_DECL created for non-static
268 data member privatization, return the underlying "this" parameter,
269 otherwise return NULL. */
272 omp_member_access_dummy_var (tree decl
)
275 || !DECL_ARTIFICIAL (decl
)
276 || !DECL_IGNORED_P (decl
)
277 || !DECL_HAS_VALUE_EXPR_P (decl
)
278 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
281 tree v
= DECL_VALUE_EXPR (decl
);
282 if (TREE_CODE (v
) != COMPONENT_REF
)
286 switch (TREE_CODE (v
))
292 case POINTER_PLUS_EXPR
:
293 v
= TREE_OPERAND (v
, 0);
296 if (DECL_CONTEXT (v
) == current_function_decl
297 && DECL_ARTIFICIAL (v
)
298 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
306 /* Helper for unshare_and_remap, called through walk_tree. */
309 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
311 tree
*pair
= (tree
*) data
;
314 *tp
= unshare_expr (pair
[1]);
317 else if (IS_TYPE_OR_DECL_P (*tp
))
322 /* Return unshare_expr (X) with all occurrences of FROM
326 unshare_and_remap (tree x
, tree from
, tree to
)
328 tree pair
[2] = { from
, to
};
329 x
= unshare_expr (x
);
330 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
334 /* Convenience function for calling scan_omp_1_op on tree operands. */
337 scan_omp_op (tree
*tp
, omp_context
*ctx
)
339 struct walk_stmt_info wi
;
341 memset (&wi
, 0, sizeof (wi
));
343 wi
.want_locations
= true;
345 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
348 static void lower_omp (gimple_seq
*, omp_context
*);
349 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
350 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
352 /* Return true if CTX is for an omp parallel. */
355 is_parallel_ctx (omp_context
*ctx
)
357 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
361 /* Return true if CTX is for an omp task. */
364 is_task_ctx (omp_context
*ctx
)
366 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
370 /* Return true if CTX is for an omp taskloop. */
373 is_taskloop_ctx (omp_context
*ctx
)
375 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
376 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
380 /* Return true if CTX is for a host omp teams. */
383 is_host_teams_ctx (omp_context
*ctx
)
385 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
386 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
389 /* Return true if CTX is for an omp parallel or omp task or host omp teams
390 (the last one is strictly not a task region in OpenMP speak, but we
391 need to treat it similarly). */
394 is_taskreg_ctx (omp_context
*ctx
)
396 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
399 /* Return true if EXPR is variable sized. */
402 is_variable_sized (const_tree expr
)
404 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
407 /* Lookup variables. The "maybe" form
408 allows for the variable form to not have been entered, otherwise we
409 assert that the variable must have been entered. */
412 lookup_decl (tree var
, omp_context
*ctx
)
414 tree
*n
= ctx
->cb
.decl_map
->get (var
);
419 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
421 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
422 return n
? *n
: NULL_TREE
;
426 lookup_field (tree var
, omp_context
*ctx
)
429 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
430 return (tree
) n
->value
;
434 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
437 n
= splay_tree_lookup (ctx
->sfield_map
438 ? ctx
->sfield_map
: ctx
->field_map
, key
);
439 return (tree
) n
->value
;
443 lookup_sfield (tree var
, omp_context
*ctx
)
445 return lookup_sfield ((splay_tree_key
) var
, ctx
);
449 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
452 n
= splay_tree_lookup (ctx
->field_map
, key
);
453 return n
? (tree
) n
->value
: NULL_TREE
;
457 maybe_lookup_field (tree var
, omp_context
*ctx
)
459 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
462 /* Return true if DECL should be copied by pointer. SHARED_CTX is
463 the parallel context if DECL is to be shared. */
466 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
468 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
469 || TYPE_ATOMIC (TREE_TYPE (decl
)))
472 /* We can only use copy-in/copy-out semantics for shared variables
473 when we know the value is not accessible from an outer scope. */
476 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
478 /* ??? Trivially accessible from anywhere. But why would we even
479 be passing an address in this case? Should we simply assert
480 this to be false, or should we have a cleanup pass that removes
481 these from the list of mappings? */
482 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
485 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
486 without analyzing the expression whether or not its location
487 is accessible to anyone else. In the case of nested parallel
488 regions it certainly may be. */
489 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
492 /* Do not use copy-in/copy-out for variables that have their
494 if (is_global_var (decl
))
496 /* For file scope vars, track whether we've seen them as
497 non-addressable initially and in that case, keep the same
498 answer for the duration of the pass, even when they are made
499 addressable later on e.g. through reduction expansion. Global
500 variables which weren't addressable before the pass will not
501 have their privatized copies address taken. See PR91216. */
502 if (!TREE_ADDRESSABLE (decl
))
504 if (!global_nonaddressable_vars
)
505 global_nonaddressable_vars
= BITMAP_ALLOC (NULL
);
506 bitmap_set_bit (global_nonaddressable_vars
, DECL_UID (decl
));
508 else if (!global_nonaddressable_vars
509 || !bitmap_bit_p (global_nonaddressable_vars
,
513 else if (TREE_ADDRESSABLE (decl
))
516 /* lower_send_shared_vars only uses copy-in, but not copy-out
518 if (TREE_READONLY (decl
)
519 || ((TREE_CODE (decl
) == RESULT_DECL
520 || TREE_CODE (decl
) == PARM_DECL
)
521 && DECL_BY_REFERENCE (decl
)))
524 /* Disallow copy-in/out in nested parallel if
525 decl is shared in outer parallel, otherwise
526 each thread could store the shared variable
527 in its own copy-in location, making the
528 variable no longer really shared. */
529 if (shared_ctx
->is_nested
)
533 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
534 if ((is_taskreg_ctx (up
)
535 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
536 && is_gimple_omp_offloaded (up
->stmt
)))
537 && maybe_lookup_decl (decl
, up
))
544 if (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
)
546 for (c
= gimple_omp_target_clauses (up
->stmt
);
547 c
; c
= OMP_CLAUSE_CHAIN (c
))
548 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
549 && OMP_CLAUSE_DECL (c
) == decl
)
553 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
554 c
; c
= OMP_CLAUSE_CHAIN (c
))
555 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
556 && OMP_CLAUSE_DECL (c
) == decl
)
560 goto maybe_mark_addressable_and_ret
;
564 /* For tasks avoid using copy-in/out. As tasks can be
565 deferred or executed in different thread, when GOMP_task
566 returns, the task hasn't necessarily terminated. */
567 if (is_task_ctx (shared_ctx
))
570 maybe_mark_addressable_and_ret
:
571 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
572 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
574 /* Taking address of OUTER in lower_send_shared_vars
575 might need regimplification of everything that uses the
577 if (!make_addressable_vars
)
578 make_addressable_vars
= BITMAP_ALLOC (NULL
);
579 bitmap_set_bit (make_addressable_vars
, DECL_UID (outer
));
580 TREE_ADDRESSABLE (outer
) = 1;
589 /* Construct a new automatic decl similar to VAR. */
592 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
594 tree copy
= copy_var_decl (var
, name
, type
);
596 DECL_CONTEXT (copy
) = current_function_decl
;
600 DECL_CHAIN (copy
) = ctx
->block_vars
;
601 ctx
->block_vars
= copy
;
606 /* If VAR is listed in make_addressable_vars, it wasn't
607 originally addressable, but was only later made so.
608 We don't need to take address of privatizations
610 if (TREE_ADDRESSABLE (var
)
611 && ((make_addressable_vars
612 && bitmap_bit_p (make_addressable_vars
, DECL_UID (var
)))
613 || (global_nonaddressable_vars
614 && bitmap_bit_p (global_nonaddressable_vars
, DECL_UID (var
)))))
615 TREE_ADDRESSABLE (copy
) = 0;
621 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
623 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
626 /* Build tree nodes to access the field for VAR on the receiver side. */
629 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
631 tree x
, field
= lookup_field (var
, ctx
);
633 /* If the receiver record type was remapped in the child function,
634 remap the field into the new record type. */
635 x
= maybe_lookup_field (field
, ctx
);
639 x
= build_simple_mem_ref (ctx
->receiver_decl
);
640 TREE_THIS_NOTRAP (x
) = 1;
641 x
= omp_build_component_ref (x
, field
);
644 x
= build_simple_mem_ref (x
);
645 TREE_THIS_NOTRAP (x
) = 1;
651 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
652 of a parallel, this is a component reference; for workshare constructs
653 this is some variable. */
656 build_outer_var_ref (tree var
, omp_context
*ctx
,
657 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
660 omp_context
*outer
= ctx
->outer
;
661 for (; outer
; outer
= outer
->outer
)
663 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
665 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCOPE
666 && !maybe_lookup_decl (var
, outer
))
671 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
673 else if (is_variable_sized (var
))
675 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
676 x
= build_outer_var_ref (x
, ctx
, code
);
677 x
= build_simple_mem_ref (x
);
679 else if (is_taskreg_ctx (ctx
))
681 bool by_ref
= use_pointer_for_field (var
, NULL
);
682 x
= build_receiver_ref (var
, by_ref
, ctx
);
684 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
685 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
687 || code
== OMP_CLAUSE_ALLOCATE
688 || (code
== OMP_CLAUSE_PRIVATE
689 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
690 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
691 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
693 /* #pragma omp simd isn't a worksharing construct, and can reference
694 even private vars in its linear etc. clauses.
695 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
696 to private vars in all worksharing constructs. */
698 if (outer
&& is_taskreg_ctx (outer
))
699 x
= lookup_decl (var
, outer
);
701 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
705 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
709 = splay_tree_lookup (outer
->field_map
,
710 (splay_tree_key
) &DECL_UID (var
));
713 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
716 x
= lookup_decl (var
, outer
);
720 tree field
= (tree
) n
->value
;
721 /* If the receiver record type was remapped in the child function,
722 remap the field into the new record type. */
723 x
= maybe_lookup_field (field
, outer
);
727 x
= build_simple_mem_ref (outer
->receiver_decl
);
728 x
= omp_build_component_ref (x
, field
);
729 if (use_pointer_for_field (var
, outer
))
730 x
= build_simple_mem_ref (x
);
734 x
= lookup_decl (var
, outer
);
735 else if (omp_privatize_by_reference (var
))
736 /* This can happen with orphaned constructs. If var is reference, it is
737 possible it is shared and as such valid. */
739 else if (omp_member_access_dummy_var (var
))
746 tree t
= omp_member_access_dummy_var (var
);
749 x
= DECL_VALUE_EXPR (var
);
750 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
752 x
= unshare_and_remap (x
, t
, o
);
754 x
= unshare_expr (x
);
758 if (omp_privatize_by_reference (var
))
759 x
= build_simple_mem_ref (x
);
764 /* Build tree nodes to access the field for VAR on the sender side. */
767 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
769 tree field
= lookup_sfield (key
, ctx
);
770 return omp_build_component_ref (ctx
->sender_decl
, field
);
774 build_sender_ref (tree var
, omp_context
*ctx
)
776 return build_sender_ref ((splay_tree_key
) var
, ctx
);
779 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
780 BASE_POINTERS_RESTRICT, declare the field with restrict. */
783 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
785 tree field
, type
, sfield
= NULL_TREE
;
786 splay_tree_key key
= (splay_tree_key
) var
;
788 if ((mask
& 16) != 0)
790 key
= (splay_tree_key
) &DECL_NAME (var
);
791 gcc_checking_assert (key
!= (splay_tree_key
) var
);
795 key
= (splay_tree_key
) &DECL_UID (var
);
796 gcc_checking_assert (key
!= (splay_tree_key
) var
);
798 gcc_assert ((mask
& 1) == 0
799 || !splay_tree_lookup (ctx
->field_map
, key
));
800 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
801 || !splay_tree_lookup (ctx
->sfield_map
, key
));
802 gcc_assert ((mask
& 3) == 3
803 || !is_gimple_omp_oacc (ctx
->stmt
));
805 type
= TREE_TYPE (var
);
806 if ((mask
& 16) != 0)
807 type
= lang_hooks
.decls
.omp_array_data (var
, true);
809 /* Prevent redeclaring the var in the split-off function with a restrict
810 pointer type. Note that we only clear type itself, restrict qualifiers in
811 the pointed-to type will be ignored by points-to analysis. */
812 if (POINTER_TYPE_P (type
)
813 && TYPE_RESTRICT (type
))
814 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
818 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
819 type
= build_pointer_type (build_pointer_type (type
));
822 type
= build_pointer_type (type
);
823 else if ((mask
& (32 | 3)) == 1
824 && omp_privatize_by_reference (var
))
825 type
= TREE_TYPE (type
);
827 field
= build_decl (DECL_SOURCE_LOCATION (var
),
828 FIELD_DECL
, DECL_NAME (var
), type
);
830 /* Remember what variable this field was created for. This does have a
831 side effect of making dwarf2out ignore this member, so for helpful
832 debugging we clear it later in delete_omp_context. */
833 DECL_ABSTRACT_ORIGIN (field
) = var
;
834 if ((mask
& 16) == 0 && type
== TREE_TYPE (var
))
836 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
837 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
838 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
841 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
845 insert_field_into_struct (ctx
->record_type
, field
);
846 if (ctx
->srecord_type
)
848 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
849 FIELD_DECL
, DECL_NAME (var
), type
);
850 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
851 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
852 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
853 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
854 insert_field_into_struct (ctx
->srecord_type
, sfield
);
859 if (ctx
->srecord_type
== NULL_TREE
)
863 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
864 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
865 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
867 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
868 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
869 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
870 insert_field_into_struct (ctx
->srecord_type
, sfield
);
871 splay_tree_insert (ctx
->sfield_map
,
872 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
873 (splay_tree_value
) sfield
);
877 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
878 : ctx
->srecord_type
, field
);
882 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
883 if ((mask
& 2) && ctx
->sfield_map
)
884 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
888 install_var_local (tree var
, omp_context
*ctx
)
890 tree new_var
= omp_copy_decl_1 (var
, ctx
);
891 insert_decl_map (&ctx
->cb
, var
, new_var
);
895 /* Adjust the replacement for DECL in CTX for the new context. This means
896 copying the DECL_VALUE_EXPR, and fixing up the type. */
899 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
903 new_decl
= lookup_decl (decl
, ctx
);
905 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
907 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
908 && DECL_HAS_VALUE_EXPR_P (decl
))
910 tree ve
= DECL_VALUE_EXPR (decl
);
911 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
912 SET_DECL_VALUE_EXPR (new_decl
, ve
);
913 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
916 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
918 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
919 if (size
== error_mark_node
)
920 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
921 DECL_SIZE (new_decl
) = size
;
923 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
924 if (size
== error_mark_node
)
925 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
926 DECL_SIZE_UNIT (new_decl
) = size
;
930 /* The callback for remap_decl. Search all containing contexts for a
931 mapping of the variable; this avoids having to duplicate the splay
932 tree ahead of time. We know a mapping doesn't already exist in the
933 given context. Create new mappings to implement default semantics. */
936 omp_copy_decl (tree var
, copy_body_data
*cb
)
938 omp_context
*ctx
= (omp_context
*) cb
;
941 if (TREE_CODE (var
) == LABEL_DECL
)
943 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
945 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
946 DECL_CONTEXT (new_var
) = current_function_decl
;
947 insert_decl_map (&ctx
->cb
, var
, new_var
);
951 while (!is_taskreg_ctx (ctx
))
956 new_var
= maybe_lookup_decl (var
, ctx
);
961 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
964 return error_mark_node
;
967 /* Create a new context, with OUTER_CTX being the surrounding context. */
970 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
972 omp_context
*ctx
= XCNEW (omp_context
);
974 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
975 (splay_tree_value
) ctx
);
980 ctx
->outer
= outer_ctx
;
981 ctx
->cb
= outer_ctx
->cb
;
982 ctx
->cb
.block
= NULL
;
983 ctx
->depth
= outer_ctx
->depth
+ 1;
987 ctx
->cb
.src_fn
= current_function_decl
;
988 ctx
->cb
.dst_fn
= current_function_decl
;
989 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
990 gcc_checking_assert (ctx
->cb
.src_node
);
991 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
992 ctx
->cb
.src_cfun
= cfun
;
993 ctx
->cb
.copy_decl
= omp_copy_decl
;
994 ctx
->cb
.eh_lp_nr
= 0;
995 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
996 ctx
->cb
.adjust_array_error_bounds
= true;
997 ctx
->cb
.dont_remap_vla_if_no_change
= true;
1001 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
1006 static gimple_seq
maybe_catch_exception (gimple_seq
);
1008 /* Finalize task copyfn. */
1011 finalize_task_copyfn (gomp_task
*task_stmt
)
1013 struct function
*child_cfun
;
1015 gimple_seq seq
= NULL
, new_seq
;
1018 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
1019 if (child_fn
== NULL_TREE
)
1022 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
1023 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
1025 push_cfun (child_cfun
);
1026 bind
= gimplify_body (child_fn
, false);
1027 gimple_seq_add_stmt (&seq
, bind
);
1028 new_seq
= maybe_catch_exception (seq
);
1031 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
1033 gimple_seq_add_stmt (&seq
, bind
);
1035 gimple_set_body (child_fn
, seq
);
1038 /* Inform the callgraph about the new function. */
1039 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
1040 node
->parallelized_function
= 1;
1041 cgraph_node::add_new_function (child_fn
, false);
1044 /* Destroy a omp_context data structures. Called through the splay tree
1045 value delete callback. */
1048 delete_omp_context (splay_tree_value value
)
1050 omp_context
*ctx
= (omp_context
*) value
;
1052 delete ctx
->cb
.decl_map
;
1055 splay_tree_delete (ctx
->field_map
);
1056 if (ctx
->sfield_map
)
1057 splay_tree_delete (ctx
->sfield_map
);
1059 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1060 it produces corrupt debug information. */
1061 if (ctx
->record_type
)
1064 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
1065 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1067 if (ctx
->srecord_type
)
1070 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
1071 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1074 if (ctx
->task_reduction_map
)
1076 ctx
->task_reductions
.release ();
1077 delete ctx
->task_reduction_map
;
1080 delete ctx
->lastprivate_conditional_map
;
1081 delete ctx
->allocate_map
;
1086 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1090 fixup_child_record_type (omp_context
*ctx
)
1092 tree f
, type
= ctx
->record_type
;
1094 if (!ctx
->receiver_decl
)
1096 /* ??? It isn't sufficient to just call remap_type here, because
1097 variably_modified_type_p doesn't work the way we expect for
1098 record types. Testing each field for whether it needs remapping
1099 and creating a new record by hand works, however. */
1100 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
1101 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
1105 tree name
, new_fields
= NULL
;
1107 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1108 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1109 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1110 TYPE_DECL
, name
, type
);
1111 TYPE_NAME (type
) = name
;
1113 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1115 tree new_f
= copy_node (f
);
1116 DECL_CONTEXT (new_f
) = type
;
1117 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1118 DECL_CHAIN (new_f
) = new_fields
;
1119 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1120 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1122 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1126 /* Arrange to be able to look up the receiver field
1127 given the sender field. */
1128 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1129 (splay_tree_value
) new_f
);
1131 TYPE_FIELDS (type
) = nreverse (new_fields
);
1135 /* In a target region we never modify any of the pointers in *.omp_data_i,
1136 so attempt to help the optimizers. */
1137 if (is_gimple_omp_offloaded (ctx
->stmt
))
1138 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1140 TREE_TYPE (ctx
->receiver_decl
)
1141 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
/* NOTE(review): this region is a line-shredded extraction of
   scan_sharing_clauses; the embedded numbers are the original file's line
   numbers and several original lines are missing.  The text below is
   preserved byte-for-byte; only free-standing comments have been added.  */
1144 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1145 specified by CLAUSES. */
1148 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1151 bool scan_array_reductions
= false;
/* Pre-pass over the clause chain: record allocator/alignment info from
   ALLOCATE clauses into ctx->allocate_map, and diagnose an ALLOCATE clause
   with no allocator in a maybe-offloaded context (unless the
   dynamic_allocators requirement is in effect).  */
1153 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1154 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
1155 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
1156 /* omp_default_mem_alloc is 1 */
1157 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
1158 || OMP_CLAUSE_ALLOCATE_ALIGN (c
) != NULL_TREE
))
1160 /* The allocate clauses that appear on a target construct or on
1161 constructs in a target region must specify an allocator expression
1162 unless a requires directive with the dynamic_allocators clause
1163 is present in the same compilation unit. */
1164 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
1165 && ((omp_requires_mask
& OMP_REQUIRES_DYNAMIC_ALLOCATORS
) == 0)
1166 && omp_maybe_offloaded_ctx (ctx
))
1167 error_at (OMP_CLAUSE_LOCATION (c
), "%<allocate%> clause must"
1168 " specify an allocator here");
1169 if (ctx
->allocate_map
== NULL
)
1170 ctx
->allocate_map
= new hash_map
<tree
, tree
>;
1171 tree val
= integer_zero_node
;
1172 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
1173 val
= OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
1174 if (OMP_CLAUSE_ALLOCATE_ALIGN (c
))
1175 val
= build_tree_list (val
, OMP_CLAUSE_ALLOCATE_ALIGN (c
));
1176 ctx
->allocate_map
->put (OMP_CLAUSE_DECL (c
), val
);
/* First main pass: for each clause, install receiver-record fields and/or
   context-local copies of the affected decls, per clause kind.  */
1179 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1183 switch (OMP_CLAUSE_CODE (c
))
1185 case OMP_CLAUSE_PRIVATE
:
1186 decl
= OMP_CLAUSE_DECL (c
);
1187 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1189 else if (!is_variable_sized (decl
))
1190 install_var_local (decl
, ctx
);
1193 case OMP_CLAUSE_SHARED
:
1194 decl
= OMP_CLAUSE_DECL (c
);
1195 if (ctx
->allocate_map
&& ctx
->allocate_map
->get (decl
))
1196 ctx
->allocate_map
->remove (decl
);
1197 /* Ignore shared directives in teams construct inside of
1198 target construct. */
1199 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1200 && !is_host_teams_ctx (ctx
))
1202 /* Global variables don't need to be copied,
1203 the receiver side will use them directly. */
1204 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1205 if (is_global_var (odecl
))
1207 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1210 gcc_assert (is_taskreg_ctx (ctx
));
1211 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1212 || !is_variable_sized (decl
));
1213 /* Global variables don't need to be copied,
1214 the receiver side will use them directly. */
1215 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1217 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1219 use_pointer_for_field (decl
, ctx
);
1222 by_ref
= use_pointer_for_field (decl
, NULL
);
1223 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1224 || TREE_ADDRESSABLE (decl
)
1226 || omp_privatize_by_reference (decl
))
1228 by_ref
= use_pointer_for_field (decl
, ctx
);
1229 install_var_field (decl
, by_ref
, 3, ctx
);
1230 install_var_local (decl
, ctx
);
1233 /* We don't need to copy const scalar vars back. */
1234 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1237 case OMP_CLAUSE_REDUCTION
:
1238 /* Collect 'reduction' clauses on OpenACC compute construct. */
1239 if (is_gimple_omp_oacc (ctx
->stmt
)
1240 && is_gimple_omp_offloaded (ctx
->stmt
))
1242 /* No 'reduction' clauses on OpenACC 'kernels'. */
1243 gcc_checking_assert (!is_oacc_kernels (ctx
));
1244 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1245 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
1247 ctx
->local_reduction_clauses
1248 = tree_cons (NULL
, c
, ctx
->local_reduction_clauses
);
1252 case OMP_CLAUSE_IN_REDUCTION
:
1253 decl
= OMP_CLAUSE_DECL (c
);
1254 if (ctx
->allocate_map
1255 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1256 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
1257 || OMP_CLAUSE_REDUCTION_TASK (c
)))
1258 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1259 || is_task_ctx (ctx
)))
1262 if (ctx
->allocate_map
->get (decl
))
1263 ctx
->allocate_map
->remove (decl
);
1265 if (TREE_CODE (decl
) == MEM_REF
)
1267 tree t
= TREE_OPERAND (decl
, 0);
1268 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1269 t
= TREE_OPERAND (t
, 0);
1270 if (TREE_CODE (t
) == INDIRECT_REF
1271 || TREE_CODE (t
) == ADDR_EXPR
)
1272 t
= TREE_OPERAND (t
, 0);
1273 if (is_omp_target (ctx
->stmt
))
1275 if (is_variable_sized (t
))
1277 gcc_assert (DECL_HAS_VALUE_EXPR_P (t
));
1278 t
= DECL_VALUE_EXPR (t
);
1279 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
1280 t
= TREE_OPERAND (t
, 0);
1281 gcc_assert (DECL_P (t
));
1285 scan_omp_op (&at
, ctx
->outer
);
1286 tree nt
= omp_copy_decl_1 (at
, ctx
->outer
);
1287 splay_tree_insert (ctx
->field_map
,
1288 (splay_tree_key
) &DECL_CONTEXT (t
),
1289 (splay_tree_value
) nt
);
1291 splay_tree_insert (ctx
->field_map
,
1292 (splay_tree_key
) &DECL_CONTEXT (at
),
1293 (splay_tree_value
) nt
);
1296 install_var_local (t
, ctx
);
1297 if (is_taskreg_ctx (ctx
)
1298 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1299 || (is_task_ctx (ctx
)
1300 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1301 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1302 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1303 == POINTER_TYPE
)))))
1304 && !is_variable_sized (t
)
1305 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1306 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1307 && !is_task_ctx (ctx
))))
1309 by_ref
= use_pointer_for_field (t
, NULL
);
1310 if (is_task_ctx (ctx
)
1311 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1312 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1314 install_var_field (t
, false, 1, ctx
);
1315 install_var_field (t
, by_ref
, 2, ctx
);
1318 install_var_field (t
, by_ref
, 3, ctx
);
1322 if (is_omp_target (ctx
->stmt
))
1326 scan_omp_op (&at
, ctx
->outer
);
1327 tree nt
= omp_copy_decl_1 (at
, ctx
->outer
);
1328 splay_tree_insert (ctx
->field_map
,
1329 (splay_tree_key
) &DECL_CONTEXT (decl
),
1330 (splay_tree_value
) nt
);
1332 splay_tree_insert (ctx
->field_map
,
1333 (splay_tree_key
) &DECL_CONTEXT (at
),
1334 (splay_tree_value
) nt
);
1337 if (is_task_ctx (ctx
)
1338 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1339 && OMP_CLAUSE_REDUCTION_TASK (c
)
1340 && is_parallel_ctx (ctx
)))
1342 /* Global variables don't need to be copied,
1343 the receiver side will use them directly. */
1344 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1346 by_ref
= use_pointer_for_field (decl
, ctx
);
1347 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1348 install_var_field (decl
, by_ref
, 3, ctx
);
1350 install_var_local (decl
, ctx
);
1353 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1354 && OMP_CLAUSE_REDUCTION_TASK (c
))
1356 install_var_local (decl
, ctx
);
1361 case OMP_CLAUSE_LASTPRIVATE
:
1362 /* Let the corresponding firstprivate clause create
1364 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1368 case OMP_CLAUSE_FIRSTPRIVATE
:
1369 case OMP_CLAUSE_LINEAR
:
1370 decl
= OMP_CLAUSE_DECL (c
);
1372 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1373 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
1374 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1375 && is_gimple_omp_offloaded (ctx
->stmt
))
1377 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1378 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
1379 && lang_hooks
.decls
.omp_array_data (decl
, true)))
1381 by_ref
= !omp_privatize_by_reference (decl
);
1382 install_var_field (decl
, by_ref
, 3, ctx
);
1384 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1386 if (TREE_CODE (decl
) == INDIRECT_REF
)
1387 decl
= TREE_OPERAND (decl
, 0);
1388 install_var_field (decl
, true, 3, ctx
);
1390 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1391 install_var_field (decl
, true, 3, ctx
);
1393 install_var_field (decl
, false, 3, ctx
);
1395 if (is_variable_sized (decl
))
1397 if (is_task_ctx (ctx
))
1399 if (ctx
->allocate_map
1400 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1403 if (ctx
->allocate_map
->get (decl
))
1404 ctx
->allocate_map
->remove (decl
);
1406 install_var_field (decl
, false, 1, ctx
);
1410 else if (is_taskreg_ctx (ctx
))
1413 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1414 by_ref
= use_pointer_for_field (decl
, NULL
);
1416 if (is_task_ctx (ctx
)
1417 && (global
|| by_ref
|| omp_privatize_by_reference (decl
)))
1419 if (ctx
->allocate_map
1420 && ctx
->allocate_map
->get (decl
))
1421 install_var_field (decl
, by_ref
, 32 | 1, ctx
);
1423 install_var_field (decl
, false, 1, ctx
);
1425 install_var_field (decl
, by_ref
, 2, ctx
);
1428 install_var_field (decl
, by_ref
, 3, ctx
);
1430 install_var_local (decl
, ctx
);
1431 /* For descr arrays on target: firstprivatize data + attach ptr. */
1432 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1433 && is_gimple_omp_offloaded (ctx
->stmt
)
1434 && !is_gimple_omp_oacc (ctx
->stmt
)
1435 && lang_hooks
.decls
.omp_array_data (decl
, true))
1437 install_var_field (decl
, false, 16 | 3, ctx
);
1438 install_var_field (decl
, true, 8 | 3, ctx
);
1442 case OMP_CLAUSE_USE_DEVICE_PTR
:
1443 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1444 decl
= OMP_CLAUSE_DECL (c
);
1446 /* Fortran array descriptors. */
1447 if (lang_hooks
.decls
.omp_array_data (decl
, true))
1448 install_var_field (decl
, false, 19, ctx
);
1449 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
1450 && !omp_privatize_by_reference (decl
)
1451 && !omp_is_allocatable_or_ptr (decl
))
1452 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1453 install_var_field (decl
, true, 11, ctx
);
1455 install_var_field (decl
, false, 11, ctx
);
1456 if (DECL_SIZE (decl
)
1457 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1459 tree decl2
= DECL_VALUE_EXPR (decl
);
1460 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1461 decl2
= TREE_OPERAND (decl2
, 0);
1462 gcc_assert (DECL_P (decl2
));
1463 install_var_local (decl2
, ctx
);
1465 install_var_local (decl
, ctx
);
1468 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
1469 decl
= OMP_CLAUSE_DECL (c
);
1470 while (TREE_CODE (decl
) == INDIRECT_REF
1471 || TREE_CODE (decl
) == ARRAY_REF
)
1472 decl
= TREE_OPERAND (decl
, 0);
1475 case OMP_CLAUSE_IS_DEVICE_PTR
:
1476 decl
= OMP_CLAUSE_DECL (c
);
1479 case OMP_CLAUSE__LOOPTEMP_
:
1480 case OMP_CLAUSE__REDUCTEMP_
:
1481 gcc_assert (is_taskreg_ctx (ctx
));
1482 decl
= OMP_CLAUSE_DECL (c
);
1483 install_var_field (decl
, false, 3, ctx
);
1484 install_var_local (decl
, ctx
);
1487 case OMP_CLAUSE_COPYPRIVATE
:
1488 case OMP_CLAUSE_COPYIN
:
1489 decl
= OMP_CLAUSE_DECL (c
);
1490 by_ref
= use_pointer_for_field (decl
, NULL
);
1491 install_var_field (decl
, by_ref
, 3, ctx
);
1494 case OMP_CLAUSE_FINAL
:
1496 case OMP_CLAUSE_NUM_THREADS
:
1497 case OMP_CLAUSE_NUM_TEAMS
:
1498 case OMP_CLAUSE_THREAD_LIMIT
:
1499 case OMP_CLAUSE_DEVICE
:
1500 case OMP_CLAUSE_SCHEDULE
:
1501 case OMP_CLAUSE_DIST_SCHEDULE
:
1502 case OMP_CLAUSE_DEPEND
:
1503 case OMP_CLAUSE_PRIORITY
:
1504 case OMP_CLAUSE_GRAINSIZE
:
1505 case OMP_CLAUSE_NUM_TASKS
:
1506 case OMP_CLAUSE_NUM_GANGS
:
1507 case OMP_CLAUSE_NUM_WORKERS
:
1508 case OMP_CLAUSE_VECTOR_LENGTH
:
1509 case OMP_CLAUSE_DETACH
:
1510 case OMP_CLAUSE_FILTER
:
1512 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1516 case OMP_CLAUSE_FROM
:
1517 case OMP_CLAUSE_MAP
:
1519 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1520 decl
= OMP_CLAUSE_DECL (c
);
1521 /* If requested, make 'decl' addressable. */
1522 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1523 && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c
))
1525 gcc_checking_assert (DECL_P (decl
));
1527 bool decl_addressable
= TREE_ADDRESSABLE (decl
);
1528 if (!decl_addressable
)
1530 if (!make_addressable_vars
)
1531 make_addressable_vars
= BITMAP_ALLOC (NULL
);
1532 bitmap_set_bit (make_addressable_vars
, DECL_UID (decl
));
1533 TREE_ADDRESSABLE (decl
) = 1;
1536 if (dump_enabled_p ())
1538 location_t loc
= OMP_CLAUSE_LOCATION (c
);
1539 const dump_user_location_t d_u_loc
1540 = dump_user_location_t::from_location_t (loc
);
1541 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
1543 # pragma GCC diagnostic push
1544 # pragma GCC diagnostic ignored "-Wformat"
1546 if (!decl_addressable
)
1547 dump_printf_loc (MSG_NOTE
, d_u_loc
,
1549 " made addressable\n",
1552 dump_printf_loc (MSG_NOTE
, d_u_loc
,
1554 " already made addressable\n",
1557 # pragma GCC diagnostic pop
1562 OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c
) = 0;
1564 /* Global variables with "omp declare target" attribute
1565 don't need to be copied, the receiver side will use them
1566 directly. However, global variables with "omp declare target link"
1567 attribute need to be copied. Or when ALWAYS modifier is used. */
1568 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1570 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1571 && (OMP_CLAUSE_MAP_KIND (c
)
1572 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
1573 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
1574 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
)
1575 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1576 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1577 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1578 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1579 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
1580 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1581 && varpool_node::get_create (decl
)->offloadable
1582 && !lookup_attribute ("omp declare target link",
1583 DECL_ATTRIBUTES (decl
)))
1585 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1586 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1588 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1589 not offloaded; there is nothing to map for those. */
1590 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1591 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1592 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1595 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1597 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1598 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1599 && is_omp_target (ctx
->stmt
))
1601 /* If this is an offloaded region, an attach operation should
1602 only exist when the pointer variable is mapped in a prior
1604 If we had an error, we may not have attempted to sort clauses
1605 properly, so avoid the test. */
1606 if (is_gimple_omp_offloaded (ctx
->stmt
)
1609 (maybe_lookup_decl (decl
, ctx
)
1610 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1611 && lookup_attribute ("omp declare target",
1612 DECL_ATTRIBUTES (decl
))));
1614 /* By itself, attach/detach is generated as part of pointer
1615 variable mapping and should not create new variables in the
1616 offloaded region, however sender refs for it must be created
1617 for its address to be passed to the runtime. */
1619 = build_decl (OMP_CLAUSE_LOCATION (c
),
1620 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1621 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1622 insert_field_into_struct (ctx
->record_type
, field
);
1623 /* To not clash with a map of the pointer variable itself,
1624 attach/detach maps have their field looked up by the *clause*
1625 tree expression, not the decl. */
1626 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1627 (splay_tree_key
) c
));
1628 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) c
,
1629 (splay_tree_value
) field
);
1632 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1633 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1634 || (OMP_CLAUSE_MAP_KIND (c
)
1635 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1637 if (TREE_CODE (decl
) == COMPONENT_REF
1638 || (TREE_CODE (decl
) == INDIRECT_REF
1639 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1640 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1642 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1643 == POINTER_TYPE
)))))
1645 if (DECL_SIZE (decl
)
1646 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1648 tree decl2
= DECL_VALUE_EXPR (decl
);
1649 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1650 decl2
= TREE_OPERAND (decl2
, 0);
1651 gcc_assert (DECL_P (decl2
));
1652 install_var_local (decl2
, ctx
);
1654 install_var_local (decl
, ctx
);
1659 if (DECL_SIZE (decl
)
1660 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1662 tree decl2
= DECL_VALUE_EXPR (decl
);
1663 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1664 decl2
= TREE_OPERAND (decl2
, 0);
1665 gcc_assert (DECL_P (decl2
));
1666 install_var_field (decl2
, true, 3, ctx
);
1667 install_var_local (decl2
, ctx
);
1668 install_var_local (decl
, ctx
);
1672 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1673 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1674 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1675 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1676 install_var_field (decl
, true, 7, ctx
);
1678 install_var_field (decl
, true, 3, ctx
);
1679 if (is_gimple_omp_offloaded (ctx
->stmt
)
1680 && !(is_gimple_omp_oacc (ctx
->stmt
)
1681 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
1682 install_var_local (decl
, ctx
);
1687 tree base
= get_base_address (decl
);
1688 tree nc
= OMP_CLAUSE_CHAIN (c
);
1691 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1692 && OMP_CLAUSE_DECL (nc
) == base
1693 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1694 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1696 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1697 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1703 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1704 decl
= OMP_CLAUSE_DECL (c
);
1706 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1707 (splay_tree_key
) decl
));
1709 = build_decl (OMP_CLAUSE_LOCATION (c
),
1710 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1711 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1712 insert_field_into_struct (ctx
->record_type
, field
);
1713 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1714 (splay_tree_value
) field
);
1719 case OMP_CLAUSE_ORDER
:
1720 ctx
->order_concurrent
= true;
1723 case OMP_CLAUSE_BIND
:
1727 case OMP_CLAUSE_NOWAIT
:
1728 case OMP_CLAUSE_ORDERED
:
1729 case OMP_CLAUSE_COLLAPSE
:
1730 case OMP_CLAUSE_UNTIED
:
1731 case OMP_CLAUSE_MERGEABLE
:
1732 case OMP_CLAUSE_PROC_BIND
:
1733 case OMP_CLAUSE_SAFELEN
:
1734 case OMP_CLAUSE_SIMDLEN
:
1735 case OMP_CLAUSE_THREADS
:
1736 case OMP_CLAUSE_SIMD
:
1737 case OMP_CLAUSE_NOGROUP
:
1738 case OMP_CLAUSE_DEFAULTMAP
:
1739 case OMP_CLAUSE_ASYNC
:
1740 case OMP_CLAUSE_WAIT
:
1741 case OMP_CLAUSE_GANG
:
1742 case OMP_CLAUSE_WORKER
:
1743 case OMP_CLAUSE_VECTOR
:
1744 case OMP_CLAUSE_INDEPENDENT
:
1745 case OMP_CLAUSE_AUTO
:
1746 case OMP_CLAUSE_SEQ
:
1747 case OMP_CLAUSE_TILE
:
1748 case OMP_CLAUSE__SIMT_
:
1749 case OMP_CLAUSE_DEFAULT
:
1750 case OMP_CLAUSE_NONTEMPORAL
:
1751 case OMP_CLAUSE_IF_PRESENT
:
1752 case OMP_CLAUSE_FINALIZE
:
1753 case OMP_CLAUSE_TASK_REDUCTION
:
1754 case OMP_CLAUSE_ALLOCATE
:
1757 case OMP_CLAUSE_ALIGNED
:
1758 decl
= OMP_CLAUSE_DECL (c
);
1759 if (is_global_var (decl
)
1760 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1761 install_var_local (decl
, ctx
);
1764 case OMP_CLAUSE__CONDTEMP_
:
1765 decl
= OMP_CLAUSE_DECL (c
);
1766 if (is_parallel_ctx (ctx
))
1768 install_var_field (decl
, false, 3, ctx
);
1769 install_var_local (decl
, ctx
);
1771 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1772 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
1773 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1774 install_var_local (decl
, ctx
);
1777 case OMP_CLAUSE__CACHE_
:
1778 case OMP_CLAUSE_NOHOST
:
/* Second pass: fix up remapped decls now that all fields/locals exist,
   and note whether any reduction/lastprivate/linear GIMPLE sequences
   need scanning below.  */
1784 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1786 switch (OMP_CLAUSE_CODE (c
))
1788 case OMP_CLAUSE_LASTPRIVATE
:
1789 /* Let the corresponding firstprivate clause create
1791 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1792 scan_array_reductions
= true;
1793 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1797 case OMP_CLAUSE_FIRSTPRIVATE
:
1798 case OMP_CLAUSE_PRIVATE
:
1799 case OMP_CLAUSE_LINEAR
:
1800 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
1801 case OMP_CLAUSE_IS_DEVICE_PTR
:
1802 decl
= OMP_CLAUSE_DECL (c
);
1803 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1805 while (TREE_CODE (decl
) == INDIRECT_REF
1806 || TREE_CODE (decl
) == ARRAY_REF
)
1807 decl
= TREE_OPERAND (decl
, 0);
1810 if (is_variable_sized (decl
))
1812 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1813 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
1814 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1815 && is_gimple_omp_offloaded (ctx
->stmt
))
1817 tree decl2
= DECL_VALUE_EXPR (decl
);
1818 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1819 decl2
= TREE_OPERAND (decl2
, 0);
1820 gcc_assert (DECL_P (decl2
));
1821 install_var_local (decl2
, ctx
);
1822 fixup_remapped_decl (decl2
, ctx
, false);
1824 install_var_local (decl
, ctx
);
1826 fixup_remapped_decl (decl
, ctx
,
1827 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1828 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1829 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1830 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1831 scan_array_reductions
= true;
1834 case OMP_CLAUSE_REDUCTION
:
1835 case OMP_CLAUSE_IN_REDUCTION
:
1836 decl
= OMP_CLAUSE_DECL (c
);
1837 if (TREE_CODE (decl
) != MEM_REF
&& !is_omp_target (ctx
->stmt
))
1839 if (is_variable_sized (decl
))
1840 install_var_local (decl
, ctx
);
1841 fixup_remapped_decl (decl
, ctx
, false);
1843 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1844 scan_array_reductions
= true;
1847 case OMP_CLAUSE_TASK_REDUCTION
:
1848 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1849 scan_array_reductions
= true;
1852 case OMP_CLAUSE_SHARED
:
1853 /* Ignore shared directives in teams construct inside of
1854 target construct. */
1855 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1856 && !is_host_teams_ctx (ctx
))
1858 decl
= OMP_CLAUSE_DECL (c
);
1859 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1861 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1863 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1866 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1867 install_var_field (decl
, by_ref
, 11, ctx
);
1870 fixup_remapped_decl (decl
, ctx
, false);
1873 case OMP_CLAUSE_MAP
:
1874 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1876 decl
= OMP_CLAUSE_DECL (c
);
1878 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1879 && (OMP_CLAUSE_MAP_KIND (c
)
1880 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1881 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1882 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1883 && varpool_node::get_create (decl
)->offloadable
)
1885 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1886 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1887 && is_omp_target (ctx
->stmt
)
1888 && !is_gimple_omp_offloaded (ctx
->stmt
))
1892 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1893 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1894 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1895 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1897 tree new_decl
= lookup_decl (decl
, ctx
);
1898 TREE_TYPE (new_decl
)
1899 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1901 else if (DECL_SIZE (decl
)
1902 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1904 tree decl2
= DECL_VALUE_EXPR (decl
);
1905 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1906 decl2
= TREE_OPERAND (decl2
, 0);
1907 gcc_assert (DECL_P (decl2
));
1908 fixup_remapped_decl (decl2
, ctx
, false);
1909 fixup_remapped_decl (decl
, ctx
, true);
1912 fixup_remapped_decl (decl
, ctx
, false);
1916 case OMP_CLAUSE_COPYPRIVATE
:
1917 case OMP_CLAUSE_COPYIN
:
1918 case OMP_CLAUSE_DEFAULT
:
1920 case OMP_CLAUSE_NUM_THREADS
:
1921 case OMP_CLAUSE_NUM_TEAMS
:
1922 case OMP_CLAUSE_THREAD_LIMIT
:
1923 case OMP_CLAUSE_DEVICE
:
1924 case OMP_CLAUSE_SCHEDULE
:
1925 case OMP_CLAUSE_DIST_SCHEDULE
:
1926 case OMP_CLAUSE_NOWAIT
:
1927 case OMP_CLAUSE_ORDERED
:
1928 case OMP_CLAUSE_COLLAPSE
:
1929 case OMP_CLAUSE_UNTIED
:
1930 case OMP_CLAUSE_FINAL
:
1931 case OMP_CLAUSE_MERGEABLE
:
1932 case OMP_CLAUSE_PROC_BIND
:
1933 case OMP_CLAUSE_SAFELEN
:
1934 case OMP_CLAUSE_SIMDLEN
:
1935 case OMP_CLAUSE_ALIGNED
:
1936 case OMP_CLAUSE_DEPEND
:
1937 case OMP_CLAUSE_DETACH
:
1938 case OMP_CLAUSE_ALLOCATE
:
1939 case OMP_CLAUSE__LOOPTEMP_
:
1940 case OMP_CLAUSE__REDUCTEMP_
:
1942 case OMP_CLAUSE_FROM
:
1943 case OMP_CLAUSE_PRIORITY
:
1944 case OMP_CLAUSE_GRAINSIZE
:
1945 case OMP_CLAUSE_NUM_TASKS
:
1946 case OMP_CLAUSE_THREADS
:
1947 case OMP_CLAUSE_SIMD
:
1948 case OMP_CLAUSE_NOGROUP
:
1949 case OMP_CLAUSE_DEFAULTMAP
:
1950 case OMP_CLAUSE_ORDER
:
1951 case OMP_CLAUSE_BIND
:
1952 case OMP_CLAUSE_USE_DEVICE_PTR
:
1953 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1954 case OMP_CLAUSE_NONTEMPORAL
:
1955 case OMP_CLAUSE_ASYNC
:
1956 case OMP_CLAUSE_WAIT
:
1957 case OMP_CLAUSE_NUM_GANGS
:
1958 case OMP_CLAUSE_NUM_WORKERS
:
1959 case OMP_CLAUSE_VECTOR_LENGTH
:
1960 case OMP_CLAUSE_GANG
:
1961 case OMP_CLAUSE_WORKER
:
1962 case OMP_CLAUSE_VECTOR
:
1963 case OMP_CLAUSE_INDEPENDENT
:
1964 case OMP_CLAUSE_AUTO
:
1965 case OMP_CLAUSE_SEQ
:
1966 case OMP_CLAUSE_TILE
:
1967 case OMP_CLAUSE__SIMT_
:
1968 case OMP_CLAUSE_IF_PRESENT
:
1969 case OMP_CLAUSE_FINALIZE
:
1970 case OMP_CLAUSE_FILTER
:
1971 case OMP_CLAUSE__CONDTEMP_
:
1974 case OMP_CLAUSE__CACHE_
:
1975 case OMP_CLAUSE_NOHOST
:
/* Final pass: scan reduction/lastprivate/linear GIMPLE sequences if any
   were noted above; OpenACC contexts are asserted not to need this.  */
1981 gcc_checking_assert (!scan_array_reductions
1982 || !is_gimple_omp_oacc (ctx
->stmt
));
1983 if (scan_array_reductions
)
1985 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1986 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1987 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1988 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1989 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1991 omp_context
*rctx
= ctx
;
1992 if (is_omp_target (ctx
->stmt
))
1994 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), rctx
);
1995 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), rctx
);
1997 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1998 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1999 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
2000 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
2001 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
2002 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
2006 /* Create a new name for omp child function. Returns an identifier. */
2009 create_omp_child_function_name (bool task_copy
)
2011 return clone_function_name_numbered (current_function_decl
,
2012 task_copy
? "_omp_cpyfn" : "_omp_fn");
2015 /* Return true if CTX may belong to offloaded code: either if current function
2016 is offloaded, or any enclosing context corresponds to a target region. */
2019 omp_maybe_offloaded_ctx (omp_context
*ctx
)
2021 if (cgraph_node::get (current_function_decl
)->offloadable
)
2023 for (; ctx
; ctx
= ctx
->outer
)
2024 if (is_gimple_omp_offloaded (ctx
->stmt
))
/* NOTE(review): line-shredded extraction preserved verbatim below (embedded
   numbers are the original file's line numbers; some lines are missing from
   the extraction).  Only free-standing comments have been added.  Builds the
   FUNCTION_DECL for an outlined omp child (or task-copy) function: name,
   type, flags, attributes, result decl, and the .omp_data_i/.omp_data_o
   parameters, then pushes a struct function for it.  */
2029 /* Build a decl for the omp child function. It'll not contain a body
2030 yet, just the bare decl. */
2033 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
2035 tree decl
, type
, name
, t
;
2037 name
= create_omp_child_function_name (task_copy
);
2039 type
= build_function_type_list (void_type_node
, ptr_type_node
,
2040 ptr_type_node
, NULL_TREE
);
2042 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
2044 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
2046 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
2049 ctx
->cb
.dst_fn
= decl
;
2051 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
/* Mark the new function static/used/artificial and not inlinable, and
   give it a fresh BLOCK plus the current function's attributes.  */
2053 TREE_STATIC (decl
) = 1;
2054 TREE_USED (decl
) = 1;
2055 DECL_ARTIFICIAL (decl
) = 1;
2056 DECL_IGNORED_P (decl
) = 0;
2057 TREE_PUBLIC (decl
) = 0;
2058 DECL_UNINLINABLE (decl
) = 1;
2059 DECL_EXTERNAL (decl
) = 0;
2060 DECL_CONTEXT (decl
) = NULL_TREE
;
2061 DECL_INITIAL (decl
) = make_node (BLOCK
);
2062 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
2063 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
2064 /* Remove omp declare simd attribute from the new attributes. */
2065 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
2067 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
2070 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
2071 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
2072 *p
= TREE_CHAIN (*p
);
2075 tree chain
= TREE_CHAIN (*p
);
2076 *p
= copy_node (*p
);
2077 p
= &TREE_CHAIN (*p
);
2081 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
2082 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
2083 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
2084 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
2085 DECL_FUNCTION_VERSIONED (decl
)
2086 = DECL_FUNCTION_VERSIONED (current_function_decl
);
/* Propagate offloadability and add "omp target entrypoint" /
   "omp declare target" / "noclone" attributes as appropriate.  */
2088 if (omp_maybe_offloaded_ctx (ctx
))
2090 cgraph_node::get_create (decl
)->offloadable
= 1;
2091 if (ENABLE_OFFLOADING
)
2092 g
->have_offload
= true;
2095 if (cgraph_node::get_create (decl
)->offloadable
)
2097 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
2098 ? "omp target entrypoint"
2099 : "omp declare target");
2100 if (lookup_attribute ("omp declare target",
2101 DECL_ATTRIBUTES (current_function_decl
)))
2103 if (is_gimple_omp_offloaded (ctx
->stmt
))
2104 DECL_ATTRIBUTES (decl
)
2105 = remove_attribute ("omp declare target",
2106 copy_list (DECL_ATTRIBUTES (decl
)));
2111 && is_gimple_omp_offloaded (ctx
->stmt
)
2112 && lookup_attribute ("noclone", DECL_ATTRIBUTES (decl
)) == NULL_TREE
)
2113 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("noclone"),
2114 NULL_TREE
, DECL_ATTRIBUTES (decl
));
2116 DECL_ATTRIBUTES (decl
)
2117 = tree_cons (get_identifier (target_attr
),
2118 NULL_TREE
, DECL_ATTRIBUTES (decl
));
/* Build the void RESULT_DECL and the .omp_data_i receiver parameter
   (plus .omp_data_o for task-copy functions).  */
2121 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
2122 RESULT_DECL
, NULL_TREE
, void_type_node
);
2123 DECL_ARTIFICIAL (t
) = 1;
2124 DECL_IGNORED_P (t
) = 1;
2125 DECL_CONTEXT (t
) = decl
;
2126 DECL_RESULT (decl
) = t
;
2128 tree data_name
= get_identifier (".omp_data_i");
2129 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
2131 DECL_ARTIFICIAL (t
) = 1;
2132 DECL_NAMELESS (t
) = 1;
2133 DECL_ARG_TYPE (t
) = ptr_type_node
;
2134 DECL_CONTEXT (t
) = current_function_decl
;
2136 TREE_READONLY (t
) = 1;
2137 DECL_ARGUMENTS (decl
) = t
;
2139 ctx
->receiver_decl
= t
;
2142 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
2143 PARM_DECL
, get_identifier (".omp_data_o"),
2145 DECL_ARTIFICIAL (t
) = 1;
2146 DECL_NAMELESS (t
) = 1;
2147 DECL_ARG_TYPE (t
) = ptr_type_node
;
2148 DECL_CONTEXT (t
) = current_function_decl
;
2150 TREE_ADDRESSABLE (t
) = 1;
2151 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
2152 DECL_ARGUMENTS (decl
) = t
;
2155 /* Allocate memory for the function structure. The call to
2156 allocate_struct_function clobbers CFUN, so we need to restore
2158 push_struct_function (decl
);
2159 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
2160 init_tree_ssa (cfun
);
/* NOTE(review): line-shredded extraction preserved verbatim below; only
   comments added.  walk_gimple_seq callback: reports (by returning a
   non-NULL tree) a GIMPLE_OMP_FOR that is combined into its parent and
   whose kind matches the gf_mask passed via wi->info.  */
2164 /* Callback for walk_gimple_seq. Check if combined parallel
2165 contains gimple_omp_for_combined_into_p OMP_FOR. */
2168 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
2169 bool *handled_ops_p
,
2170 struct walk_stmt_info
*wi
)
2172 gimple
*stmt
= gsi_stmt (*gsi_p
);
2174 *handled_ops_p
= true;
2175 switch (gimple_code (stmt
))
2179 case GIMPLE_OMP_FOR
:
2180 if (gimple_omp_for_combined_into_p (stmt
)
2181 && gimple_omp_for_kind (stmt
)
2182 == *(const enum gf_mask
*) (wi
->info
))
/* Returning a non-NULL tree stops the walk; the caller detects the hit
   by observing that wi->info changed.  */
2185 return integer_zero_node
;
/* NOTE(review): line-shredded extraction preserved verbatim below (embedded
   numbers are the original file's line numbers; some lines are missing from
   the extraction).  Only free-standing comments have been added.  When a
   combined inner GIMPLE_OMP_FOR of kind MSK is found inside STMT (a
   parallel/task), adds the _LOOPTEMP_ temporaries it will need, and for
   taskloop with reductions also a _REDUCTEMP_.  */
2194 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2197 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
2198 omp_context
*outer_ctx
)
2200 struct walk_stmt_info wi
;
2202 memset (&wi
, 0, sizeof (wi
));
2204 wi
.info
= (void *) &msk
;
2205 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
/* omp_find_combined_for overwrites wi.info with the matching OMP_FOR
   statement when it finds one; if unchanged, nothing to add.  */
2206 if (wi
.info
!= (void *) &msk
)
2208 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
2209 struct omp_for_data fd
;
2210 omp_extract_for_data (for_stmt
, &fd
, NULL
);
2211 /* We need two temporaries with fd.loop.v type (istart/iend)
2212 and then (fd.collapse - 1) temporaries with the same
2213 type for count2 ... countN-1 vars if not constant. */
2214 size_t count
= 2, i
;
2215 tree type
= fd
.iter_type
;
2217 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
2219 count
+= fd
.collapse
- 1;
2220 /* If there are lastprivate clauses on the inner
2221 GIMPLE_OMP_FOR, add one more temporaries for the total number
2222 of iterations (product of count1 ... countN-1). */
2223 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
2224 OMP_CLAUSE_LASTPRIVATE
)
2225 || (msk
== GF_OMP_FOR_KIND_FOR
2226 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2227 OMP_CLAUSE_LASTPRIVATE
)))
2229 tree temp
= create_tmp_var (type
);
2230 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2231 OMP_CLAUSE__LOOPTEMP_
);
2232 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2233 OMP_CLAUSE_DECL (c
) = temp
;
2234 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2235 gimple_omp_taskreg_set_clauses (stmt
, c
);
/* Non-rectangular loop nest with a single non-rect pair: three extra
   temporaries of the index type.  */
2238 && fd
.last_nonrect
== fd
.first_nonrect
+ 1)
2239 if (tree v
= gimple_omp_for_index (for_stmt
, fd
.last_nonrect
))
2240 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
2242 v
= gimple_omp_for_index (for_stmt
, fd
.first_nonrect
);
2243 tree type2
= TREE_TYPE (v
);
2245 for (i
= 0; i
< 3; i
++)
2247 tree temp
= create_tmp_var (type2
);
2248 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2249 OMP_CLAUSE__LOOPTEMP_
);
2250 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2251 OMP_CLAUSE_DECL (c
) = temp
;
2252 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2253 gimple_omp_taskreg_set_clauses (stmt
, c
);
/* Add the COUNT _LOOPTEMP_ temporaries computed above.  */
2257 for (i
= 0; i
< count
; i
++)
2259 tree temp
= create_tmp_var (type
);
2260 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
2261 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2262 OMP_CLAUSE_DECL (c
) = temp
;
2263 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2264 gimple_omp_taskreg_set_clauses (stmt
, c
);
/* Taskloop with reduction clauses additionally needs a _REDUCTEMP_
   pointer-sized temporary.  */
2267 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
2268 && omp_find_clause (gimple_omp_task_clauses (stmt
),
2269 OMP_CLAUSE_REDUCTION
))
2271 tree type
= build_pointer_type (pointer_sized_int_node
);
2272 tree temp
= create_tmp_var (type
);
2273 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2274 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2275 OMP_CLAUSE_DECL (c
) = temp
;
2276 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
2277 gimple_omp_task_set_clauses (stmt
, c
);
2281 /* Scan an OpenMP parallel directive. */
2284 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2288 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
2290 /* Ignore parallel directives with empty bodies, unless there
2291 are copyin clauses. */
2293 && empty_body_p (gimple_omp_body (stmt
))
2294 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2295 OMP_CLAUSE_COPYIN
) == NULL
)
2297 gsi_replace (gsi
, gimple_build_nop (), false);
2301 if (gimple_omp_parallel_combined_p (stmt
))
2302 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
2303 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2304 OMP_CLAUSE_REDUCTION
);
2305 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
2306 if (OMP_CLAUSE_REDUCTION_TASK (c
))
2308 tree type
= build_pointer_type (pointer_sized_int_node
);
2309 tree temp
= create_tmp_var (type
);
2310 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2312 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2313 OMP_CLAUSE_DECL (c
) = temp
;
2314 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
2315 gimple_omp_parallel_set_clauses (stmt
, c
);
2318 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
2321 ctx
= new_omp_context (stmt
, outer_ctx
);
2322 taskreg_contexts
.safe_push (ctx
);
2323 if (taskreg_nesting_level
> 1)
2324 ctx
->is_nested
= true;
2325 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2326 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2327 name
= create_tmp_var_name (".omp_data_s");
2328 name
= build_decl (gimple_location (stmt
),
2329 TYPE_DECL
, name
, ctx
->record_type
);
2330 DECL_ARTIFICIAL (name
) = 1;
2331 DECL_NAMELESS (name
) = 1;
2332 TYPE_NAME (ctx
->record_type
) = name
;
2333 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2334 create_omp_child_function (ctx
, false);
2335 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2337 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
2338 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2340 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2341 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2344 /* Scan an OpenMP task directive. */
2347 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2351 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
2353 /* Ignore task directives with empty bodies, unless they have depend
2356 && gimple_omp_body (stmt
)
2357 && empty_body_p (gimple_omp_body (stmt
))
2358 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
2360 gsi_replace (gsi
, gimple_build_nop (), false);
2364 if (gimple_omp_task_taskloop_p (stmt
))
2365 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
2367 ctx
= new_omp_context (stmt
, outer_ctx
);
2369 if (gimple_omp_task_taskwait_p (stmt
))
2371 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2375 taskreg_contexts
.safe_push (ctx
);
2376 if (taskreg_nesting_level
> 1)
2377 ctx
->is_nested
= true;
2378 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2379 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2380 name
= create_tmp_var_name (".omp_data_s");
2381 name
= build_decl (gimple_location (stmt
),
2382 TYPE_DECL
, name
, ctx
->record_type
);
2383 DECL_ARTIFICIAL (name
) = 1;
2384 DECL_NAMELESS (name
) = 1;
2385 TYPE_NAME (ctx
->record_type
) = name
;
2386 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2387 create_omp_child_function (ctx
, false);
2388 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2390 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2392 if (ctx
->srecord_type
)
2394 name
= create_tmp_var_name (".omp_data_a");
2395 name
= build_decl (gimple_location (stmt
),
2396 TYPE_DECL
, name
, ctx
->srecord_type
);
2397 DECL_ARTIFICIAL (name
) = 1;
2398 DECL_NAMELESS (name
) = 1;
2399 TYPE_NAME (ctx
->srecord_type
) = name
;
2400 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
2401 create_omp_child_function (ctx
, true);
2404 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2406 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2408 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2409 t
= build_int_cst (long_integer_type_node
, 0);
2410 gimple_omp_task_set_arg_size (stmt
, t
);
2411 t
= build_int_cst (long_integer_type_node
, 1);
2412 gimple_omp_task_set_arg_align (stmt
, t
);
2416 /* Helper function for finish_taskreg_scan, called through walk_tree.
2417 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2418 tree, replace it in the expression. */
2421 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
2425 omp_context
*ctx
= (omp_context
*) data
;
2426 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
2429 if (DECL_HAS_VALUE_EXPR_P (t
))
2430 t
= unshare_expr (DECL_VALUE_EXPR (t
));
2435 else if (IS_TYPE_OR_DECL_P (*tp
))
2440 /* If any decls have been made addressable during scan_omp,
2441 adjust their fields if needed, and layout record types
2442 of parallel/task constructs. */
2445 finish_taskreg_scan (omp_context
*ctx
)
2447 if (ctx
->record_type
== NULL_TREE
)
2450 /* If any make_addressable_vars were needed, verify all
2451 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2452 statements if use_pointer_for_field hasn't changed
2453 because of that. If it did, update field types now. */
2454 if (make_addressable_vars
)
2458 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2459 c
; c
= OMP_CLAUSE_CHAIN (c
))
2460 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2461 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2463 tree decl
= OMP_CLAUSE_DECL (c
);
2465 /* Global variables don't need to be copied,
2466 the receiver side will use them directly. */
2467 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2469 if (!bitmap_bit_p (make_addressable_vars
, DECL_UID (decl
))
2470 || !use_pointer_for_field (decl
, ctx
))
2472 tree field
= lookup_field (decl
, ctx
);
2473 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2474 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2476 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2477 TREE_THIS_VOLATILE (field
) = 0;
2478 DECL_USER_ALIGN (field
) = 0;
2479 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2480 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2481 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
2482 if (ctx
->srecord_type
)
2484 tree sfield
= lookup_sfield (decl
, ctx
);
2485 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2486 TREE_THIS_VOLATILE (sfield
) = 0;
2487 DECL_USER_ALIGN (sfield
) = 0;
2488 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2489 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2490 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
2495 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2497 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2498 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2501 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2502 expects to find it at the start of data. */
2503 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2504 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
2508 *p
= DECL_CHAIN (*p
);
2512 p
= &DECL_CHAIN (*p
);
2513 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2514 TYPE_FIELDS (ctx
->record_type
) = f
;
2516 layout_type (ctx
->record_type
);
2517 fixup_child_record_type (ctx
);
2519 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2521 layout_type (ctx
->record_type
);
2522 fixup_child_record_type (ctx
);
2526 location_t loc
= gimple_location (ctx
->stmt
);
2527 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2529 = omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
2531 /* Move VLA fields to the end. */
2532 p
= &TYPE_FIELDS (ctx
->record_type
);
2534 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2535 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2538 *p
= TREE_CHAIN (*p
);
2539 TREE_CHAIN (*q
) = NULL_TREE
;
2540 q
= &TREE_CHAIN (*q
);
2543 p
= &DECL_CHAIN (*p
);
2545 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2547 /* Move fields corresponding to first and second _looptemp_
2548 clause first. There are filled by GOMP_taskloop
2549 and thus need to be in specific positions. */
2550 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2551 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2552 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2553 OMP_CLAUSE__LOOPTEMP_
);
2554 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2555 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2556 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2557 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
2558 p
= &TYPE_FIELDS (ctx
->record_type
);
2560 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2561 *p
= DECL_CHAIN (*p
);
2563 p
= &DECL_CHAIN (*p
);
2564 DECL_CHAIN (f1
) = f2
;
2567 DECL_CHAIN (f2
) = f3
;
2568 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2571 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2572 TYPE_FIELDS (ctx
->record_type
) = f1
;
2573 if (ctx
->srecord_type
)
2575 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2576 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2578 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2579 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2581 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2582 *p
= DECL_CHAIN (*p
);
2584 p
= &DECL_CHAIN (*p
);
2585 DECL_CHAIN (f1
) = f2
;
2586 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2589 DECL_CHAIN (f2
) = f3
;
2590 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2593 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2594 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2601 /* Look for a firstprivate clause with the detach event handle. */
2602 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2603 c
; c
= OMP_CLAUSE_CHAIN (c
))
2605 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
2607 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c
), ctx
)
2608 == OMP_CLAUSE_DECL (detach_clause
))
2613 field
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2615 /* Move field corresponding to the detach clause first.
2616 This is filled by GOMP_task and needs to be in a
2617 specific position. */
2618 p
= &TYPE_FIELDS (ctx
->record_type
);
2621 *p
= DECL_CHAIN (*p
);
2623 p
= &DECL_CHAIN (*p
);
2624 DECL_CHAIN (field
) = TYPE_FIELDS (ctx
->record_type
);
2625 TYPE_FIELDS (ctx
->record_type
) = field
;
2626 if (ctx
->srecord_type
)
2628 field
= lookup_sfield (OMP_CLAUSE_DECL (c
), ctx
);
2629 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2632 *p
= DECL_CHAIN (*p
);
2634 p
= &DECL_CHAIN (*p
);
2635 DECL_CHAIN (field
) = TYPE_FIELDS (ctx
->srecord_type
);
2636 TYPE_FIELDS (ctx
->srecord_type
) = field
;
2639 layout_type (ctx
->record_type
);
2640 fixup_child_record_type (ctx
);
2641 if (ctx
->srecord_type
)
2642 layout_type (ctx
->srecord_type
);
2643 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2644 TYPE_SIZE_UNIT (ctx
->record_type
));
2645 if (TREE_CODE (t
) != INTEGER_CST
)
2647 t
= unshare_expr (t
);
2648 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2650 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2651 t
= build_int_cst (long_integer_type_node
,
2652 TYPE_ALIGN_UNIT (ctx
->record_type
));
2653 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2657 /* Find the enclosing offload context. */
2659 static omp_context
*
2660 enclosing_target_ctx (omp_context
*ctx
)
2662 for (; ctx
; ctx
= ctx
->outer
)
2663 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2669 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2671 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2674 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2676 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2678 gimple
*stmt
= ctx
->stmt
;
2679 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2680 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2687 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2688 (This doesn't include OpenACC 'kernels' decomposed parts.)
2689 Until kernels handling moves to use the same loop indirection
2690 scheme as parallel, we need to do this checking early. */
2693 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2695 bool checking
= true;
2696 unsigned outer_mask
= 0;
2697 unsigned this_mask
= 0;
2698 bool has_seq
= false, has_auto
= false;
2701 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2705 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2707 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2710 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2712 switch (OMP_CLAUSE_CODE (c
))
2714 case OMP_CLAUSE_GANG
:
2715 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2717 case OMP_CLAUSE_WORKER
:
2718 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2720 case OMP_CLAUSE_VECTOR
:
2721 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2723 case OMP_CLAUSE_SEQ
:
2726 case OMP_CLAUSE_AUTO
:
2736 if (has_seq
&& (this_mask
|| has_auto
))
2737 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2738 " OpenACC loop specifiers");
2739 else if (has_auto
&& this_mask
)
2740 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2741 " OpenACC loop specifiers");
2743 if (this_mask
& outer_mask
)
2744 error_at (gimple_location (stmt
), "inner loop uses same"
2745 " OpenACC parallelism as containing loop");
2748 return outer_mask
| this_mask
;
2751 /* Scan a GIMPLE_OMP_FOR. */
2753 static omp_context
*
2754 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2758 tree clauses
= gimple_omp_for_clauses (stmt
);
2760 ctx
= new_omp_context (stmt
, outer_ctx
);
2762 if (is_gimple_omp_oacc (stmt
))
2764 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2766 if (!(tgt
&& is_oacc_kernels (tgt
)))
2767 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2770 switch (OMP_CLAUSE_CODE (c
))
2772 case OMP_CLAUSE_GANG
:
2773 c_op0
= OMP_CLAUSE_GANG_EXPR (c
);
2776 case OMP_CLAUSE_WORKER
:
2777 c_op0
= OMP_CLAUSE_WORKER_EXPR (c
);
2780 case OMP_CLAUSE_VECTOR
:
2781 c_op0
= OMP_CLAUSE_VECTOR_EXPR (c
);
2790 /* By construction, this is impossible for OpenACC 'kernels'
2791 decomposed parts. */
2792 gcc_assert (!(tgt
&& is_oacc_kernels_decomposed_part (tgt
)));
2794 error_at (OMP_CLAUSE_LOCATION (c
),
2795 "argument not permitted on %qs clause",
2796 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
2798 inform (gimple_location (tgt
->stmt
),
2799 "enclosing parent compute construct");
2800 else if (oacc_get_fn_attrib (current_function_decl
))
2801 inform (DECL_SOURCE_LOCATION (current_function_decl
),
2802 "enclosing routine");
2808 if (tgt
&& is_oacc_kernels (tgt
))
2809 check_oacc_kernel_gwv (stmt
, ctx
);
2811 /* Collect all variables named in reductions on this loop. Ensure
2812 that, if this loop has a reduction on some variable v, and there is
2813 a reduction on v somewhere in an outer context, then there is a
2814 reduction on v on all intervening loops as well. */
2815 tree local_reduction_clauses
= NULL
;
2816 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2818 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
2819 local_reduction_clauses
2820 = tree_cons (NULL
, c
, local_reduction_clauses
);
2822 if (ctx
->outer_reduction_clauses
== NULL
&& ctx
->outer
!= NULL
)
2823 ctx
->outer_reduction_clauses
2824 = chainon (unshare_expr (ctx
->outer
->local_reduction_clauses
),
2825 ctx
->outer
->outer_reduction_clauses
);
2826 tree outer_reduction_clauses
= ctx
->outer_reduction_clauses
;
2827 tree local_iter
= local_reduction_clauses
;
2828 for (; local_iter
; local_iter
= TREE_CHAIN (local_iter
))
2830 tree local_clause
= TREE_VALUE (local_iter
);
2831 tree local_var
= OMP_CLAUSE_DECL (local_clause
);
2832 tree_code local_op
= OMP_CLAUSE_REDUCTION_CODE (local_clause
);
2833 bool have_outer_reduction
= false;
2834 tree ctx_iter
= outer_reduction_clauses
;
2835 for (; ctx_iter
; ctx_iter
= TREE_CHAIN (ctx_iter
))
2837 tree outer_clause
= TREE_VALUE (ctx_iter
);
2838 tree outer_var
= OMP_CLAUSE_DECL (outer_clause
);
2839 tree_code outer_op
= OMP_CLAUSE_REDUCTION_CODE (outer_clause
);
2840 if (outer_var
== local_var
&& outer_op
!= local_op
)
2842 warning_at (OMP_CLAUSE_LOCATION (local_clause
), 0,
2843 "conflicting reduction operations for %qE",
2845 inform (OMP_CLAUSE_LOCATION (outer_clause
),
2846 "location of the previous reduction for %qE",
2849 if (outer_var
== local_var
)
2851 have_outer_reduction
= true;
2855 if (have_outer_reduction
)
2857 /* There is a reduction on outer_var both on this loop and on
2858 some enclosing loop. Walk up the context tree until such a
2859 loop with a reduction on outer_var is found, and complain
2860 about all intervening loops that do not have such a
2862 struct omp_context
*curr_loop
= ctx
->outer
;
2864 while (curr_loop
!= NULL
)
2866 tree curr_iter
= curr_loop
->local_reduction_clauses
;
2867 for (; curr_iter
; curr_iter
= TREE_CHAIN (curr_iter
))
2869 tree curr_clause
= TREE_VALUE (curr_iter
);
2870 tree curr_var
= OMP_CLAUSE_DECL (curr_clause
);
2871 if (curr_var
== local_var
)
2878 warning_at (gimple_location (curr_loop
->stmt
), 0,
2879 "nested loop in reduction needs "
2880 "reduction clause for %qE",
2884 curr_loop
= curr_loop
->outer
;
2888 ctx
->local_reduction_clauses
= local_reduction_clauses
;
2889 ctx
->outer_reduction_clauses
2890 = chainon (unshare_expr (ctx
->local_reduction_clauses
),
2891 ctx
->outer_reduction_clauses
);
2893 if (tgt
&& is_oacc_kernels (tgt
))
2895 /* Strip out reductions, as they are not handled yet. */
2896 tree
*prev_ptr
= &clauses
;
2898 while (tree probe
= *prev_ptr
)
2900 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2902 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2903 *prev_ptr
= *next_ptr
;
2905 prev_ptr
= next_ptr
;
2908 gimple_omp_for_set_clauses (stmt
, clauses
);
2912 scan_sharing_clauses (clauses
, ctx
);
2914 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2915 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2917 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2918 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2919 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2920 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2922 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2926 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2929 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2930 omp_context
*outer_ctx
)
2932 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2933 gsi_replace (gsi
, bind
, false);
2934 gimple_seq seq
= NULL
;
2935 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2936 tree cond
= create_tmp_var_raw (integer_type_node
);
2937 DECL_CONTEXT (cond
) = current_function_decl
;
2938 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2939 gimple_bind_set_vars (bind
, cond
);
2940 gimple_call_set_lhs (g
, cond
);
2941 gimple_seq_add_stmt (&seq
, g
);
2942 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2943 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2944 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2945 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2946 gimple_seq_add_stmt (&seq
, g
);
2947 g
= gimple_build_label (lab1
);
2948 gimple_seq_add_stmt (&seq
, g
);
2949 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2950 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2951 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2952 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2953 gimple_omp_for_set_clauses (new_stmt
, clause
);
2954 gimple_seq_add_stmt (&seq
, new_stmt
);
2955 g
= gimple_build_goto (lab3
);
2956 gimple_seq_add_stmt (&seq
, g
);
2957 g
= gimple_build_label (lab2
);
2958 gimple_seq_add_stmt (&seq
, g
);
2959 gimple_seq_add_stmt (&seq
, stmt
);
2960 g
= gimple_build_label (lab3
);
2961 gimple_seq_add_stmt (&seq
, g
);
2962 gimple_bind_set_body (bind
, seq
);
2964 scan_omp_for (new_stmt
, outer_ctx
);
2965 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2968 static tree
omp_find_scan (gimple_stmt_iterator
*, bool *,
2969 struct walk_stmt_info
*);
2970 static omp_context
*maybe_lookup_ctx (gimple
*);
2972 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2973 for scan phase loop. */
2976 scan_omp_simd_scan (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2977 omp_context
*outer_ctx
)
2979 /* The only change between inclusive and exclusive scan will be
2980 within the first simd loop, so just use inclusive in the
2981 worksharing loop. */
2982 outer_ctx
->scan_inclusive
= true;
2983 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_INCLUSIVE
);
2984 OMP_CLAUSE_DECL (c
) = integer_zero_node
;
2986 gomp_scan
*input_stmt
= gimple_build_omp_scan (NULL
, NULL_TREE
);
2987 gomp_scan
*scan_stmt
= gimple_build_omp_scan (NULL
, c
);
2988 gsi_replace (gsi
, input_stmt
, false);
2989 gimple_seq input_body
= NULL
;
2990 gimple_seq_add_stmt (&input_body
, stmt
);
2991 gsi_insert_after (gsi
, scan_stmt
, GSI_NEW_STMT
);
2993 gimple_stmt_iterator input1_gsi
= gsi_none ();
2994 struct walk_stmt_info wi
;
2995 memset (&wi
, 0, sizeof (wi
));
2997 wi
.info
= (void *) &input1_gsi
;
2998 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), omp_find_scan
, NULL
, &wi
);
2999 gcc_assert (!gsi_end_p (input1_gsi
));
3001 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
3002 gsi_next (&input1_gsi
);
3003 gimple
*scan_stmt1
= gsi_stmt (input1_gsi
);
3004 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
3005 c
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (scan_stmt1
));
3006 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
3007 std::swap (input_stmt1
, scan_stmt1
);
3009 gimple_seq input_body1
= gimple_omp_body (input_stmt1
);
3010 gimple_omp_set_body (input_stmt1
, NULL
);
3012 gimple_seq scan_body
= copy_gimple_seq_and_replace_locals (stmt
);
3013 gomp_for
*new_stmt
= as_a
<gomp_for
*> (scan_body
);
3015 gimple_omp_set_body (input_stmt1
, input_body1
);
3016 gimple_omp_set_body (scan_stmt1
, NULL
);
3018 gimple_stmt_iterator input2_gsi
= gsi_none ();
3019 memset (&wi
, 0, sizeof (wi
));
3021 wi
.info
= (void *) &input2_gsi
;
3022 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt
), omp_find_scan
,
3024 gcc_assert (!gsi_end_p (input2_gsi
));
3026 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
3027 gsi_next (&input2_gsi
);
3028 gimple
*scan_stmt2
= gsi_stmt (input2_gsi
);
3029 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
3030 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
3031 std::swap (input_stmt2
, scan_stmt2
);
3033 gimple_omp_set_body (input_stmt2
, NULL
);
3035 gimple_omp_set_body (input_stmt
, input_body
);
3036 gimple_omp_set_body (scan_stmt
, scan_body
);
3038 omp_context
*ctx
= new_omp_context (input_stmt
, outer_ctx
);
3039 scan_omp (gimple_omp_body_ptr (input_stmt
), ctx
);
3041 ctx
= new_omp_context (scan_stmt
, outer_ctx
);
3042 scan_omp (gimple_omp_body_ptr (scan_stmt
), ctx
);
3044 maybe_lookup_ctx (new_stmt
)->for_simd_scan_phase
= true;
3047 /* Scan an OpenMP sections directive. */
3050 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
3054 ctx
= new_omp_context (stmt
, outer_ctx
);
3055 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
3056 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3059 /* Scan an OpenMP single directive. */
3062 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
3067 ctx
= new_omp_context (stmt
, outer_ctx
);
3068 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
3069 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
3070 name
= create_tmp_var_name (".omp_copy_s");
3071 name
= build_decl (gimple_location (stmt
),
3072 TYPE_DECL
, name
, ctx
->record_type
);
3073 TYPE_NAME (ctx
->record_type
) = name
;
3075 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
3076 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3078 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3079 ctx
->record_type
= NULL
;
3081 layout_type (ctx
->record_type
);
3084 /* Scan a GIMPLE_OMP_TARGET. */
3087 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
3091 bool offloaded
= is_gimple_omp_offloaded (stmt
);
3092 tree clauses
= gimple_omp_target_clauses (stmt
);
3094 ctx
= new_omp_context (stmt
, outer_ctx
);
3095 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
3096 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
3097 name
= create_tmp_var_name (".omp_data_t");
3098 name
= build_decl (gimple_location (stmt
),
3099 TYPE_DECL
, name
, ctx
->record_type
);
3100 DECL_ARTIFICIAL (name
) = 1;
3101 DECL_NAMELESS (name
) = 1;
3102 TYPE_NAME (ctx
->record_type
) = name
;
3103 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
3107 create_omp_child_function (ctx
, false);
3108 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
3111 scan_sharing_clauses (clauses
, ctx
);
3112 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3114 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3115 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
3118 TYPE_FIELDS (ctx
->record_type
)
3119 = nreverse (TYPE_FIELDS (ctx
->record_type
));
3122 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
3123 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
3125 field
= DECL_CHAIN (field
))
3126 gcc_assert (DECL_ALIGN (field
) == align
);
3128 layout_type (ctx
->record_type
);
3130 fixup_child_record_type (ctx
);
3133 if (ctx
->teams_nested_p
&& ctx
->nonteams_nested_p
)
3135 error_at (gimple_location (stmt
),
3136 "%<target%> construct with nested %<teams%> construct "
3137 "contains directives outside of the %<teams%> construct");
3138 gimple_omp_set_body (stmt
, gimple_build_bind (NULL
, NULL
, NULL
));
3142 /* Scan an OpenMP teams directive. */
3145 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
3147 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
3149 if (!gimple_omp_teams_host (stmt
))
3151 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
3152 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3155 taskreg_contexts
.safe_push (ctx
);
3156 gcc_assert (taskreg_nesting_level
== 1);
3157 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
3158 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
3159 tree name
= create_tmp_var_name (".omp_data_s");
3160 name
= build_decl (gimple_location (stmt
),
3161 TYPE_DECL
, name
, ctx
->record_type
);
3162 DECL_ARTIFICIAL (name
) = 1;
3163 DECL_NAMELESS (name
) = 1;
3164 TYPE_NAME (ctx
->record_type
) = name
;
3165 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
3166 create_omp_child_function (ctx
, false);
3167 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
3169 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
3170 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3172 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3173 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
3176 /* Check nesting restrictions. */
3178 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
3182 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3183 inside an OpenACC CTX. */
3184 if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3185 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
)
3186 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3188 else if (!(is_gimple_omp (stmt
)
3189 && is_gimple_omp_oacc (stmt
)))
3191 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3193 error_at (gimple_location (stmt
),
3194 "non-OpenACC construct inside of OpenACC routine");
3198 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
3199 if (is_gimple_omp (octx
->stmt
)
3200 && is_gimple_omp_oacc (octx
->stmt
))
3202 error_at (gimple_location (stmt
),
3203 "non-OpenACC construct inside of OpenACC region");
3210 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
3211 && gimple_omp_target_kind (ctx
->stmt
) == GF_OMP_TARGET_KIND_REGION
)
3213 c
= omp_find_clause (gimple_omp_target_clauses (ctx
->stmt
),
3215 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
3217 error_at (gimple_location (stmt
),
3218 "OpenMP constructs are not allowed in target region "
3219 "with %<ancestor%>");
3223 if (gimple_code (stmt
) == GIMPLE_OMP_TEAMS
&& !ctx
->teams_nested_p
)
3224 ctx
->teams_nested_p
= true;
3226 ctx
->nonteams_nested_p
= true;
3228 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
3230 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
3232 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3233 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
3237 if (ctx
->order_concurrent
3238 && (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
3239 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3240 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
3242 error_at (gimple_location (stmt
),
3243 "OpenMP constructs other than %<parallel%>, %<loop%>"
3244 " or %<simd%> may not be nested inside a region with"
3245 " the %<order(concurrent)%> clause");
3248 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
3250 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3251 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3253 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
3254 && (ctx
->outer
== NULL
3255 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
3256 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
3257 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
3258 != GF_OMP_FOR_KIND_FOR
)
3259 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
3261 error_at (gimple_location (stmt
),
3262 "%<ordered simd threads%> must be closely "
3263 "nested inside of %<%s simd%> region",
3264 lang_GNU_Fortran () ? "do" : "for");
3270 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3271 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
3272 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
3274 else if (gimple_code (stmt
) == GIMPLE_OMP_FOR
3275 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
3277 error_at (gimple_location (stmt
),
3278 "OpenMP constructs other than "
3279 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3280 "not be nested inside %<simd%> region");
3283 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
3285 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
3286 || (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
3287 && omp_find_clause (gimple_omp_for_clauses (stmt
),
3288 OMP_CLAUSE_BIND
) == NULL_TREE
))
3289 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
3291 error_at (gimple_location (stmt
),
3292 "only %<distribute%>, %<parallel%> or %<loop%> "
3293 "regions are allowed to be strictly nested inside "
3294 "%<teams%> region");
3298 else if (ctx
->order_concurrent
3299 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
3300 && (gimple_code (stmt
) != GIMPLE_OMP_FOR
3301 || gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_SIMD
)
3302 && gimple_code (stmt
) != GIMPLE_OMP_SCAN
)
3305 error_at (gimple_location (stmt
),
3306 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3307 "%<simd%> may not be nested inside a %<loop%> region");
3309 error_at (gimple_location (stmt
),
3310 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3311 "%<simd%> may not be nested inside a region with "
3312 "the %<order(concurrent)%> clause");
3316 switch (gimple_code (stmt
))
3318 case GIMPLE_OMP_FOR
:
3319 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
)
3321 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
3323 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
3325 error_at (gimple_location (stmt
),
3326 "%<distribute%> region must be strictly nested "
3327 "inside %<teams%> construct");
3332 /* We split taskloop into task and nested taskloop in it. */
3333 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3335 /* For now, hope this will change and loop bind(parallel) will not
3336 be allowed in lots of contexts. */
3337 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
3338 && omp_find_clause (gimple_omp_for_clauses (stmt
), OMP_CLAUSE_BIND
))
3340 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
3345 switch (gimple_code (ctx
->stmt
))
3347 case GIMPLE_OMP_FOR
:
3348 ok
= (gimple_omp_for_kind (ctx
->stmt
)
3349 == GF_OMP_FOR_KIND_OACC_LOOP
);
3352 case GIMPLE_OMP_TARGET
:
3353 switch (gimple_omp_target_kind (ctx
->stmt
))
3355 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3356 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3357 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3358 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3359 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3370 else if (oacc_get_fn_attrib (current_function_decl
))
3374 error_at (gimple_location (stmt
),
3375 "OpenACC loop directive must be associated with"
3376 " an OpenACC compute region");
3382 if (is_gimple_call (stmt
)
3383 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3384 == BUILT_IN_GOMP_CANCEL
3385 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3386 == BUILT_IN_GOMP_CANCELLATION_POINT
))
3388 const char *bad
= NULL
;
3389 const char *kind
= NULL
;
3390 const char *construct
3391 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3392 == BUILT_IN_GOMP_CANCEL
)
3394 : "cancellation point";
3397 error_at (gimple_location (stmt
), "orphaned %qs construct",
3401 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
3402 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
3406 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
3408 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3409 == BUILT_IN_GOMP_CANCEL
3410 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3411 ctx
->cancellable
= true;
3415 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3416 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
3418 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3419 == BUILT_IN_GOMP_CANCEL
3420 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3422 ctx
->cancellable
= true;
3423 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3425 warning_at (gimple_location (stmt
), 0,
3426 "%<cancel for%> inside "
3427 "%<nowait%> for construct");
3428 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3429 OMP_CLAUSE_ORDERED
))
3430 warning_at (gimple_location (stmt
), 0,
3431 "%<cancel for%> inside "
3432 "%<ordered%> for construct");
3437 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
3438 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
3440 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3441 == BUILT_IN_GOMP_CANCEL
3442 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3444 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
3446 ctx
->cancellable
= true;
3447 if (omp_find_clause (gimple_omp_sections_clauses
3450 warning_at (gimple_location (stmt
), 0,
3451 "%<cancel sections%> inside "
3452 "%<nowait%> sections construct");
3456 gcc_assert (ctx
->outer
3457 && gimple_code (ctx
->outer
->stmt
)
3458 == GIMPLE_OMP_SECTIONS
);
3459 ctx
->outer
->cancellable
= true;
3460 if (omp_find_clause (gimple_omp_sections_clauses
3463 warning_at (gimple_location (stmt
), 0,
3464 "%<cancel sections%> inside "
3465 "%<nowait%> sections construct");
3471 if (!is_task_ctx (ctx
)
3472 && (!is_taskloop_ctx (ctx
)
3473 || ctx
->outer
== NULL
3474 || !is_task_ctx (ctx
->outer
)))
3478 for (omp_context
*octx
= ctx
->outer
;
3479 octx
; octx
= octx
->outer
)
3481 switch (gimple_code (octx
->stmt
))
3483 case GIMPLE_OMP_TASKGROUP
:
3485 case GIMPLE_OMP_TARGET
:
3486 if (gimple_omp_target_kind (octx
->stmt
)
3487 != GF_OMP_TARGET_KIND_REGION
)
3490 case GIMPLE_OMP_PARALLEL
:
3491 case GIMPLE_OMP_TEAMS
:
3492 error_at (gimple_location (stmt
),
3493 "%<%s taskgroup%> construct not closely "
3494 "nested inside of %<taskgroup%> region",
3497 case GIMPLE_OMP_TASK
:
3498 if (gimple_omp_task_taskloop_p (octx
->stmt
)
3500 && is_taskloop_ctx (octx
->outer
))
3503 = gimple_omp_for_clauses (octx
->outer
->stmt
);
3504 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
3513 ctx
->cancellable
= true;
3518 error_at (gimple_location (stmt
), "invalid arguments");
3523 error_at (gimple_location (stmt
),
3524 "%<%s %s%> construct not closely nested inside of %qs",
3525 construct
, kind
, bad
);
3530 case GIMPLE_OMP_SECTIONS
:
3531 case GIMPLE_OMP_SINGLE
:
3532 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3533 switch (gimple_code (ctx
->stmt
))
3535 case GIMPLE_OMP_FOR
:
3536 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3537 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3540 case GIMPLE_OMP_SECTIONS
:
3541 case GIMPLE_OMP_SINGLE
:
3542 case GIMPLE_OMP_ORDERED
:
3543 case GIMPLE_OMP_MASTER
:
3544 case GIMPLE_OMP_MASKED
:
3545 case GIMPLE_OMP_TASK
:
3546 case GIMPLE_OMP_CRITICAL
:
3547 if (is_gimple_call (stmt
))
3549 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3550 != BUILT_IN_GOMP_BARRIER
)
3552 error_at (gimple_location (stmt
),
3553 "barrier region may not be closely nested inside "
3554 "of work-sharing, %<loop%>, %<critical%>, "
3555 "%<ordered%>, %<master%>, %<masked%>, explicit "
3556 "%<task%> or %<taskloop%> region");
3559 error_at (gimple_location (stmt
),
3560 "work-sharing region may not be closely nested inside "
3561 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3562 "%<master%>, %<masked%>, explicit %<task%> or "
3563 "%<taskloop%> region");
3565 case GIMPLE_OMP_PARALLEL
:
3566 case GIMPLE_OMP_TEAMS
:
3568 case GIMPLE_OMP_TARGET
:
3569 if (gimple_omp_target_kind (ctx
->stmt
)
3570 == GF_OMP_TARGET_KIND_REGION
)
3577 case GIMPLE_OMP_MASTER
:
3578 case GIMPLE_OMP_MASKED
:
3579 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3580 switch (gimple_code (ctx
->stmt
))
3582 case GIMPLE_OMP_FOR
:
3583 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3584 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3587 case GIMPLE_OMP_SECTIONS
:
3588 case GIMPLE_OMP_SINGLE
:
3589 case GIMPLE_OMP_TASK
:
3590 error_at (gimple_location (stmt
),
3591 "%qs region may not be closely nested inside "
3592 "of work-sharing, %<loop%>, explicit %<task%> or "
3593 "%<taskloop%> region",
3594 gimple_code (stmt
) == GIMPLE_OMP_MASTER
3595 ? "master" : "masked");
3597 case GIMPLE_OMP_PARALLEL
:
3598 case GIMPLE_OMP_TEAMS
:
3600 case GIMPLE_OMP_TARGET
:
3601 if (gimple_omp_target_kind (ctx
->stmt
)
3602 == GF_OMP_TARGET_KIND_REGION
)
3609 case GIMPLE_OMP_SCOPE
:
3610 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3611 switch (gimple_code (ctx
->stmt
))
3613 case GIMPLE_OMP_FOR
:
3614 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3615 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3618 case GIMPLE_OMP_SECTIONS
:
3619 case GIMPLE_OMP_SINGLE
:
3620 case GIMPLE_OMP_TASK
:
3621 case GIMPLE_OMP_CRITICAL
:
3622 case GIMPLE_OMP_ORDERED
:
3623 case GIMPLE_OMP_MASTER
:
3624 case GIMPLE_OMP_MASKED
:
3625 error_at (gimple_location (stmt
),
3626 "%<scope%> region may not be closely nested inside "
3627 "of work-sharing, %<loop%>, explicit %<task%>, "
3628 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3629 "or %<masked%> region");
3631 case GIMPLE_OMP_PARALLEL
:
3632 case GIMPLE_OMP_TEAMS
:
3634 case GIMPLE_OMP_TARGET
:
3635 if (gimple_omp_target_kind (ctx
->stmt
)
3636 == GF_OMP_TARGET_KIND_REGION
)
3643 case GIMPLE_OMP_TASK
:
3644 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3645 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
)
3647 enum omp_clause_doacross_kind kind
= OMP_CLAUSE_DOACROSS_KIND (c
);
3648 error_at (OMP_CLAUSE_LOCATION (c
),
3649 "%<%s(%s)%> is only allowed in %<omp ordered%>",
3650 OMP_CLAUSE_DOACROSS_DEPEND (c
) ? "depend" : "doacross",
3651 kind
== OMP_CLAUSE_DOACROSS_SOURCE
? "source" : "sink");
3655 case GIMPLE_OMP_ORDERED
:
3656 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3657 c
; c
= OMP_CLAUSE_CHAIN (c
))
3659 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DOACROSS
)
3661 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
3663 error_at (OMP_CLAUSE_LOCATION (c
),
3664 "invalid depend kind in omp %<ordered%> %<depend%>");
3667 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
3668 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
3673 /* Look for containing ordered(N) loop. */
3675 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3677 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3678 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
3680 error_at (OMP_CLAUSE_LOCATION (c
),
3681 "%<ordered%> construct with %<depend%> clause "
3682 "must be closely nested inside an %<ordered%> loop");
3686 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3687 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3689 /* ordered simd must be closely nested inside of simd region,
3690 and simd region must not encounter constructs other than
3691 ordered simd, therefore ordered simd may be either orphaned,
3692 or ctx->stmt must be simd. The latter case is handled already
3696 error_at (gimple_location (stmt
),
3697 "%<ordered%> %<simd%> must be closely nested inside "
3702 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3703 switch (gimple_code (ctx
->stmt
))
3705 case GIMPLE_OMP_CRITICAL
:
3706 case GIMPLE_OMP_TASK
:
3707 case GIMPLE_OMP_ORDERED
:
3708 ordered_in_taskloop
:
3709 error_at (gimple_location (stmt
),
3710 "%<ordered%> region may not be closely nested inside "
3711 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3712 "%<taskloop%> region");
3714 case GIMPLE_OMP_FOR
:
3715 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3716 goto ordered_in_taskloop
;
3718 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3719 OMP_CLAUSE_ORDERED
);
3722 error_at (gimple_location (stmt
),
3723 "%<ordered%> region must be closely nested inside "
3724 "a loop region with an %<ordered%> clause");
3727 if (!gimple_omp_ordered_standalone_p (stmt
))
3729 if (OMP_CLAUSE_ORDERED_DOACROSS (o
))
3731 error_at (gimple_location (stmt
),
3732 "%<ordered%> construct without %<doacross%> or "
3733 "%<depend%> clauses must not have the same "
3734 "binding region as %<ordered%> construct with "
3738 else if (OMP_CLAUSE_ORDERED_EXPR (o
))
3741 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3742 OMP_CLAUSE_COLLAPSE
);
3744 o_n
= tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (o
));
3745 HOST_WIDE_INT c_n
= 1;
3747 c_n
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co
));
3750 error_at (gimple_location (stmt
),
3751 "%<ordered%> construct without %<doacross%> "
3752 "or %<depend%> clauses binds to loop where "
3753 "%<collapse%> argument %wd is different from "
3754 "%<ordered%> argument %wd", c_n
, o_n
);
3760 case GIMPLE_OMP_TARGET
:
3761 if (gimple_omp_target_kind (ctx
->stmt
)
3762 != GF_OMP_TARGET_KIND_REGION
)
3765 case GIMPLE_OMP_PARALLEL
:
3766 case GIMPLE_OMP_TEAMS
:
3767 error_at (gimple_location (stmt
),
3768 "%<ordered%> region must be closely nested inside "
3769 "a loop region with an %<ordered%> clause");
3775 case GIMPLE_OMP_CRITICAL
:
3778 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3779 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3780 if (gomp_critical
*other_crit
3781 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3782 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3784 error_at (gimple_location (stmt
),
3785 "%<critical%> region may not be nested inside "
3786 "a %<critical%> region with the same name");
3791 case GIMPLE_OMP_TEAMS
:
3794 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3795 || (gimple_omp_target_kind (ctx
->stmt
)
3796 != GF_OMP_TARGET_KIND_REGION
))
3798 /* Teams construct can appear either strictly nested inside of
3799 target construct with no intervening stmts, or can be encountered
3800 only by initial task (so must not appear inside any OpenMP
3802 error_at (gimple_location (stmt
),
3803 "%<teams%> construct must be closely nested inside of "
3804 "%<target%> construct or not nested in any OpenMP "
3809 case GIMPLE_OMP_TARGET
:
3810 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3811 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
)
3813 enum omp_clause_doacross_kind kind
= OMP_CLAUSE_DOACROSS_KIND (c
);
3814 error_at (OMP_CLAUSE_LOCATION (c
),
3815 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3816 kind
== OMP_CLAUSE_DOACROSS_SOURCE
? "source" : "sink");
3819 if (is_gimple_omp_offloaded (stmt
)
3820 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3822 error_at (gimple_location (stmt
),
3823 "OpenACC region inside of OpenACC routine, nested "
3824 "parallelism not supported yet");
3827 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3829 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3831 if (is_gimple_omp (stmt
)
3832 && is_gimple_omp_oacc (stmt
)
3833 && is_gimple_omp (ctx
->stmt
))
3835 error_at (gimple_location (stmt
),
3836 "OpenACC construct inside of non-OpenACC region");
3842 const char *stmt_name
, *ctx_stmt_name
;
3843 switch (gimple_omp_target_kind (stmt
))
3845 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3846 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3847 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3848 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3849 stmt_name
= "target enter data"; break;
3850 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3851 stmt_name
= "target exit data"; break;
3852 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3853 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3854 case GF_OMP_TARGET_KIND_OACC_SERIAL
: stmt_name
= "serial"; break;
3855 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3856 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3857 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
3858 stmt_name
= "enter data"; break;
3859 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
3860 stmt_name
= "exit data"; break;
3861 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3862 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3864 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3865 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3866 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
3867 /* OpenACC 'kernels' decomposed parts. */
3868 stmt_name
= "kernels"; break;
3869 default: gcc_unreachable ();
3871 switch (gimple_omp_target_kind (ctx
->stmt
))
3873 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3874 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3875 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3876 ctx_stmt_name
= "parallel"; break;
3877 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3878 ctx_stmt_name
= "kernels"; break;
3879 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3880 ctx_stmt_name
= "serial"; break;
3881 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3882 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3883 ctx_stmt_name
= "host_data"; break;
3884 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3885 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3886 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
3887 /* OpenACC 'kernels' decomposed parts. */
3888 ctx_stmt_name
= "kernels"; break;
3889 default: gcc_unreachable ();
3892 /* OpenACC/OpenMP mismatch? */
3893 if (is_gimple_omp_oacc (stmt
)
3894 != is_gimple_omp_oacc (ctx
->stmt
))
3896 error_at (gimple_location (stmt
),
3897 "%s %qs construct inside of %s %qs region",
3898 (is_gimple_omp_oacc (stmt
)
3899 ? "OpenACC" : "OpenMP"), stmt_name
,
3900 (is_gimple_omp_oacc (ctx
->stmt
)
3901 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3904 if (is_gimple_omp_offloaded (ctx
->stmt
))
3906 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3907 if (is_gimple_omp_oacc (ctx
->stmt
))
3909 error_at (gimple_location (stmt
),
3910 "%qs construct inside of %qs region",
3911 stmt_name
, ctx_stmt_name
);
3916 if ((gimple_omp_target_kind (ctx
->stmt
)
3917 == GF_OMP_TARGET_KIND_REGION
)
3918 && (gimple_omp_target_kind (stmt
)
3919 == GF_OMP_TARGET_KIND_REGION
))
3921 c
= omp_find_clause (gimple_omp_target_clauses (stmt
),
3923 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
3926 warning_at (gimple_location (stmt
), 0,
3927 "%qs construct inside of %qs region",
3928 stmt_name
, ctx_stmt_name
);
3940 /* Helper function scan_omp.
3942 Callback for walk_tree or operators in walk_gimple_stmt used to
3943 scan for OMP directives in TP. */
3946 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3948 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3949 omp_context
*ctx
= (omp_context
*) wi
->info
;
3952 switch (TREE_CODE (t
))
3960 tree repl
= remap_decl (t
, &ctx
->cb
);
3961 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3967 if (ctx
&& TYPE_P (t
))
3968 *tp
= remap_type (t
, &ctx
->cb
);
3969 else if (!DECL_P (t
))
3974 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3975 if (tem
!= TREE_TYPE (t
))
3977 if (TREE_CODE (t
) == INTEGER_CST
)
3978 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3980 TREE_TYPE (t
) = tem
;
3990 /* Return true if FNDECL is a setjmp or a longjmp. */
3993 setjmp_or_longjmp_p (const_tree fndecl
)
3995 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3996 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
3999 tree declname
= DECL_NAME (fndecl
);
4001 || (DECL_CONTEXT (fndecl
) != NULL_TREE
4002 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
4003 || !TREE_PUBLIC (fndecl
))
4006 const char *name
= IDENTIFIER_POINTER (declname
);
4007 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
4010 /* Return true if FNDECL is an omp_* runtime API call. */
4013 omp_runtime_api_call (const_tree fndecl
)
4015 tree declname
= DECL_NAME (fndecl
);
4017 || (DECL_CONTEXT (fndecl
) != NULL_TREE
4018 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
4019 || !TREE_PUBLIC (fndecl
))
4022 const char *name
= IDENTIFIER_POINTER (declname
);
4023 if (!startswith (name
, "omp_"))
4026 static const char *omp_runtime_apis
[] =
4028 /* This array has 3 sections. First omp_* calls that don't
4029 have any suffixes. */
4038 "target_associate_ptr",
4039 "target_disassociate_ptr",
4041 "target_is_accessible",
4042 "target_is_present",
4044 "target_memcpy_async",
4045 "target_memcpy_rect",
4046 "target_memcpy_rect_async",
4048 /* Now omp_* calls that are available as omp_* and omp_*_; however, the
4049 DECL_NAME is always omp_* without tailing underscore. */
4051 "destroy_allocator",
4053 "destroy_nest_lock",
4057 "get_affinity_format",
4059 "get_default_allocator",
4060 "get_default_device",
4063 "get_initial_device",
4065 "get_max_active_levels",
4066 "get_max_task_priority",
4075 "get_partition_num_places",
4078 "get_supported_active_levels",
4080 "get_teams_thread_limit",
4090 "is_initial_device",
4092 "pause_resource_all",
4093 "set_affinity_format",
4094 "set_default_allocator",
4102 /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
4103 as DECL_NAME only omp_* and omp_*_8 appear. */
4105 "get_ancestor_thread_num",
4107 "get_partition_place_nums",
4108 "get_place_num_procs",
4109 "get_place_proc_ids",
4112 "set_default_device",
4114 "set_max_active_levels",
4119 "set_teams_thread_limit"
4123 for (unsigned i
= 0; i
< ARRAY_SIZE (omp_runtime_apis
); i
++)
4125 if (omp_runtime_apis
[i
] == NULL
)
4130 size_t len
= strlen (omp_runtime_apis
[i
]);
4131 if (strncmp (name
+ 4, omp_runtime_apis
[i
], len
) == 0
4132 && (name
[4 + len
] == '\0'
4133 || (mode
> 1 && strcmp (name
+ 4 + len
, "_8") == 0)))
4139 /* Helper function for scan_omp.
4141 Callback for walk_gimple_stmt used to scan for OMP directives in
4142 the current statement in GSI. */
4145 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
4146 struct walk_stmt_info
*wi
)
4148 gimple
*stmt
= gsi_stmt (*gsi
);
4149 omp_context
*ctx
= (omp_context
*) wi
->info
;
4151 if (gimple_has_location (stmt
))
4152 input_location
= gimple_location (stmt
);
4154 /* Check the nesting restrictions. */
4155 bool remove
= false;
4156 if (is_gimple_omp (stmt
))
4157 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
4158 else if (is_gimple_call (stmt
))
4160 tree fndecl
= gimple_call_fndecl (stmt
);
4164 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4165 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
4166 && setjmp_or_longjmp_p (fndecl
)
4170 error_at (gimple_location (stmt
),
4171 "setjmp/longjmp inside %<simd%> construct");
4173 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
4174 switch (DECL_FUNCTION_CODE (fndecl
))
4176 case BUILT_IN_GOMP_BARRIER
:
4177 case BUILT_IN_GOMP_CANCEL
:
4178 case BUILT_IN_GOMP_CANCELLATION_POINT
:
4179 case BUILT_IN_GOMP_TASKYIELD
:
4180 case BUILT_IN_GOMP_TASKWAIT
:
4181 case BUILT_IN_GOMP_TASKGROUP_START
:
4182 case BUILT_IN_GOMP_TASKGROUP_END
:
4183 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
4190 omp_context
*octx
= ctx
;
4191 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
&& ctx
->outer
)
4193 if (octx
->order_concurrent
&& omp_runtime_api_call (fndecl
))
4196 error_at (gimple_location (stmt
),
4197 "OpenMP runtime API call %qD in a region with "
4198 "%<order(concurrent)%> clause", fndecl
);
4200 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4201 && omp_runtime_api_call (fndecl
)
4202 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl
))
4203 != strlen ("omp_get_num_teams"))
4204 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl
)),
4205 "omp_get_num_teams") != 0)
4206 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl
))
4207 != strlen ("omp_get_team_num"))
4208 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl
)),
4209 "omp_get_team_num") != 0))
4212 error_at (gimple_location (stmt
),
4213 "OpenMP runtime API call %qD strictly nested in a "
4214 "%<teams%> region", fndecl
);
4216 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
4217 && (gimple_omp_target_kind (ctx
->stmt
)
4218 == GF_OMP_TARGET_KIND_REGION
)
4219 && omp_runtime_api_call (fndecl
))
4221 tree tgt_clauses
= gimple_omp_target_clauses (ctx
->stmt
);
4222 tree c
= omp_find_clause (tgt_clauses
, OMP_CLAUSE_DEVICE
);
4223 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
4224 error_at (gimple_location (stmt
),
4225 "OpenMP runtime API call %qD in a region with "
4226 "%<device(ancestor)%> clause", fndecl
);
4233 stmt
= gimple_build_nop ();
4234 gsi_replace (gsi
, stmt
, false);
4237 *handled_ops_p
= true;
4239 switch (gimple_code (stmt
))
4241 case GIMPLE_OMP_PARALLEL
:
4242 taskreg_nesting_level
++;
4243 scan_omp_parallel (gsi
, ctx
);
4244 taskreg_nesting_level
--;
4247 case GIMPLE_OMP_TASK
:
4248 taskreg_nesting_level
++;
4249 scan_omp_task (gsi
, ctx
);
4250 taskreg_nesting_level
--;
4253 case GIMPLE_OMP_FOR
:
4254 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
4255 == GF_OMP_FOR_KIND_SIMD
)
4256 && gimple_omp_for_combined_into_p (stmt
)
4257 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SCAN
)
4259 tree clauses
= gimple_omp_for_clauses (as_a
<gomp_for
*> (stmt
));
4260 tree c
= omp_find_clause (clauses
, OMP_CLAUSE_REDUCTION
);
4261 if (c
&& OMP_CLAUSE_REDUCTION_INSCAN (c
) && !seen_error ())
4263 scan_omp_simd_scan (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
4267 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
4268 == GF_OMP_FOR_KIND_SIMD
)
4269 && omp_maybe_offloaded_ctx (ctx
)
4270 && omp_max_simt_vf ()
4271 && gimple_omp_for_collapse (stmt
) == 1)
4272 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
4274 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
4277 case GIMPLE_OMP_SCOPE
:
4278 ctx
= new_omp_context (stmt
, ctx
);
4279 scan_sharing_clauses (gimple_omp_scope_clauses (stmt
), ctx
);
4280 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4283 case GIMPLE_OMP_SECTIONS
:
4284 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
4287 case GIMPLE_OMP_SINGLE
:
4288 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
4291 case GIMPLE_OMP_SCAN
:
4292 if (tree clauses
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)))
4294 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_INCLUSIVE
)
4295 ctx
->scan_inclusive
= true;
4296 else if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_EXCLUSIVE
)
4297 ctx
->scan_exclusive
= true;
4300 case GIMPLE_OMP_SECTION
:
4301 case GIMPLE_OMP_MASTER
:
4302 case GIMPLE_OMP_ORDERED
:
4303 case GIMPLE_OMP_CRITICAL
:
4304 ctx
= new_omp_context (stmt
, ctx
);
4305 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4308 case GIMPLE_OMP_MASKED
:
4309 ctx
= new_omp_context (stmt
, ctx
);
4310 scan_sharing_clauses (gimple_omp_masked_clauses (stmt
), ctx
);
4311 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4314 case GIMPLE_OMP_TASKGROUP
:
4315 ctx
= new_omp_context (stmt
, ctx
);
4316 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
4317 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4320 case GIMPLE_OMP_TARGET
:
4321 if (is_gimple_omp_offloaded (stmt
))
4323 taskreg_nesting_level
++;
4324 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
4325 taskreg_nesting_level
--;
4328 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
4331 case GIMPLE_OMP_TEAMS
:
4332 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
4334 taskreg_nesting_level
++;
4335 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
4336 taskreg_nesting_level
--;
4339 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
4346 *handled_ops_p
= false;
4348 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
4350 var
= DECL_CHAIN (var
))
4351 insert_decl_map (&ctx
->cb
, var
, var
);
4355 *handled_ops_p
= false;
4363 /* Scan all the statements starting at the current statement. CTX
4364 contains context information about the OMP directives and
4365 clauses found during the scan. */
4368 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
4370 location_t saved_location
;
4371 struct walk_stmt_info wi
;
4373 memset (&wi
, 0, sizeof (wi
));
4375 wi
.want_locations
= true;
4377 saved_location
= input_location
;
4378 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
4379 input_location
= saved_location
;
4382 /* Re-gimplification and code generation routines. */
4384 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4385 of BIND if in a method. */
4388 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
4390 if (DECL_ARGUMENTS (current_function_decl
)
4391 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
4392 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
4395 tree vars
= gimple_bind_vars (bind
);
4396 for (tree
*pvar
= &vars
; *pvar
; )
4397 if (omp_member_access_dummy_var (*pvar
))
4398 *pvar
= DECL_CHAIN (*pvar
);
4400 pvar
= &DECL_CHAIN (*pvar
);
4401 gimple_bind_set_vars (bind
, vars
);
4405 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4406 block and its subblocks. */
4409 remove_member_access_dummy_vars (tree block
)
4411 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
4412 if (omp_member_access_dummy_var (*pvar
))
4413 *pvar
= DECL_CHAIN (*pvar
);
4415 pvar
= &DECL_CHAIN (*pvar
);
4417 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
4418 remove_member_access_dummy_vars (block
);
4421 /* If a context was created for STMT when it was scanned, return it. */
4423 static omp_context
*
4424 maybe_lookup_ctx (gimple
*stmt
)
4427 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
4428 return n
? (omp_context
*) n
->value
: NULL
;
4432 /* Find the mapping for DECL in CTX or the immediately enclosing
4433 context that has a mapping for DECL.
4435 If CTX is a nested parallel directive, we may have to use the decl
4436 mappings created in CTX's parent context. Suppose that we have the
4437 following parallel nesting (variable UIDs showed for clarity):
4440 #omp parallel shared(iD.1562) -> outer parallel
4441 iD.1562 = iD.1562 + 1;
4443 #omp parallel shared (iD.1562) -> inner parallel
4444 iD.1562 = iD.1562 - 1;
4446 Each parallel structure will create a distinct .omp_data_s structure
4447 for copying iD.1562 in/out of the directive:
4449 outer parallel .omp_data_s.1.i -> iD.1562
4450 inner parallel .omp_data_s.2.i -> iD.1562
4452 A shared variable mapping will produce a copy-out operation before
4453 the parallel directive and a copy-in operation after it. So, in
4454 this case we would have:
4457 .omp_data_o.1.i = iD.1562;
4458 #omp parallel shared(iD.1562) -> outer parallel
4459 .omp_data_i.1 = &.omp_data_o.1
4460 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4462 .omp_data_o.2.i = iD.1562; -> **
4463 #omp parallel shared(iD.1562) -> inner parallel
4464 .omp_data_i.2 = &.omp_data_o.2
4465 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4468 ** This is a problem. The symbol iD.1562 cannot be referenced
4469 inside the body of the outer parallel region. But since we are
4470 emitting this copy operation while expanding the inner parallel
4471 directive, we need to access the CTX structure of the outer
4472 parallel directive to get the correct mapping:
4474 .omp_data_o.2.i = .omp_data_i.1->i
4476 Since there may be other workshare or parallel directives enclosing
4477 the parallel directive, it may be necessary to walk up the context
4478 parent chain. This is not a problem in general because nested
4479 parallelism happens only rarely. */
4482 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4487 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4488 t
= maybe_lookup_decl (decl
, up
);
4490 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
4492 return t
? t
: decl
;
4496 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4497 in outer contexts. */
4500 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4505 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4506 t
= maybe_lookup_decl (decl
, up
);
4508 return t
? t
: decl
;
4512 /* Construct the initialization value for reduction operation OP. */
4515 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
4524 case TRUTH_ORIF_EXPR
:
4525 case TRUTH_XOR_EXPR
:
4527 return build_zero_cst (type
);
4530 case TRUTH_AND_EXPR
:
4531 case TRUTH_ANDIF_EXPR
:
4533 return fold_convert_loc (loc
, type
, integer_one_node
);
4536 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
4539 if (SCALAR_FLOAT_TYPE_P (type
))
4541 REAL_VALUE_TYPE min
;
4542 if (HONOR_INFINITIES (type
))
4543 real_arithmetic (&min
, NEGATE_EXPR
, &dconstinf
, NULL
);
4545 real_maxval (&min
, 1, TYPE_MODE (type
));
4546 return build_real (type
, min
);
4548 else if (POINTER_TYPE_P (type
))
4551 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4552 return wide_int_to_tree (type
, min
);
4556 gcc_assert (INTEGRAL_TYPE_P (type
));
4557 return TYPE_MIN_VALUE (type
);
4561 if (SCALAR_FLOAT_TYPE_P (type
))
4563 REAL_VALUE_TYPE max
;
4564 if (HONOR_INFINITIES (type
))
4567 real_maxval (&max
, 0, TYPE_MODE (type
));
4568 return build_real (type
, max
);
4570 else if (POINTER_TYPE_P (type
))
4573 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4574 return wide_int_to_tree (type
, max
);
4578 gcc_assert (INTEGRAL_TYPE_P (type
));
4579 return TYPE_MAX_VALUE (type
);
4587 /* Construct the initialization value for reduction CLAUSE. */
4590 omp_reduction_init (tree clause
, tree type
)
4592 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
4593 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
4596 /* Return alignment to be assumed for var in CLAUSE, which should be
4597 OMP_CLAUSE_ALIGNED. */
4600 omp_clause_aligned_alignment (tree clause
)
4602 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
4603 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
4605 /* Otherwise return implementation defined alignment. */
4606 unsigned int al
= 1;
4607 opt_scalar_mode mode_iter
;
4608 auto_vector_modes modes
;
4609 targetm
.vectorize
.autovectorize_vector_modes (&modes
, true);
4610 static enum mode_class classes
[]
4611 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
4612 for (int i
= 0; i
< 4; i
+= 2)
4613 /* The for loop above dictates that we only walk through scalar classes. */
4614 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
4616 scalar_mode mode
= mode_iter
.require ();
4617 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
4618 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
4620 machine_mode alt_vmode
;
4621 for (unsigned int j
= 0; j
< modes
.length (); ++j
)
4622 if (related_vector_mode (modes
[j
], mode
).exists (&alt_vmode
)
4623 && known_ge (GET_MODE_SIZE (alt_vmode
), GET_MODE_SIZE (vmode
)))
4626 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
4627 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
4629 type
= build_vector_type_for_mode (type
, vmode
);
4630 if (TYPE_MODE (type
) != vmode
)
4632 if (TYPE_ALIGN_UNIT (type
) > al
)
4633 al
= TYPE_ALIGN_UNIT (type
);
4635 return build_int_cst (integer_type_node
, al
);
4639 /* This structure is part of the interface between lower_rec_simd_input_clauses
4640 and lower_rec_input_clauses. */
4642 class omplow_simd_context
{
4644 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4648 vec
<tree
, va_heap
> simt_eargs
;
4649 gimple_seq simt_dlist
;
4650 poly_uint64_pod max_vf
;
4654 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4658 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
4659 omplow_simd_context
*sctx
, tree
&ivar
,
4660 tree
&lvar
, tree
*rvar
= NULL
,
4663 if (known_eq (sctx
->max_vf
, 0U))
4665 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
4666 if (maybe_gt (sctx
->max_vf
, 1U))
4668 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4669 OMP_CLAUSE_SAFELEN
);
4672 poly_uint64 safe_len
;
4673 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4674 || maybe_lt (safe_len
, 1U))
4677 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
4680 if (sctx
->is_simt
&& !known_eq (sctx
->max_vf
, 1U))
4682 for (tree c
= gimple_omp_for_clauses (ctx
->stmt
); c
;
4683 c
= OMP_CLAUSE_CHAIN (c
))
4685 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4688 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4690 /* UDR reductions are not supported yet for SIMT, disable
4696 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c
))
4697 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var
)))
4699 /* Doing boolean operations on non-integral types is
4700 for conformance only, it's not worth supporting this
4707 if (maybe_gt (sctx
->max_vf
, 1U))
4709 sctx
->idx
= create_tmp_var (unsigned_type_node
);
4710 sctx
->lane
= create_tmp_var (unsigned_type_node
);
4713 if (known_eq (sctx
->max_vf
, 1U))
4718 if (is_gimple_reg (new_var
))
4720 ivar
= lvar
= new_var
;
4723 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
4724 ivar
= lvar
= create_tmp_var (type
);
4725 TREE_ADDRESSABLE (ivar
) = 1;
4726 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
4727 NULL
, DECL_ATTRIBUTES (ivar
));
4728 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
4729 tree clobber
= build_clobber (type
);
4730 gimple
*g
= gimple_build_assign (ivar
, clobber
);
4731 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
4735 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
4736 tree avar
= create_tmp_var_raw (atype
);
4737 if (TREE_ADDRESSABLE (new_var
))
4738 TREE_ADDRESSABLE (avar
) = 1;
4739 DECL_ATTRIBUTES (avar
)
4740 = tree_cons (get_identifier ("omp simd array"), NULL
,
4741 DECL_ATTRIBUTES (avar
));
4742 gimple_add_tmp_var (avar
);
4744 if (rvar
&& !ctx
->for_simd_scan_phase
)
4746 /* For inscan reductions, create another array temporary,
4747 which will hold the reduced value. */
4748 iavar
= create_tmp_var_raw (atype
);
4749 if (TREE_ADDRESSABLE (new_var
))
4750 TREE_ADDRESSABLE (iavar
) = 1;
4751 DECL_ATTRIBUTES (iavar
)
4752 = tree_cons (get_identifier ("omp simd array"), NULL
,
4753 tree_cons (get_identifier ("omp simd inscan"), NULL
,
4754 DECL_ATTRIBUTES (iavar
)));
4755 gimple_add_tmp_var (iavar
);
4756 ctx
->cb
.decl_map
->put (avar
, iavar
);
4757 if (sctx
->lastlane
== NULL_TREE
)
4758 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
4759 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
4760 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
4761 TREE_THIS_NOTRAP (*rvar
) = 1;
4763 if (ctx
->scan_exclusive
)
4765 /* And for exclusive scan yet another one, which will
4766 hold the value during the scan phase. */
4767 tree savar
= create_tmp_var_raw (atype
);
4768 if (TREE_ADDRESSABLE (new_var
))
4769 TREE_ADDRESSABLE (savar
) = 1;
4770 DECL_ATTRIBUTES (savar
)
4771 = tree_cons (get_identifier ("omp simd array"), NULL
,
4772 tree_cons (get_identifier ("omp simd inscan "
4774 DECL_ATTRIBUTES (savar
)));
4775 gimple_add_tmp_var (savar
);
4776 ctx
->cb
.decl_map
->put (iavar
, savar
);
4777 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
4778 sctx
->idx
, NULL_TREE
, NULL_TREE
);
4779 TREE_THIS_NOTRAP (*rvar2
) = 1;
4782 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
4783 NULL_TREE
, NULL_TREE
);
4784 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
4785 NULL_TREE
, NULL_TREE
);
4786 TREE_THIS_NOTRAP (ivar
) = 1;
4787 TREE_THIS_NOTRAP (lvar
) = 1;
4789 if (DECL_P (new_var
))
4791 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4792 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4797 /* Helper function of lower_rec_input_clauses. For a reference
4798 in simd reduction, add an underlying variable it will reference. */
4801 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
4803 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
4804 if (TREE_CONSTANT (z
))
4806 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
4807 get_name (new_vard
));
4808 gimple_add_tmp_var (z
);
4809 TREE_ADDRESSABLE (z
) = 1;
4810 z
= build_fold_addr_expr_loc (loc
, z
);
4811 gimplify_assign (new_vard
, z
, ilist
);
4815 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4816 code to emit (type) (tskred_temp[idx]). */
4819 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
4822 unsigned HOST_WIDE_INT sz
4823 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
4824 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
4825 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
4827 tree v
= create_tmp_var (pointer_sized_int_node
);
4828 gimple
*g
= gimple_build_assign (v
, r
);
4829 gimple_seq_add_stmt (ilist
, g
);
4830 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
4832 v
= create_tmp_var (type
);
4833 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
4834 gimple_seq_add_stmt (ilist
, g
);
4839 /* Lower early initialization of privatized variable NEW_VAR
4840 if it needs an allocator (has allocate clause). */
4843 lower_private_allocate (tree var
, tree new_var
, tree
&allocator
,
4844 tree
&allocate_ptr
, gimple_seq
*ilist
,
4845 omp_context
*ctx
, bool is_ref
, tree size
)
4849 gcc_assert (allocate_ptr
== NULL_TREE
);
4850 if (ctx
->allocate_map
4851 && (DECL_P (new_var
) || (TYPE_P (new_var
) && size
)))
4852 if (tree
*allocatorp
= ctx
->allocate_map
->get (var
))
4853 allocator
= *allocatorp
;
4854 if (allocator
== NULL_TREE
)
4856 if (!is_ref
&& omp_privatize_by_reference (var
))
4858 allocator
= NULL_TREE
;
4862 unsigned HOST_WIDE_INT ialign
= 0;
4863 if (TREE_CODE (allocator
) == TREE_LIST
)
4865 ialign
= tree_to_uhwi (TREE_VALUE (allocator
));
4866 allocator
= TREE_PURPOSE (allocator
);
4868 if (TREE_CODE (allocator
) != INTEGER_CST
)
4869 allocator
= build_outer_var_ref (allocator
, ctx
, OMP_CLAUSE_ALLOCATE
);
4870 allocator
= fold_convert (pointer_sized_int_node
, allocator
);
4871 if (TREE_CODE (allocator
) != INTEGER_CST
)
4873 tree var
= create_tmp_var (TREE_TYPE (allocator
));
4874 gimplify_assign (var
, allocator
, ilist
);
4878 tree ptr_type
, align
, sz
= size
;
4879 if (TYPE_P (new_var
))
4881 ptr_type
= build_pointer_type (new_var
);
4882 ialign
= MAX (ialign
, TYPE_ALIGN_UNIT (new_var
));
4886 ptr_type
= build_pointer_type (TREE_TYPE (TREE_TYPE (new_var
)));
4887 ialign
= MAX (ialign
, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type
)));
4891 ptr_type
= build_pointer_type (TREE_TYPE (new_var
));
4892 ialign
= MAX (ialign
, DECL_ALIGN_UNIT (new_var
));
4893 if (sz
== NULL_TREE
)
4894 sz
= fold_convert (size_type_node
, DECL_SIZE_UNIT (new_var
));
4896 align
= build_int_cst (size_type_node
, ialign
);
4897 if (TREE_CODE (sz
) != INTEGER_CST
)
4899 tree szvar
= create_tmp_var (size_type_node
);
4900 gimplify_assign (szvar
, sz
, ilist
);
4903 allocate_ptr
= create_tmp_var (ptr_type
);
4904 tree a
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
4905 gimple
*g
= gimple_build_call (a
, 3, align
, sz
, allocator
);
4906 gimple_call_set_lhs (g
, allocate_ptr
);
4907 gimple_seq_add_stmt (ilist
, g
);
4910 tree x
= build_simple_mem_ref (allocate_ptr
);
4911 TREE_THIS_NOTRAP (x
) = 1;
4912 SET_DECL_VALUE_EXPR (new_var
, x
);
4913 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4918 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4919 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4920 private variables. Initialization statements go in ILIST, while calls
4921 to destructors go in DLIST. */
4924 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
4925 omp_context
*ctx
, struct omp_for_data
*fd
)
4927 tree c
, copyin_seq
, x
, ptr
;
4928 bool copyin_by_ref
= false;
4929 bool lastprivate_firstprivate
= false;
4930 bool reduction_omp_orig_ref
= false;
4932 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4933 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
4934 omplow_simd_context sctx
= omplow_simd_context ();
4935 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
4936 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
4937 gimple_seq llist
[4] = { };
4938 tree nonconst_simd_if
= NULL_TREE
;
4941 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
4943 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4944 with data sharing clauses referencing variable sized vars. That
4945 is unnecessarily hard to support and very unlikely to result in
4946 vectorized code anyway. */
4948 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4949 switch (OMP_CLAUSE_CODE (c
))
4951 case OMP_CLAUSE_LINEAR
:
4952 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4955 case OMP_CLAUSE_PRIVATE
:
4956 case OMP_CLAUSE_FIRSTPRIVATE
:
4957 case OMP_CLAUSE_LASTPRIVATE
:
4958 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
4960 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c
)))
4962 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4963 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4967 case OMP_CLAUSE_REDUCTION
:
4968 case OMP_CLAUSE_IN_REDUCTION
:
4969 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
4970 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
4972 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c
)))
4974 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4975 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4980 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
4982 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
4983 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
4985 case OMP_CLAUSE_SIMDLEN
:
4986 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
4989 case OMP_CLAUSE__CONDTEMP_
:
4990 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4998 /* Add a placeholder for simduid. */
4999 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
5000 sctx
.simt_eargs
.safe_push (NULL_TREE
);
5002 unsigned task_reduction_cnt
= 0;
5003 unsigned task_reduction_cntorig
= 0;
5004 unsigned task_reduction_cnt_full
= 0;
5005 unsigned task_reduction_cntorig_full
= 0;
5006 unsigned task_reduction_other_cnt
= 0;
5007 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
5008 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
5009 /* Do all the fixed sized types in the first pass, and the variable sized
5010 types in the second pass. This makes sure that the scalar arguments to
5011 the variable sized types are processed before we use them in the
5012 variable sized operations. For task reductions we use 4 passes, in the
5013 first two we ignore them, in the third one gather arguments for
5014 GOMP_task_reduction_remap call and in the last pass actually handle
5015 the task reductions. */
5016 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
5019 if (pass
== 2 && task_reduction_cnt
)
5022 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
5023 + task_reduction_cntorig
);
5024 tskred_avar
= create_tmp_var_raw (tskred_atype
);
5025 gimple_add_tmp_var (tskred_avar
);
5026 TREE_ADDRESSABLE (tskred_avar
) = 1;
5027 task_reduction_cnt_full
= task_reduction_cnt
;
5028 task_reduction_cntorig_full
= task_reduction_cntorig
;
5030 else if (pass
== 3 && task_reduction_cnt
)
5032 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
5034 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
5035 size_int (task_reduction_cntorig
),
5036 build_fold_addr_expr (tskred_avar
));
5037 gimple_seq_add_stmt (ilist
, g
);
5039 if (pass
== 3 && task_reduction_other_cnt
)
5041 /* For reduction clauses, build
5042 tskred_base = (void *) tskred_temp[2]
5043 + omp_get_thread_num () * tskred_temp[1]
5044 or if tskred_temp[1] is known to be constant, that constant
5045 directly. This is the start of the private reduction copy block
5046 for the current thread. */
5047 tree v
= create_tmp_var (integer_type_node
);
5048 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
5049 gimple
*g
= gimple_build_call (x
, 0);
5050 gimple_call_set_lhs (g
, v
);
5051 gimple_seq_add_stmt (ilist
, g
);
5052 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
5053 tskred_temp
= OMP_CLAUSE_DECL (c
);
5054 if (is_taskreg_ctx (ctx
))
5055 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
5056 tree v2
= create_tmp_var (sizetype
);
5057 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
5058 gimple_seq_add_stmt (ilist
, g
);
5059 if (ctx
->task_reductions
[0])
5060 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
5062 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
5063 tree v3
= create_tmp_var (sizetype
);
5064 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
5065 gimple_seq_add_stmt (ilist
, g
);
5066 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
5067 tskred_base
= create_tmp_var (ptr_type_node
);
5068 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
5069 gimple_seq_add_stmt (ilist
, g
);
5071 task_reduction_cnt
= 0;
5072 task_reduction_cntorig
= 0;
5073 task_reduction_other_cnt
= 0;
5074 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5076 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
5079 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5080 bool task_reduction_p
= false;
5081 bool task_reduction_needs_orig_p
= false;
5082 tree cond
= NULL_TREE
;
5083 tree allocator
, allocate_ptr
;
5087 case OMP_CLAUSE_PRIVATE
:
5088 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
5091 case OMP_CLAUSE_SHARED
:
5092 /* Ignore shared directives in teams construct inside
5093 of target construct. */
5094 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5095 && !is_host_teams_ctx (ctx
))
5097 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
5099 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
5100 || is_global_var (OMP_CLAUSE_DECL (c
)));
5103 case OMP_CLAUSE_FIRSTPRIVATE
:
5104 case OMP_CLAUSE_COPYIN
:
5106 case OMP_CLAUSE_LINEAR
:
5107 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
5108 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
5109 lastprivate_firstprivate
= true;
5111 case OMP_CLAUSE_REDUCTION
:
5112 case OMP_CLAUSE_IN_REDUCTION
:
5113 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
5114 || is_task_ctx (ctx
)
5115 || OMP_CLAUSE_REDUCTION_TASK (c
))
5117 task_reduction_p
= true;
5118 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
5120 task_reduction_other_cnt
++;
5125 task_reduction_cnt
++;
5126 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5128 var
= OMP_CLAUSE_DECL (c
);
5129 /* If var is a global variable that isn't privatized
5130 in outer contexts, we don't need to look up the
5131 original address, it is always the address of the
5132 global variable itself. */
5134 || omp_privatize_by_reference (var
)
5136 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
5138 task_reduction_needs_orig_p
= true;
5139 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5140 task_reduction_cntorig
++;
5144 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5145 reduction_omp_orig_ref
= true;
5147 case OMP_CLAUSE__REDUCTEMP_
:
5148 if (!is_taskreg_ctx (ctx
))
5151 case OMP_CLAUSE__LOOPTEMP_
:
5152 /* Handle _looptemp_/_reductemp_ clauses only on
5157 case OMP_CLAUSE_LASTPRIVATE
:
5158 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5160 lastprivate_firstprivate
= true;
5161 if (pass
!= 0 || is_taskloop_ctx (ctx
))
5164 /* Even without corresponding firstprivate, if
5165 decl is Fortran allocatable, it needs outer var
5168 && lang_hooks
.decls
.omp_private_outer_ref
5169 (OMP_CLAUSE_DECL (c
)))
5170 lastprivate_firstprivate
= true;
5172 case OMP_CLAUSE_ALIGNED
:
5175 var
= OMP_CLAUSE_DECL (c
);
5176 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
5177 && !is_global_var (var
))
5179 new_var
= maybe_lookup_decl (var
, ctx
);
5180 if (new_var
== NULL_TREE
)
5181 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5182 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
5183 tree alarg
= omp_clause_aligned_alignment (c
);
5184 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
5185 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
5186 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5187 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5188 gimplify_and_add (x
, ilist
);
5190 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
5191 && is_global_var (var
))
5193 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
5194 new_var
= lookup_decl (var
, ctx
);
5195 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5196 t
= build_fold_addr_expr_loc (clause_loc
, t
);
5197 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
5198 tree alarg
= omp_clause_aligned_alignment (c
);
5199 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
5200 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
5201 t
= fold_convert_loc (clause_loc
, ptype
, t
);
5202 x
= create_tmp_var (ptype
);
5203 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
5204 gimplify_and_add (t
, ilist
);
5205 t
= build_simple_mem_ref_loc (clause_loc
, x
);
5206 SET_DECL_VALUE_EXPR (new_var
, t
);
5207 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5210 case OMP_CLAUSE__CONDTEMP_
:
5211 if (is_parallel_ctx (ctx
)
5212 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
5219 if (task_reduction_p
!= (pass
>= 2))
5222 allocator
= NULL_TREE
;
5223 allocate_ptr
= NULL_TREE
;
5224 new_var
= var
= OMP_CLAUSE_DECL (c
);
5225 if ((c_kind
== OMP_CLAUSE_REDUCTION
5226 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5227 && TREE_CODE (var
) == MEM_REF
)
5229 var
= TREE_OPERAND (var
, 0);
5230 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
5231 var
= TREE_OPERAND (var
, 0);
5232 if (TREE_CODE (var
) == INDIRECT_REF
5233 || TREE_CODE (var
) == ADDR_EXPR
)
5234 var
= TREE_OPERAND (var
, 0);
5235 if (is_variable_sized (var
))
5237 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
5238 var
= DECL_VALUE_EXPR (var
);
5239 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
5240 var
= TREE_OPERAND (var
, 0);
5241 gcc_assert (DECL_P (var
));
5245 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
&& is_omp_target (ctx
->stmt
))
5247 splay_tree_key key
= (splay_tree_key
) &DECL_CONTEXT (var
);
5248 new_var
= (tree
) splay_tree_lookup (ctx
->field_map
, key
)->value
;
5250 else if (c_kind
!= OMP_CLAUSE_COPYIN
)
5251 new_var
= lookup_decl (var
, ctx
);
5253 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
5258 /* C/C++ array section reductions. */
5259 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5260 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5261 && var
!= OMP_CLAUSE_DECL (c
))
5266 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
5267 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
5269 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
5271 tree b
= TREE_OPERAND (orig_var
, 1);
5272 if (is_omp_target (ctx
->stmt
))
5275 b
= maybe_lookup_decl (b
, ctx
);
5278 b
= TREE_OPERAND (orig_var
, 1);
5279 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
5281 if (integer_zerop (bias
))
5285 bias
= fold_convert_loc (clause_loc
,
5286 TREE_TYPE (b
), bias
);
5287 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5288 TREE_TYPE (b
), b
, bias
);
5290 orig_var
= TREE_OPERAND (orig_var
, 0);
5294 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5295 if (is_global_var (out
)
5296 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
5297 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
5298 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
5301 else if (is_omp_target (ctx
->stmt
))
5305 bool by_ref
= use_pointer_for_field (var
, NULL
);
5306 x
= build_receiver_ref (var
, by_ref
, ctx
);
5307 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
5308 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
5310 x
= build_fold_addr_expr (x
);
5312 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
5313 x
= build_simple_mem_ref (x
);
5314 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
5316 if (var
== TREE_OPERAND (orig_var
, 0))
5317 x
= build_fold_addr_expr (x
);
5319 bias
= fold_convert (sizetype
, bias
);
5320 x
= fold_convert (ptr_type_node
, x
);
5321 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
5322 TREE_TYPE (x
), x
, bias
);
5323 unsigned cnt
= task_reduction_cnt
- 1;
5324 if (!task_reduction_needs_orig_p
)
5325 cnt
+= (task_reduction_cntorig_full
5326 - task_reduction_cntorig
);
5328 cnt
= task_reduction_cntorig
- 1;
5329 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5330 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5331 gimplify_assign (r
, x
, ilist
);
5335 if (TREE_CODE (orig_var
) == INDIRECT_REF
5336 || TREE_CODE (orig_var
) == ADDR_EXPR
)
5337 orig_var
= TREE_OPERAND (orig_var
, 0);
5338 tree d
= OMP_CLAUSE_DECL (c
);
5339 tree type
= TREE_TYPE (d
);
5340 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
5341 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
5343 const char *name
= get_name (orig_var
);
5344 if (pass
!= 3 && !TREE_CONSTANT (v
))
5347 if (is_omp_target (ctx
->stmt
))
5350 t
= maybe_lookup_decl (v
, ctx
);
5354 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5355 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
5356 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5358 build_int_cst (TREE_TYPE (v
), 1));
5359 sz
= fold_build2_loc (clause_loc
, MULT_EXPR
,
5361 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5365 tree xv
= create_tmp_var (ptr_type_node
);
5366 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5368 unsigned cnt
= task_reduction_cnt
- 1;
5369 if (!task_reduction_needs_orig_p
)
5370 cnt
+= (task_reduction_cntorig_full
5371 - task_reduction_cntorig
);
5373 cnt
= task_reduction_cntorig
- 1;
5374 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5375 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5377 gimple
*g
= gimple_build_assign (xv
, x
);
5378 gimple_seq_add_stmt (ilist
, g
);
5382 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5384 if (ctx
->task_reductions
[1 + idx
])
5385 off
= fold_convert (sizetype
,
5386 ctx
->task_reductions
[1 + idx
]);
5388 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5390 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
5392 gimple_seq_add_stmt (ilist
, g
);
5394 x
= fold_convert (build_pointer_type (boolean_type_node
),
5396 if (TREE_CONSTANT (v
))
5397 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
5398 TYPE_SIZE_UNIT (type
));
5402 if (is_omp_target (ctx
->stmt
))
5405 t
= maybe_lookup_decl (v
, ctx
);
5409 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5410 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
5412 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5414 build_int_cst (TREE_TYPE (v
), 1));
5415 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
5417 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5418 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5420 cond
= create_tmp_var (TREE_TYPE (x
));
5421 gimplify_assign (cond
, x
, ilist
);
5424 else if (lower_private_allocate (var
, type
, allocator
,
5425 allocate_ptr
, ilist
, ctx
,
5428 ? TYPE_SIZE_UNIT (type
)
5431 else if (TREE_CONSTANT (v
))
5433 x
= create_tmp_var_raw (type
, name
);
5434 gimple_add_tmp_var (x
);
5435 TREE_ADDRESSABLE (x
) = 1;
5436 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5441 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5442 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
5443 x
= build_call_expr_loc (clause_loc
, atmp
, 2, sz
, al
);
5446 tree ptype
= build_pointer_type (TREE_TYPE (type
));
5447 x
= fold_convert_loc (clause_loc
, ptype
, x
);
5448 tree y
= create_tmp_var (ptype
, name
);
5449 gimplify_assign (y
, x
, ilist
);
5453 if (!integer_zerop (bias
))
5455 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
5457 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
5459 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
5460 pointer_sized_int_node
, yb
, bias
);
5461 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
5462 yb
= create_tmp_var (ptype
, name
);
5463 gimplify_assign (yb
, x
, ilist
);
5467 d
= TREE_OPERAND (d
, 0);
5468 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
5469 d
= TREE_OPERAND (d
, 0);
5470 if (TREE_CODE (d
) == ADDR_EXPR
)
5472 if (orig_var
!= var
)
5474 gcc_assert (is_variable_sized (orig_var
));
5475 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
5477 gimplify_assign (new_var
, x
, ilist
);
5478 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
5479 tree t
= build_fold_indirect_ref (new_var
);
5480 DECL_IGNORED_P (new_var
) = 0;
5481 TREE_THIS_NOTRAP (t
) = 1;
5482 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
5483 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
5487 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
5488 build_int_cst (ptype
, 0));
5489 SET_DECL_VALUE_EXPR (new_var
, x
);
5490 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5495 gcc_assert (orig_var
== var
);
5496 if (TREE_CODE (d
) == INDIRECT_REF
)
5498 x
= create_tmp_var (ptype
, name
);
5499 TREE_ADDRESSABLE (x
) = 1;
5500 gimplify_assign (x
, yb
, ilist
);
5501 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5503 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5504 gimplify_assign (new_var
, x
, ilist
);
5506 /* GOMP_taskgroup_reduction_register memsets the whole
5507 array to zero. If the initializer is zero, we don't
5508 need to initialize it again, just mark it as ever
5509 used unconditionally, i.e. cond = true. */
5511 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
5512 && initializer_zerop (omp_reduction_init (c
,
5515 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
5517 gimple_seq_add_stmt (ilist
, g
);
5520 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5524 if (!is_parallel_ctx (ctx
))
5526 tree condv
= create_tmp_var (boolean_type_node
);
5527 g
= gimple_build_assign (condv
,
5528 build_simple_mem_ref (cond
));
5529 gimple_seq_add_stmt (ilist
, g
);
5530 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
5531 g
= gimple_build_cond (NE_EXPR
, condv
,
5532 boolean_false_node
, end
, lab1
);
5533 gimple_seq_add_stmt (ilist
, g
);
5534 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
5536 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5538 gimple_seq_add_stmt (ilist
, g
);
5541 tree y1
= create_tmp_var (ptype
);
5542 gimplify_assign (y1
, y
, ilist
);
5543 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
5544 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
5545 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
5546 if (task_reduction_needs_orig_p
)
5548 y3
= create_tmp_var (ptype
);
5550 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5551 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5552 size_int (task_reduction_cnt_full
5553 + task_reduction_cntorig
- 1),
5554 NULL_TREE
, NULL_TREE
);
5557 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5558 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
5561 gimplify_assign (y3
, ref
, ilist
);
5563 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
5567 y2
= create_tmp_var (ptype
);
5568 gimplify_assign (y2
, y
, ilist
);
5570 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5572 tree ref
= build_outer_var_ref (var
, ctx
);
5573 /* For ref build_outer_var_ref already performs this. */
5574 if (TREE_CODE (d
) == INDIRECT_REF
)
5575 gcc_assert (omp_privatize_by_reference (var
));
5576 else if (TREE_CODE (d
) == ADDR_EXPR
)
5577 ref
= build_fold_addr_expr (ref
);
5578 else if (omp_privatize_by_reference (var
))
5579 ref
= build_fold_addr_expr (ref
);
5580 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
5581 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
5582 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5584 y3
= create_tmp_var (ptype
);
5585 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
5589 y4
= create_tmp_var (ptype
);
5590 gimplify_assign (y4
, ref
, dlist
);
5594 tree i
= create_tmp_var (TREE_TYPE (v
));
5595 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
5596 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5597 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
5600 i2
= create_tmp_var (TREE_TYPE (v
));
5601 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
5602 body2
= create_artificial_label (UNKNOWN_LOCATION
);
5603 end2
= create_artificial_label (UNKNOWN_LOCATION
);
5604 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
5606 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5608 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5609 tree decl_placeholder
5610 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
5611 SET_DECL_VALUE_EXPR (decl_placeholder
,
5612 build_simple_mem_ref (y1
));
5613 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
5614 SET_DECL_VALUE_EXPR (placeholder
,
5615 y3
? build_simple_mem_ref (y3
)
5617 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5618 x
= lang_hooks
.decls
.omp_clause_default_ctor
5619 (c
, build_simple_mem_ref (y1
),
5620 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
5622 gimplify_and_add (x
, ilist
);
5623 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5625 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5626 lower_omp (&tseq
, ctx
);
5627 gimple_seq_add_seq (ilist
, tseq
);
5629 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5632 SET_DECL_VALUE_EXPR (decl_placeholder
,
5633 build_simple_mem_ref (y2
));
5634 SET_DECL_VALUE_EXPR (placeholder
,
5635 build_simple_mem_ref (y4
));
5636 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5637 lower_omp (&tseq
, ctx
);
5638 gimple_seq_add_seq (dlist
, tseq
);
5639 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5641 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5642 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
5645 x
= lang_hooks
.decls
.omp_clause_dtor
5646 (c
, build_simple_mem_ref (y2
));
5648 gimplify_and_add (x
, dlist
);
5653 x
= omp_reduction_init (c
, TREE_TYPE (type
));
5654 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5656 /* reduction(-:var) sums up the partial results, so it
5657 acts identically to reduction(+:var). */
5658 if (code
== MINUS_EXPR
)
5661 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
5664 x
= build2 (code
, TREE_TYPE (type
),
5665 build_simple_mem_ref (y4
),
5666 build_simple_mem_ref (y2
));
5667 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
5671 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
5672 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5673 gimple_seq_add_stmt (ilist
, g
);
5676 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
5677 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5678 gimple_seq_add_stmt (ilist
, g
);
5680 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
5681 build_int_cst (TREE_TYPE (i
), 1));
5682 gimple_seq_add_stmt (ilist
, g
);
5683 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
5684 gimple_seq_add_stmt (ilist
, g
);
5685 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
5688 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
5689 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5690 gimple_seq_add_stmt (dlist
, g
);
5693 g
= gimple_build_assign
5694 (y4
, POINTER_PLUS_EXPR
, y4
,
5695 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5696 gimple_seq_add_stmt (dlist
, g
);
5698 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
5699 build_int_cst (TREE_TYPE (i2
), 1));
5700 gimple_seq_add_stmt (dlist
, g
);
5701 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
5702 gimple_seq_add_stmt (dlist
, g
);
5703 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
5707 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
5708 g
= gimple_build_call (f
, 2, allocate_ptr
, allocator
);
5709 gimple_seq_add_stmt (dlist
, g
);
5715 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5716 if (is_global_var (out
))
5718 else if (is_omp_target (ctx
->stmt
))
5722 bool by_ref
= use_pointer_for_field (var
, ctx
);
5723 x
= build_receiver_ref (var
, by_ref
, ctx
);
5725 if (!omp_privatize_by_reference (var
))
5726 x
= build_fold_addr_expr (x
);
5727 x
= fold_convert (ptr_type_node
, x
);
5728 unsigned cnt
= task_reduction_cnt
- 1;
5729 if (!task_reduction_needs_orig_p
)
5730 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
5732 cnt
= task_reduction_cntorig
- 1;
5733 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5734 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5735 gimplify_assign (r
, x
, ilist
);
5740 tree type
= TREE_TYPE (new_var
);
5741 if (!omp_privatize_by_reference (var
))
5742 type
= build_pointer_type (type
);
5743 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5745 unsigned cnt
= task_reduction_cnt
- 1;
5746 if (!task_reduction_needs_orig_p
)
5747 cnt
+= (task_reduction_cntorig_full
5748 - task_reduction_cntorig
);
5750 cnt
= task_reduction_cntorig
- 1;
5751 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5752 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5756 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5758 if (ctx
->task_reductions
[1 + idx
])
5759 off
= fold_convert (sizetype
,
5760 ctx
->task_reductions
[1 + idx
]);
5762 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5764 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
5767 x
= fold_convert (type
, x
);
5769 if (omp_privatize_by_reference (var
))
5771 gimplify_assign (new_var
, x
, ilist
);
5773 new_var
= build_simple_mem_ref (new_var
);
5777 t
= create_tmp_var (type
);
5778 gimplify_assign (t
, x
, ilist
);
5779 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
5780 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5782 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
5783 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
5784 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5785 cond
= create_tmp_var (TREE_TYPE (t
));
5786 gimplify_assign (cond
, t
, ilist
);
5788 else if (is_variable_sized (var
))
5790 /* For variable sized types, we need to allocate the
5791 actual storage here. Call alloca and store the
5792 result in the pointer decl that we created elsewhere. */
5796 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
5800 ptr
= DECL_VALUE_EXPR (new_var
);
5801 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
5802 ptr
= TREE_OPERAND (ptr
, 0);
5803 gcc_assert (DECL_P (ptr
));
5804 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
5806 if (lower_private_allocate (var
, new_var
, allocator
,
5807 allocate_ptr
, ilist
, ctx
,
5812 /* void *tmp = __builtin_alloca */
5814 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5816 = gimple_build_call (atmp
, 2, x
,
5817 size_int (DECL_ALIGN (var
)));
5818 cfun
->calls_alloca
= 1;
5819 tmp
= create_tmp_var_raw (ptr_type_node
);
5820 gimple_add_tmp_var (tmp
);
5821 gimple_call_set_lhs (stmt
, tmp
);
5823 gimple_seq_add_stmt (ilist
, stmt
);
5826 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
5827 gimplify_assign (ptr
, x
, ilist
);
5830 else if (omp_privatize_by_reference (var
)
5831 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
5832 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
5834 /* For references that are being privatized for Fortran,
5835 allocate new backing storage for the new pointer
5836 variable. This allows us to avoid changing all the
5837 code that expects a pointer to something that expects
5838 a direct variable. */
5842 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
5843 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
5845 x
= build_receiver_ref (var
, false, ctx
);
5846 if (ctx
->allocate_map
)
5847 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
5849 allocator
= *allocatep
;
5850 if (TREE_CODE (allocator
) == TREE_LIST
)
5851 allocator
= TREE_PURPOSE (allocator
);
5852 if (TREE_CODE (allocator
) != INTEGER_CST
)
5853 allocator
= build_outer_var_ref (allocator
, ctx
);
5854 allocator
= fold_convert (pointer_sized_int_node
,
5856 allocate_ptr
= unshare_expr (x
);
5858 if (allocator
== NULL_TREE
)
5859 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5861 else if (lower_private_allocate (var
, new_var
, allocator
,
5863 ilist
, ctx
, true, x
))
5865 else if (TREE_CONSTANT (x
))
5867 /* For reduction in SIMD loop, defer adding the
5868 initialization of the reference, because if we decide
5869 to use SIMD array for it, the initilization could cause
5870 expansion ICE. Ditto for other privatization clauses. */
5875 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
5877 gimple_add_tmp_var (x
);
5878 TREE_ADDRESSABLE (x
) = 1;
5879 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5885 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5886 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
5887 tree al
= size_int (TYPE_ALIGN (rtype
));
5888 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
5893 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5894 gimplify_assign (new_var
, x
, ilist
);
5897 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5899 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5900 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5901 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5909 switch (OMP_CLAUSE_CODE (c
))
5911 case OMP_CLAUSE_SHARED
:
5912 /* Ignore shared directives in teams construct inside
5913 target construct. */
5914 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5915 && !is_host_teams_ctx (ctx
))
5917 /* Shared global vars are just accessed directly. */
5918 if (is_global_var (new_var
))
5920 /* For taskloop firstprivate/lastprivate, represented
5921 as firstprivate and shared clause on the task, new_var
5922 is the firstprivate var. */
5923 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5925 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5926 needs to be delayed until after fixup_child_record_type so
5927 that we get the correct type during the dereference. */
5928 by_ref
= use_pointer_for_field (var
, ctx
);
5929 x
= build_receiver_ref (var
, by_ref
, ctx
);
5930 SET_DECL_VALUE_EXPR (new_var
, x
);
5931 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5933 /* ??? If VAR is not passed by reference, and the variable
5934 hasn't been initialized yet, then we'll get a warning for
5935 the store into the omp_data_s structure. Ideally, we'd be
5936 able to notice this and not store anything at all, but
5937 we're generating code too early. Suppress the warning. */
5939 suppress_warning (var
, OPT_Wuninitialized
);
5942 case OMP_CLAUSE__CONDTEMP_
:
5943 if (is_parallel_ctx (ctx
))
5945 x
= build_receiver_ref (var
, false, ctx
);
5946 SET_DECL_VALUE_EXPR (new_var
, x
);
5947 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5949 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
5951 x
= build_zero_cst (TREE_TYPE (var
));
5956 case OMP_CLAUSE_LASTPRIVATE
:
5957 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5961 case OMP_CLAUSE_PRIVATE
:
5962 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
5963 x
= build_outer_var_ref (var
, ctx
);
5964 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5966 if (is_task_ctx (ctx
))
5967 x
= build_receiver_ref (var
, false, ctx
);
5969 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
5977 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
5978 ilist
, ctx
, false, NULL_TREE
);
5979 nx
= unshare_expr (new_var
);
5981 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5982 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
5985 nx
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, nx
, x
);
5987 nx
= lang_hooks
.decls
.omp_clause_default_ctor (c
, nx
, x
);
5990 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5991 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
5992 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5993 && (gimple_omp_for_collapse (ctx
->stmt
) != 1
5994 || (gimple_omp_for_index (ctx
->stmt
, 0)
5996 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
5997 || omp_privatize_by_reference (var
))
5998 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6001 if (omp_privatize_by_reference (var
))
6003 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6004 tree new_vard
= TREE_OPERAND (new_var
, 0);
6005 gcc_assert (DECL_P (new_vard
));
6006 SET_DECL_VALUE_EXPR (new_vard
,
6007 build_fold_addr_expr (lvar
));
6008 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6013 tree iv
= unshare_expr (ivar
);
6015 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
,
6018 x
= lang_hooks
.decls
.omp_clause_default_ctor (c
,
6022 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
6024 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
6025 unshare_expr (ivar
), x
);
6029 gimplify_and_add (x
, &llist
[0]);
6030 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6031 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6036 gcc_assert (TREE_CODE (v
) == MEM_REF
);
6037 v
= TREE_OPERAND (v
, 0);
6038 gcc_assert (DECL_P (v
));
6040 v
= *ctx
->lastprivate_conditional_map
->get (v
);
6041 tree t
= create_tmp_var (TREE_TYPE (v
));
6042 tree z
= build_zero_cst (TREE_TYPE (v
));
6044 = build_outer_var_ref (var
, ctx
,
6045 OMP_CLAUSE_LASTPRIVATE
);
6046 gimple_seq_add_stmt (dlist
,
6047 gimple_build_assign (t
, z
));
6048 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
6049 tree civar
= DECL_VALUE_EXPR (v
);
6050 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
6051 civar
= unshare_expr (civar
);
6052 TREE_OPERAND (civar
, 1) = sctx
.idx
;
6053 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
6054 unshare_expr (civar
));
6055 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
6056 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
6057 orig_v
, unshare_expr (ivar
)));
6058 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
6060 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
6062 gimple_seq tseq
= NULL
;
6063 gimplify_and_add (x
, &tseq
);
6065 lower_omp (&tseq
, ctx
->outer
);
6066 gimple_seq_add_seq (&llist
[1], tseq
);
6068 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6069 && ctx
->for_simd_scan_phase
)
6071 x
= unshare_expr (ivar
);
6073 = build_outer_var_ref (var
, ctx
,
6074 OMP_CLAUSE_LASTPRIVATE
);
6075 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
6077 gimplify_and_add (x
, &llist
[0]);
6081 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6083 gimplify_and_add (y
, &llist
[1]);
6087 if (omp_privatize_by_reference (var
))
6089 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6090 tree new_vard
= TREE_OPERAND (new_var
, 0);
6091 gcc_assert (DECL_P (new_vard
));
6092 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6093 x
= TYPE_SIZE_UNIT (type
);
6094 if (TREE_CONSTANT (x
))
6096 x
= create_tmp_var_raw (type
, get_name (var
));
6097 gimple_add_tmp_var (x
);
6098 TREE_ADDRESSABLE (x
) = 1;
6099 x
= build_fold_addr_expr_loc (clause_loc
, x
);
6100 x
= fold_convert_loc (clause_loc
,
6101 TREE_TYPE (new_vard
), x
);
6102 gimplify_assign (new_vard
, x
, ilist
);
6107 gimplify_and_add (nx
, ilist
);
6108 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6110 && ctx
->for_simd_scan_phase
)
6112 tree orig_v
= build_outer_var_ref (var
, ctx
,
6113 OMP_CLAUSE_LASTPRIVATE
);
6114 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
6116 gimplify_and_add (x
, ilist
);
6121 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
6123 gimplify_and_add (x
, dlist
);
6126 if (!is_gimple_val (allocator
))
6128 tree avar
= create_tmp_var (TREE_TYPE (allocator
));
6129 gimplify_assign (avar
, allocator
, dlist
);
6132 if (!is_gimple_val (allocate_ptr
))
6134 tree apvar
= create_tmp_var (TREE_TYPE (allocate_ptr
));
6135 gimplify_assign (apvar
, allocate_ptr
, dlist
);
6136 allocate_ptr
= apvar
;
6138 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
6140 = gimple_build_call (f
, 2, allocate_ptr
, allocator
);
6141 gimple_seq_add_stmt (dlist
, g
);
6145 case OMP_CLAUSE_LINEAR
:
6146 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
6147 goto do_firstprivate
;
6148 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
6151 x
= build_outer_var_ref (var
, ctx
);
6154 case OMP_CLAUSE_FIRSTPRIVATE
:
6155 if (is_task_ctx (ctx
))
6157 if ((omp_privatize_by_reference (var
)
6158 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
6159 || is_variable_sized (var
))
6161 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
6163 || use_pointer_for_field (var
, NULL
))
6165 x
= build_receiver_ref (var
, false, ctx
);
6166 if (ctx
->allocate_map
)
6167 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
6169 allocator
= *allocatep
;
6170 if (TREE_CODE (allocator
) == TREE_LIST
)
6171 allocator
= TREE_PURPOSE (allocator
);
6172 if (TREE_CODE (allocator
) != INTEGER_CST
)
6173 allocator
= build_outer_var_ref (allocator
, ctx
);
6174 allocator
= fold_convert (pointer_sized_int_node
,
6176 allocate_ptr
= unshare_expr (x
);
6177 x
= build_simple_mem_ref (x
);
6178 TREE_THIS_NOTRAP (x
) = 1;
6180 SET_DECL_VALUE_EXPR (new_var
, x
);
6181 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
6185 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
6186 && omp_privatize_by_reference (var
))
6188 x
= build_outer_var_ref (var
, ctx
);
6189 gcc_assert (TREE_CODE (x
) == MEM_REF
6190 && integer_zerop (TREE_OPERAND (x
, 1)));
6191 x
= TREE_OPERAND (x
, 0);
6192 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6193 (c
, unshare_expr (new_var
), x
);
6194 gimplify_and_add (x
, ilist
);
6198 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
6199 ilist
, ctx
, false, NULL_TREE
);
6200 x
= build_outer_var_ref (var
, ctx
);
6203 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6204 && gimple_omp_for_combined_into_p (ctx
->stmt
))
6206 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6208 t
= build_outer_var_ref (t
, ctx
);
6209 tree stept
= TREE_TYPE (t
);
6210 tree ct
= omp_find_clause (clauses
,
6211 OMP_CLAUSE__LOOPTEMP_
);
6213 tree l
= OMP_CLAUSE_DECL (ct
);
6214 tree n1
= fd
->loop
.n1
;
6215 tree step
= fd
->loop
.step
;
6216 tree itype
= TREE_TYPE (l
);
6217 if (POINTER_TYPE_P (itype
))
6218 itype
= signed_type_for (itype
);
6219 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
6220 if (TYPE_UNSIGNED (itype
)
6221 && fd
->loop
.cond_code
== GT_EXPR
)
6222 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
6223 fold_build1 (NEGATE_EXPR
, itype
, l
),
6224 fold_build1 (NEGATE_EXPR
,
6227 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
6228 t
= fold_build2 (MULT_EXPR
, stept
,
6229 fold_convert (stept
, l
), t
);
6231 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
6233 if (omp_privatize_by_reference (var
))
6235 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6236 tree new_vard
= TREE_OPERAND (new_var
, 0);
6237 gcc_assert (DECL_P (new_vard
));
6238 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6239 nx
= TYPE_SIZE_UNIT (type
);
6240 if (TREE_CONSTANT (nx
))
6242 nx
= create_tmp_var_raw (type
,
6244 gimple_add_tmp_var (nx
);
6245 TREE_ADDRESSABLE (nx
) = 1;
6246 nx
= build_fold_addr_expr_loc (clause_loc
,
6248 nx
= fold_convert_loc (clause_loc
,
6249 TREE_TYPE (new_vard
),
6251 gimplify_assign (new_vard
, nx
, ilist
);
6255 x
= lang_hooks
.decls
.omp_clause_linear_ctor
6257 gimplify_and_add (x
, ilist
);
6261 if (POINTER_TYPE_P (TREE_TYPE (x
)))
6262 x
= fold_build_pointer_plus (x
, t
);
6264 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
,
6265 fold_convert (TREE_TYPE (x
), t
));
6268 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
6269 || TREE_ADDRESSABLE (new_var
)
6270 || omp_privatize_by_reference (var
))
6271 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6274 if (omp_privatize_by_reference (var
))
6276 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6277 tree new_vard
= TREE_OPERAND (new_var
, 0);
6278 gcc_assert (DECL_P (new_vard
));
6279 SET_DECL_VALUE_EXPR (new_vard
,
6280 build_fold_addr_expr (lvar
));
6281 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6283 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
6285 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
6286 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
6287 gimplify_and_add (x
, ilist
);
6288 gimple_stmt_iterator gsi
6289 = gsi_start (*gimple_omp_body_ptr (ctx
->stmt
));
6291 = gimple_build_assign (unshare_expr (lvar
), iv
);
6292 gsi_insert_before_without_update (&gsi
, g
,
6294 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6295 enum tree_code code
= PLUS_EXPR
;
6296 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
6297 code
= POINTER_PLUS_EXPR
;
6298 g
= gimple_build_assign (iv
, code
, iv
, t
);
6299 gsi_insert_before_without_update (&gsi
, g
,
6303 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6304 (c
, unshare_expr (ivar
), x
);
6305 gimplify_and_add (x
, &llist
[0]);
6306 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6308 gimplify_and_add (x
, &llist
[1]);
6311 if (omp_privatize_by_reference (var
))
6313 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6314 tree new_vard
= TREE_OPERAND (new_var
, 0);
6315 gcc_assert (DECL_P (new_vard
));
6316 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6317 nx
= TYPE_SIZE_UNIT (type
);
6318 if (TREE_CONSTANT (nx
))
6320 nx
= create_tmp_var_raw (type
, get_name (var
));
6321 gimple_add_tmp_var (nx
);
6322 TREE_ADDRESSABLE (nx
) = 1;
6323 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
6324 nx
= fold_convert_loc (clause_loc
,
6325 TREE_TYPE (new_vard
), nx
);
6326 gimplify_assign (new_vard
, nx
, ilist
);
6330 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6331 (c
, unshare_expr (new_var
), x
);
6332 gimplify_and_add (x
, ilist
);
6335 case OMP_CLAUSE__LOOPTEMP_
:
6336 case OMP_CLAUSE__REDUCTEMP_
:
6337 gcc_assert (is_taskreg_ctx (ctx
));
6338 x
= build_outer_var_ref (var
, ctx
);
6339 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
6340 gimplify_and_add (x
, ilist
);
6343 case OMP_CLAUSE_COPYIN
:
6344 by_ref
= use_pointer_for_field (var
, NULL
);
6345 x
= build_receiver_ref (var
, by_ref
, ctx
);
6346 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
6347 append_to_statement_list (x
, ©in_seq
);
6348 copyin_by_ref
|= by_ref
;
6351 case OMP_CLAUSE_REDUCTION
:
6352 case OMP_CLAUSE_IN_REDUCTION
:
6353 /* OpenACC reductions are initialized using the
6354 GOACC_REDUCTION internal function. */
6355 if (is_gimple_omp_oacc (ctx
->stmt
))
6357 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6359 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6361 tree ptype
= TREE_TYPE (placeholder
);
6364 x
= error_mark_node
;
6365 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
6366 && !task_reduction_needs_orig_p
)
6368 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
6370 tree pptype
= build_pointer_type (ptype
);
6371 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
6372 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
6373 size_int (task_reduction_cnt_full
6374 + task_reduction_cntorig
- 1),
6375 NULL_TREE
, NULL_TREE
);
6379 = *ctx
->task_reduction_map
->get (c
);
6380 x
= task_reduction_read (ilist
, tskred_temp
,
6381 pptype
, 7 + 3 * idx
);
6383 x
= fold_convert (pptype
, x
);
6384 x
= build_simple_mem_ref (x
);
6389 lower_private_allocate (var
, new_var
, allocator
,
6390 allocate_ptr
, ilist
, ctx
, false,
6392 x
= build_outer_var_ref (var
, ctx
);
6394 if (omp_privatize_by_reference (var
)
6395 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
6396 x
= build_fold_addr_expr_loc (clause_loc
, x
);
6398 SET_DECL_VALUE_EXPR (placeholder
, x
);
6399 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6400 tree new_vard
= new_var
;
6401 if (omp_privatize_by_reference (var
))
6403 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6404 new_vard
= TREE_OPERAND (new_var
, 0);
6405 gcc_assert (DECL_P (new_vard
));
6407 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6409 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6410 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6413 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6417 if (new_vard
== new_var
)
6419 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
6420 SET_DECL_VALUE_EXPR (new_var
, ivar
);
6424 SET_DECL_VALUE_EXPR (new_vard
,
6425 build_fold_addr_expr (ivar
));
6426 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6428 x
= lang_hooks
.decls
.omp_clause_default_ctor
6429 (c
, unshare_expr (ivar
),
6430 build_outer_var_ref (var
, ctx
));
6431 if (rvarp
&& ctx
->for_simd_scan_phase
)
6434 gimplify_and_add (x
, &llist
[0]);
6435 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6437 gimplify_and_add (x
, &llist
[1]);
6444 gimplify_and_add (x
, &llist
[0]);
6446 tree ivar2
= unshare_expr (lvar
);
6447 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6448 x
= lang_hooks
.decls
.omp_clause_default_ctor
6449 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
6450 gimplify_and_add (x
, &llist
[0]);
6454 x
= lang_hooks
.decls
.omp_clause_default_ctor
6455 (c
, unshare_expr (rvar2
),
6456 build_outer_var_ref (var
, ctx
));
6457 gimplify_and_add (x
, &llist
[0]);
6460 /* For types that need construction, add another
6461 private var which will be default constructed
6462 and optionally initialized with
6463 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6464 loop we want to assign this value instead of
6465 constructing and destructing it in each
6467 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
6468 gimple_add_tmp_var (nv
);
6469 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
6473 x
= lang_hooks
.decls
.omp_clause_default_ctor
6474 (c
, nv
, build_outer_var_ref (var
, ctx
));
6475 gimplify_and_add (x
, ilist
);
6477 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6479 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6480 x
= DECL_VALUE_EXPR (new_vard
);
6482 if (new_vard
!= new_var
)
6483 vexpr
= build_fold_addr_expr (nv
);
6484 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6485 lower_omp (&tseq
, ctx
);
6486 SET_DECL_VALUE_EXPR (new_vard
, x
);
6487 gimple_seq_add_seq (ilist
, tseq
);
6488 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6491 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6493 gimplify_and_add (x
, dlist
);
6496 tree ref
= build_outer_var_ref (var
, ctx
);
6497 x
= unshare_expr (ivar
);
6498 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
6500 gimplify_and_add (x
, &llist
[0]);
6502 ref
= build_outer_var_ref (var
, ctx
);
6503 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
6505 gimplify_and_add (x
, &llist
[3]);
6507 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6508 if (new_vard
== new_var
)
6509 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6511 SET_DECL_VALUE_EXPR (new_vard
,
6512 build_fold_addr_expr (lvar
));
6514 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6516 gimplify_and_add (x
, &llist
[1]);
6518 tree ivar2
= unshare_expr (lvar
);
6519 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6520 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
6522 gimplify_and_add (x
, &llist
[1]);
6526 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
6528 gimplify_and_add (x
, &llist
[1]);
6533 gimplify_and_add (x
, &llist
[0]);
6534 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6536 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6537 lower_omp (&tseq
, ctx
);
6538 gimple_seq_add_seq (&llist
[0], tseq
);
6540 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6541 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6542 lower_omp (&tseq
, ctx
);
6543 gimple_seq_add_seq (&llist
[1], tseq
);
6544 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6545 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6546 if (new_vard
== new_var
)
6547 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6549 SET_DECL_VALUE_EXPR (new_vard
,
6550 build_fold_addr_expr (lvar
));
6551 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6553 gimplify_and_add (x
, &llist
[1]);
6556 /* If this is a reference to constant size reduction var
6557 with placeholder, we haven't emitted the initializer
6558 for it because it is undesirable if SIMD arrays are used.
6559 But if they aren't used, we need to emit the deferred
6560 initialization now. */
6561 else if (omp_privatize_by_reference (var
) && is_simd
)
6562 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6564 tree lab2
= NULL_TREE
;
6568 if (!is_parallel_ctx (ctx
))
6570 tree condv
= create_tmp_var (boolean_type_node
);
6571 tree m
= build_simple_mem_ref (cond
);
6572 g
= gimple_build_assign (condv
, m
);
6573 gimple_seq_add_stmt (ilist
, g
);
6575 = create_artificial_label (UNKNOWN_LOCATION
);
6576 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6577 g
= gimple_build_cond (NE_EXPR
, condv
,
6580 gimple_seq_add_stmt (ilist
, g
);
6581 gimple_seq_add_stmt (ilist
,
6582 gimple_build_label (lab1
));
6584 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6586 gimple_seq_add_stmt (ilist
, g
);
6588 x
= lang_hooks
.decls
.omp_clause_default_ctor
6589 (c
, unshare_expr (new_var
),
6591 : build_outer_var_ref (var
, ctx
));
6593 gimplify_and_add (x
, ilist
);
6595 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6596 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6598 if (ctx
->for_simd_scan_phase
)
6601 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
6603 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
6604 gimple_add_tmp_var (nv
);
6605 ctx
->cb
.decl_map
->put (new_vard
, nv
);
6606 x
= lang_hooks
.decls
.omp_clause_default_ctor
6607 (c
, nv
, build_outer_var_ref (var
, ctx
));
6609 gimplify_and_add (x
, ilist
);
6610 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6612 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6614 if (new_vard
!= new_var
)
6615 vexpr
= build_fold_addr_expr (nv
);
6616 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6617 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6618 lower_omp (&tseq
, ctx
);
6619 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
6620 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
6621 gimple_seq_add_seq (ilist
, tseq
);
6623 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6624 if (is_simd
&& ctx
->scan_exclusive
)
6627 = create_tmp_var_raw (TREE_TYPE (new_var
));
6628 gimple_add_tmp_var (nv2
);
6629 ctx
->cb
.decl_map
->put (nv
, nv2
);
6630 x
= lang_hooks
.decls
.omp_clause_default_ctor
6631 (c
, nv2
, build_outer_var_ref (var
, ctx
));
6632 gimplify_and_add (x
, ilist
);
6633 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6635 gimplify_and_add (x
, dlist
);
6637 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6639 gimplify_and_add (x
, dlist
);
6642 && ctx
->scan_exclusive
6643 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
6645 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
6646 gimple_add_tmp_var (nv2
);
6647 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
6648 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6650 gimplify_and_add (x
, dlist
);
6652 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6656 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6658 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6659 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
6660 && is_omp_target (ctx
->stmt
))
6662 tree d
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
6663 tree oldv
= NULL_TREE
;
6665 if (DECL_HAS_VALUE_EXPR_P (d
))
6666 oldv
= DECL_VALUE_EXPR (d
);
6667 SET_DECL_VALUE_EXPR (d
, new_vard
);
6668 DECL_HAS_VALUE_EXPR_P (d
) = 1;
6669 lower_omp (&tseq
, ctx
);
6671 SET_DECL_VALUE_EXPR (d
, oldv
);
6674 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
6675 DECL_HAS_VALUE_EXPR_P (d
) = 0;
6679 lower_omp (&tseq
, ctx
);
6680 gimple_seq_add_seq (ilist
, tseq
);
6682 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6685 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6686 lower_omp (&tseq
, ctx
);
6687 gimple_seq_add_seq (dlist
, tseq
);
6688 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6690 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6694 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6701 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
6702 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
6703 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6708 tree lab2
= NULL_TREE
;
6709 /* GOMP_taskgroup_reduction_register memsets the whole
6710 array to zero. If the initializer is zero, we don't
6711 need to initialize it again, just mark it as ever
6712 used unconditionally, i.e. cond = true. */
6713 if (initializer_zerop (x
))
6715 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6717 gimple_seq_add_stmt (ilist
, g
);
6722 if (!cond) { cond = true; new_var = x; } */
6723 if (!is_parallel_ctx (ctx
))
6725 tree condv
= create_tmp_var (boolean_type_node
);
6726 tree m
= build_simple_mem_ref (cond
);
6727 g
= gimple_build_assign (condv
, m
);
6728 gimple_seq_add_stmt (ilist
, g
);
6730 = create_artificial_label (UNKNOWN_LOCATION
);
6731 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6732 g
= gimple_build_cond (NE_EXPR
, condv
,
6735 gimple_seq_add_stmt (ilist
, g
);
6736 gimple_seq_add_stmt (ilist
,
6737 gimple_build_label (lab1
));
6739 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6741 gimple_seq_add_stmt (ilist
, g
);
6742 gimplify_assign (new_var
, x
, ilist
);
6744 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6748 /* reduction(-:var) sums up the partial results, so it
6749 acts identically to reduction(+:var). */
6750 if (code
== MINUS_EXPR
)
6754 = (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
);
6755 tree new_vard
= new_var
;
6756 if (is_simd
&& omp_privatize_by_reference (var
))
6758 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6759 new_vard
= TREE_OPERAND (new_var
, 0);
6760 gcc_assert (DECL_P (new_vard
));
6762 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6764 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6765 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6768 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6772 if (new_vard
!= new_var
)
6774 SET_DECL_VALUE_EXPR (new_vard
,
6775 build_fold_addr_expr (lvar
));
6776 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6779 tree ref
= build_outer_var_ref (var
, ctx
);
6783 if (ctx
->for_simd_scan_phase
)
6785 gimplify_assign (ivar
, ref
, &llist
[0]);
6786 ref
= build_outer_var_ref (var
, ctx
);
6787 gimplify_assign (ref
, rvar
, &llist
[3]);
6791 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
6796 simt_lane
= create_tmp_var (unsigned_type_node
);
6797 x
= build_call_expr_internal_loc
6798 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
6799 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
6800 /* Make sure x is evaluated unconditionally. */
6801 tree bfly_var
= create_tmp_var (TREE_TYPE (ivar
));
6802 gimplify_assign (bfly_var
, x
, &llist
[2]);
6803 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, bfly_var
);
6804 gimplify_assign (ivar
, x
, &llist
[2]);
6810 tree zero
= build_zero_cst (TREE_TYPE (ivar
));
6811 ivar2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6812 boolean_type_node
, ivar
,
6814 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6815 boolean_type_node
, ref
,
6818 x
= build2 (code
, TREE_TYPE (ref
), ref2
, ivar2
);
6820 x
= fold_convert (TREE_TYPE (ref
), x
);
6821 ref
= build_outer_var_ref (var
, ctx
);
6822 gimplify_assign (ref
, x
, &llist
[1]);
6827 lower_private_allocate (var
, new_var
, allocator
,
6828 allocate_ptr
, ilist
, ctx
,
6830 if (omp_privatize_by_reference (var
) && is_simd
)
6831 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6832 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6833 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6835 gimplify_assign (new_var
, x
, ilist
);
6838 tree ref
= build_outer_var_ref (var
, ctx
);
6839 tree new_var2
= new_var
;
6843 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
6845 = fold_build2_loc (clause_loc
, NE_EXPR
,
6846 boolean_type_node
, new_var
,
6848 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6849 boolean_type_node
, ref
,
6852 x
= build2 (code
, TREE_TYPE (ref2
), ref2
, new_var2
);
6854 x
= fold_convert (TREE_TYPE (new_var
), x
);
6855 ref
= build_outer_var_ref (var
, ctx
);
6856 gimplify_assign (ref
, x
, dlist
);
6871 tree clobber
= build_clobber (TREE_TYPE (tskred_avar
));
6872 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
6875 if (known_eq (sctx
.max_vf
, 1U))
6877 sctx
.is_simt
= false;
6878 if (ctx
->lastprivate_conditional_map
)
6880 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
6882 /* Signal to lower_omp_1 that it should use parent context. */
6883 ctx
->combined_into_simd_safelen1
= true;
6884 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6885 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6886 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6888 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6889 omp_context
*outer
= ctx
->outer
;
6890 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
6891 outer
= outer
->outer
;
6892 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
6893 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
6894 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
6900 /* When not vectorized, treat lastprivate(conditional:) like
6901 normal lastprivate, as there will be just one simd lane
6902 writing the privatized variable. */
6903 delete ctx
->lastprivate_conditional_map
;
6904 ctx
->lastprivate_conditional_map
= NULL
;
6909 if (nonconst_simd_if
)
6911 if (sctx
.lane
== NULL_TREE
)
6913 sctx
.idx
= create_tmp_var (unsigned_type_node
);
6914 sctx
.lane
= create_tmp_var (unsigned_type_node
);
6916 /* FIXME: For now. */
6917 sctx
.is_simt
= false;
6920 if (sctx
.lane
|| sctx
.is_simt
)
6922 uid
= create_tmp_var (ptr_type_node
, "simduid");
6923 /* Don't want uninit warnings on simduid, it is always uninitialized,
6924 but we use it not for the value, but for the DECL_UID only. */
6925 suppress_warning (uid
, OPT_Wuninitialized
);
6926 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
6927 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
6928 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6929 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6931 /* Emit calls denoting privatized variables and initializing a pointer to
6932 structure that holds private variables as fields after ompdevlow pass. */
6935 sctx
.simt_eargs
[0] = uid
;
6937 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
6938 gimple_call_set_lhs (g
, uid
);
6939 gimple_seq_add_stmt (ilist
, g
);
6940 sctx
.simt_eargs
.release ();
6942 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
6943 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
6944 gimple_call_set_lhs (g
, simtrec
);
6945 gimple_seq_add_stmt (ilist
, g
);
6949 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
6950 2 + (nonconst_simd_if
!= NULL
),
6951 uid
, integer_zero_node
,
6953 gimple_call_set_lhs (g
, sctx
.lane
);
6954 gimple_stmt_iterator gsi
= gsi_start (*gimple_omp_body_ptr (ctx
->stmt
));
6955 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
6956 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
6957 build_int_cst (unsigned_type_node
, 0));
6958 gimple_seq_add_stmt (ilist
, g
);
6961 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6963 gimple_call_set_lhs (g
, sctx
.lastlane
);
6964 gimple_seq_add_stmt (dlist
, g
);
6965 gimple_seq_add_seq (dlist
, llist
[3]);
6967 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6970 tree simt_vf
= create_tmp_var (unsigned_type_node
);
6971 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
6972 gimple_call_set_lhs (g
, simt_vf
);
6973 gimple_seq_add_stmt (dlist
, g
);
6975 tree t
= build_int_cst (unsigned_type_node
, 1);
6976 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
6977 gimple_seq_add_stmt (dlist
, g
);
6979 t
= build_int_cst (unsigned_type_node
, 0);
6980 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6981 gimple_seq_add_stmt (dlist
, g
);
6983 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6984 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6985 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6986 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
6987 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
6989 gimple_seq_add_seq (dlist
, llist
[2]);
6991 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
6992 gimple_seq_add_stmt (dlist
, g
);
6994 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
6995 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
6996 gimple_seq_add_stmt (dlist
, g
);
6998 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
7000 for (int i
= 0; i
< 2; i
++)
7003 tree vf
= create_tmp_var (unsigned_type_node
);
7004 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
7005 gimple_call_set_lhs (g
, vf
);
7006 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
7007 gimple_seq_add_stmt (seq
, g
);
7008 tree t
= build_int_cst (unsigned_type_node
, 0);
7009 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
7010 gimple_seq_add_stmt (seq
, g
);
7011 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7012 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
7013 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7014 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
7015 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
7016 gimple_seq_add_seq (seq
, llist
[i
]);
7017 t
= build_int_cst (unsigned_type_node
, 1);
7018 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
7019 gimple_seq_add_stmt (seq
, g
);
7020 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
7021 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
7022 gimple_seq_add_stmt (seq
, g
);
7023 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
7028 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
7030 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
7031 gimple_seq_add_stmt (dlist
, g
);
7034 /* The copyin sequence is not to be executed by the main thread, since
7035 that would result in self-copies. Perhaps not visible to scalars,
7036 but it certainly is to C++ operator=. */
7039 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
7041 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
7042 build_int_cst (TREE_TYPE (x
), 0));
7043 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
7044 gimplify_and_add (x
, ilist
);
7047 /* If any copyin variable is passed by reference, we must ensure the
7048 master thread doesn't modify it before it is copied over in all
7049 threads. Similarly for variables in both firstprivate and
7050 lastprivate clauses we need to ensure the lastprivate copying
7051 happens after firstprivate copying in all threads. And similarly
7052 for UDRs if initializer expression refers to omp_orig. */
7053 if (copyin_by_ref
|| lastprivate_firstprivate
7054 || (reduction_omp_orig_ref
7055 && !ctx
->scan_inclusive
7056 && !ctx
->scan_exclusive
))
7058 /* Don't add any barrier for #pragma omp simd or
7059 #pragma omp distribute. */
7060 if (!is_task_ctx (ctx
)
7061 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
7062 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
7063 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
7066 /* If max_vf is non-zero, then we can use only a vectorization factor
7067 up to the max_vf we chose. So stick it into the safelen clause. */
7068 if (maybe_ne (sctx
.max_vf
, 0U))
7070 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
7071 OMP_CLAUSE_SAFELEN
);
7072 poly_uint64 safe_len
;
7074 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
7075 && maybe_gt (safe_len
, sctx
.max_vf
)))
7077 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
7078 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
7080 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
7081 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
7086 /* Create temporary variables for lastprivate(conditional:) implementation
7087 in context CTX with CLAUSES. */
7090 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
7092 tree iter_type
= NULL_TREE
;
7093 tree cond_ptr
= NULL_TREE
;
7094 tree iter_var
= NULL_TREE
;
7095 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7096 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
7097 tree next
= *clauses
;
7098 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7099 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7100 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
7104 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
7106 if (iter_type
== NULL_TREE
)
7108 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
7109 iter_var
= create_tmp_var_raw (iter_type
);
7110 DECL_CONTEXT (iter_var
) = current_function_decl
;
7111 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
7112 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
7113 ctx
->block_vars
= iter_var
;
7115 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
7116 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
7117 OMP_CLAUSE_DECL (c3
) = iter_var
;
7118 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
7120 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
7122 next
= OMP_CLAUSE_CHAIN (cc
);
7123 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7124 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
7125 ctx
->lastprivate_conditional_map
->put (o
, v
);
7128 if (iter_type
== NULL
)
7130 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
7132 struct omp_for_data fd
;
7133 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
7135 iter_type
= unsigned_type_for (fd
.iter_type
);
7137 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
7138 iter_type
= unsigned_type_node
;
7139 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
7143 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
7144 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
7148 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
7149 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
7150 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
7151 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
7152 ctx
->block_vars
= cond_ptr
;
7153 c2
= build_omp_clause (UNKNOWN_LOCATION
,
7154 OMP_CLAUSE__CONDTEMP_
);
7155 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
7156 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
7159 iter_var
= create_tmp_var_raw (iter_type
);
7160 DECL_CONTEXT (iter_var
) = current_function_decl
;
7161 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
7162 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
7163 ctx
->block_vars
= iter_var
;
7165 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
7166 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
7167 OMP_CLAUSE_DECL (c3
) = iter_var
;
7168 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
7169 OMP_CLAUSE_CHAIN (c2
) = c3
;
7170 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
7172 tree v
= create_tmp_var_raw (iter_type
);
7173 DECL_CONTEXT (v
) = current_function_decl
;
7174 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
7175 DECL_CHAIN (v
) = ctx
->block_vars
;
7176 ctx
->block_vars
= v
;
7177 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7178 ctx
->lastprivate_conditional_map
->put (o
, v
);
7183 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7184 both parallel and workshare constructs. PREDICATE may be NULL if it's
7185 always true. BODY_P is the sequence to insert early initialization
7186 if needed, STMT_LIST is where the non-conditional lastprivate handling
7187 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7191 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
7192 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
7195 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
7196 bool par_clauses
= false;
7197 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
7198 unsigned HOST_WIDE_INT conditional_off
= 0;
7199 gimple_seq post_stmt_list
= NULL
;
7201 /* Early exit if there are no lastprivate or linear clauses. */
7202 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
7203 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
7204 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
7205 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
7207 if (clauses
== NULL
)
7209 /* If this was a workshare clause, see if it had been combined
7210 with its parallel. In that case, look for the clauses on the
7211 parallel statement itself. */
7212 if (is_parallel_ctx (ctx
))
7216 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
7219 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7220 OMP_CLAUSE_LASTPRIVATE
);
7221 if (clauses
== NULL
)
7226 bool maybe_simt
= false;
7227 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7228 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7230 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
7231 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
7233 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
7239 tree label_true
, arm1
, arm2
;
7240 enum tree_code pred_code
= TREE_CODE (predicate
);
7242 label
= create_artificial_label (UNKNOWN_LOCATION
);
7243 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
7244 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
7246 arm1
= TREE_OPERAND (predicate
, 0);
7247 arm2
= TREE_OPERAND (predicate
, 1);
7248 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7249 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7254 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7255 arm2
= boolean_false_node
;
7256 pred_code
= NE_EXPR
;
7260 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
7261 c
= fold_convert (integer_type_node
, c
);
7262 simtcond
= create_tmp_var (integer_type_node
);
7263 gimplify_assign (simtcond
, c
, stmt_list
);
7264 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
7266 c
= create_tmp_var (integer_type_node
);
7267 gimple_call_set_lhs (g
, c
);
7268 gimple_seq_add_stmt (stmt_list
, g
);
7269 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
7273 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
7274 gimple_seq_add_stmt (stmt_list
, stmt
);
7275 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
7278 tree cond_ptr
= NULL_TREE
;
7279 for (c
= clauses
; c
;)
7282 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7283 gimple_seq
*this_stmt_list
= stmt_list
;
7284 tree lab2
= NULL_TREE
;
7286 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7287 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
7288 && ctx
->lastprivate_conditional_map
7289 && !ctx
->combined_into_simd_safelen1
)
7291 gcc_assert (body_p
);
7294 if (cond_ptr
== NULL_TREE
)
7296 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
7297 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
7299 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
7300 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7301 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
7302 gimplify_assign (v
, build_zero_cst (type
), body_p
);
7303 this_stmt_list
= cstmt_list
;
7305 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
7307 mem
= build2 (MEM_REF
, type
, cond_ptr
,
7308 build_int_cst (TREE_TYPE (cond_ptr
),
7310 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
7313 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
7314 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
7315 tree mem2
= copy_node (mem
);
7316 gimple_seq seq
= NULL
;
7317 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
7318 gimple_seq_add_seq (this_stmt_list
, seq
);
7319 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
7320 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
7321 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
7322 gimple_seq_add_stmt (this_stmt_list
, g
);
7323 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
7324 gimplify_assign (mem2
, v
, this_stmt_list
);
7327 && ctx
->combined_into_simd_safelen1
7328 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7329 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
7330 && ctx
->lastprivate_conditional_map
)
7331 this_stmt_list
= &post_stmt_list
;
7333 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7334 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7335 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
7337 var
= OMP_CLAUSE_DECL (c
);
7338 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7339 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
7340 && is_taskloop_ctx (ctx
))
7342 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
7343 new_var
= lookup_decl (var
, ctx
->outer
);
7347 new_var
= lookup_decl (var
, ctx
);
7348 /* Avoid uninitialized warnings for lastprivate and
7349 for linear iterators. */
7351 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7352 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
7353 suppress_warning (new_var
, OPT_Wuninitialized
);
7356 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
7358 tree val
= DECL_VALUE_EXPR (new_var
);
7359 if (TREE_CODE (val
) == ARRAY_REF
7360 && VAR_P (TREE_OPERAND (val
, 0))
7361 && lookup_attribute ("omp simd array",
7362 DECL_ATTRIBUTES (TREE_OPERAND (val
,
7365 if (lastlane
== NULL
)
7367 lastlane
= create_tmp_var (unsigned_type_node
);
7369 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
7371 TREE_OPERAND (val
, 1));
7372 gimple_call_set_lhs (g
, lastlane
);
7373 gimple_seq_add_stmt (this_stmt_list
, g
);
7375 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
7376 TREE_OPERAND (val
, 0), lastlane
,
7377 NULL_TREE
, NULL_TREE
);
7378 TREE_THIS_NOTRAP (new_var
) = 1;
7381 else if (maybe_simt
)
7383 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
7384 ? DECL_VALUE_EXPR (new_var
)
7386 if (simtlast
== NULL
)
7388 simtlast
= create_tmp_var (unsigned_type_node
);
7389 gcall
*g
= gimple_build_call_internal
7390 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
7391 gimple_call_set_lhs (g
, simtlast
);
7392 gimple_seq_add_stmt (this_stmt_list
, g
);
7394 x
= build_call_expr_internal_loc
7395 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
7396 TREE_TYPE (val
), 2, val
, simtlast
);
7397 new_var
= unshare_expr (new_var
);
7398 gimplify_assign (new_var
, x
, this_stmt_list
);
7399 new_var
= unshare_expr (new_var
);
7402 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7403 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
7405 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
7406 gimple_seq_add_seq (this_stmt_list
,
7407 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
7408 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
7410 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7411 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
7413 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
7414 gimple_seq_add_seq (this_stmt_list
,
7415 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
7416 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
7420 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7421 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)
7422 && is_taskloop_ctx (ctx
))
7424 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
7426 if (is_global_var (ovar
))
7430 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
7431 if (omp_privatize_by_reference (var
))
7432 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7433 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
7434 gimplify_and_add (x
, this_stmt_list
);
7437 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
7441 c
= OMP_CLAUSE_CHAIN (c
);
7442 if (c
== NULL
&& !par_clauses
)
7444 /* If this was a workshare clause, see if it had been combined
7445 with its parallel. In that case, continue looking for the
7446 clauses also on the parallel statement itself. */
7447 if (is_parallel_ctx (ctx
))
7451 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
7454 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7455 OMP_CLAUSE_LASTPRIVATE
);
7461 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
7462 gimple_seq_add_seq (stmt_list
, post_stmt_list
);
7465 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7466 (which might be a placeholder). INNER is true if this is an inner
7467 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7468 join markers. Generate the before-loop forking sequence in
7469 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7470 general form of these sequences is
7472 GOACC_REDUCTION_SETUP
7474 GOACC_REDUCTION_INIT
7476 GOACC_REDUCTION_FINI
7478 GOACC_REDUCTION_TEARDOWN. */
7481 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
7482 gcall
*fork
, gcall
*private_marker
, gcall
*join
,
7483 gimple_seq
*fork_seq
, gimple_seq
*join_seq
,
7486 gimple_seq before_fork
= NULL
;
7487 gimple_seq after_fork
= NULL
;
7488 gimple_seq before_join
= NULL
;
7489 gimple_seq after_join
= NULL
;
7490 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
7491 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
7492 unsigned offset
= 0;
7494 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7495 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
7497 /* No 'reduction' clauses on OpenACC 'kernels'. */
7498 gcc_checking_assert (!is_oacc_kernels (ctx
));
7499 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7500 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
7502 tree orig
= OMP_CLAUSE_DECL (c
);
7503 tree var
= maybe_lookup_decl (orig
, ctx
);
7504 tree ref_to_res
= NULL_TREE
;
7505 tree incoming
, outgoing
, v1
, v2
, v3
;
7506 bool is_private
= false;
7508 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
7509 if (rcode
== MINUS_EXPR
)
7511 else if (rcode
== TRUTH_ANDIF_EXPR
)
7512 rcode
= BIT_AND_EXPR
;
7513 else if (rcode
== TRUTH_ORIF_EXPR
)
7514 rcode
= BIT_IOR_EXPR
;
7515 tree op
= build_int_cst (unsigned_type_node
, rcode
);
7520 incoming
= outgoing
= var
;
7524 /* See if an outer construct also reduces this variable. */
7525 omp_context
*outer
= ctx
;
7527 while (omp_context
*probe
= outer
->outer
)
7529 enum gimple_code type
= gimple_code (probe
->stmt
);
7534 case GIMPLE_OMP_FOR
:
7535 cls
= gimple_omp_for_clauses (probe
->stmt
);
7538 case GIMPLE_OMP_TARGET
:
7539 /* No 'reduction' clauses inside OpenACC 'kernels'
7541 gcc_checking_assert (!is_oacc_kernels (probe
));
7543 if (!is_gimple_omp_offloaded (probe
->stmt
))
7546 cls
= gimple_omp_target_clauses (probe
->stmt
);
7554 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
7555 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
7556 && orig
== OMP_CLAUSE_DECL (cls
))
7558 incoming
= outgoing
= lookup_decl (orig
, probe
);
7559 goto has_outer_reduction
;
7561 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
7562 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
7563 && orig
== OMP_CLAUSE_DECL (cls
))
7571 /* This is the outermost construct with this reduction,
7572 see if there's a mapping for it. */
7573 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
7574 && maybe_lookup_field (orig
, outer
) && !is_private
)
7576 ref_to_res
= build_receiver_ref (orig
, false, outer
);
7577 if (omp_privatize_by_reference (orig
))
7578 ref_to_res
= build_simple_mem_ref (ref_to_res
);
7580 tree type
= TREE_TYPE (var
);
7581 if (POINTER_TYPE_P (type
))
7582 type
= TREE_TYPE (type
);
7585 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
7589 /* Try to look at enclosing contexts for reduction var,
7590 use original if no mapping found. */
7592 omp_context
*c
= ctx
->outer
;
7595 t
= maybe_lookup_decl (orig
, c
);
7598 incoming
= outgoing
= (t
? t
: orig
);
7601 has_outer_reduction
:;
7605 ref_to_res
= integer_zero_node
;
7607 if (omp_privatize_by_reference (orig
))
7609 tree type
= TREE_TYPE (var
);
7610 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
7614 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
7615 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
7618 v1
= create_tmp_var (type
, id
);
7619 v2
= create_tmp_var (type
, id
);
7620 v3
= create_tmp_var (type
, id
);
7622 gimplify_assign (v1
, var
, fork_seq
);
7623 gimplify_assign (v2
, var
, fork_seq
);
7624 gimplify_assign (v3
, var
, fork_seq
);
7626 var
= build_simple_mem_ref (var
);
7627 v1
= build_simple_mem_ref (v1
);
7628 v2
= build_simple_mem_ref (v2
);
7629 v3
= build_simple_mem_ref (v3
);
7630 outgoing
= build_simple_mem_ref (outgoing
);
7632 if (!TREE_CONSTANT (incoming
))
7633 incoming
= build_simple_mem_ref (incoming
);
7636 /* Note that 'var' might be a mem ref. */
7639 /* Determine position in reduction buffer, which may be used
7640 by target. The parser has ensured that this is not a
7641 variable-sized type. */
7642 fixed_size_mode mode
7643 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
7644 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
7645 offset
= (offset
+ align
- 1) & ~(align
- 1);
7646 tree off
= build_int_cst (sizetype
, offset
);
7647 offset
+= GET_MODE_SIZE (mode
);
7651 init_code
= build_int_cst (integer_type_node
,
7652 IFN_GOACC_REDUCTION_INIT
);
7653 fini_code
= build_int_cst (integer_type_node
,
7654 IFN_GOACC_REDUCTION_FINI
);
7655 setup_code
= build_int_cst (integer_type_node
,
7656 IFN_GOACC_REDUCTION_SETUP
);
7657 teardown_code
= build_int_cst (integer_type_node
,
7658 IFN_GOACC_REDUCTION_TEARDOWN
);
7662 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7663 TREE_TYPE (var
), 6, setup_code
,
7664 unshare_expr (ref_to_res
),
7665 unshare_expr (incoming
),
7668 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7669 TREE_TYPE (var
), 6, init_code
,
7670 unshare_expr (ref_to_res
),
7671 unshare_expr (v1
), level
, op
, off
);
7673 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7674 TREE_TYPE (var
), 6, fini_code
,
7675 unshare_expr (ref_to_res
),
7676 unshare_expr (v2
), level
, op
, off
);
7678 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7679 TREE_TYPE (var
), 6, teardown_code
,
7680 ref_to_res
, unshare_expr (v3
),
7683 gimplify_assign (unshare_expr (v1
), setup_call
, &before_fork
);
7684 gimplify_assign (unshare_expr (v2
), init_call
, &after_fork
);
7685 gimplify_assign (unshare_expr (v3
), fini_call
, &before_join
);
7686 gimplify_assign (unshare_expr (outgoing
), teardown_call
, &after_join
);
7689 /* Now stitch things together. */
7690 gimple_seq_add_seq (fork_seq
, before_fork
);
7692 gimple_seq_add_stmt (fork_seq
, private_marker
);
7694 gimple_seq_add_stmt (fork_seq
, fork
);
7695 gimple_seq_add_seq (fork_seq
, after_fork
);
7697 gimple_seq_add_seq (join_seq
, before_join
);
7699 gimple_seq_add_stmt (join_seq
, join
);
7700 gimple_seq_add_seq (join_seq
, after_join
);
7703 /* Generate code to implement the REDUCTION clauses, append it
7704 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7705 that should be emitted also inside of the critical section,
7706 in that case clear *CLIST afterwards, otherwise leave it as is
7707 and let the caller emit it itself. */
7710 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
7711 gimple_seq
*clist
, omp_context
*ctx
)
7713 gimple_seq sub_seq
= NULL
;
7718 /* OpenACC loop reductions are handled elsewhere. */
7719 if (is_gimple_omp_oacc (ctx
->stmt
))
7722 /* SIMD reductions are handled in lower_rec_input_clauses. */
7723 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7724 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7727 /* inscan reductions are handled elsewhere. */
7728 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
7731 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7732 update in that case, otherwise use a lock. */
7733 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
7734 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7735 && !OMP_CLAUSE_REDUCTION_TASK (c
))
7737 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
7738 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7740 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7750 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7752 tree var
, ref
, new_var
, orig_var
;
7753 enum tree_code code
;
7754 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7756 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7757 || OMP_CLAUSE_REDUCTION_TASK (c
))
7760 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
7761 orig_var
= var
= OMP_CLAUSE_DECL (c
);
7762 if (TREE_CODE (var
) == MEM_REF
)
7764 var
= TREE_OPERAND (var
, 0);
7765 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
7766 var
= TREE_OPERAND (var
, 0);
7767 if (TREE_CODE (var
) == ADDR_EXPR
)
7768 var
= TREE_OPERAND (var
, 0);
7771 /* If this is a pointer or referenced based array
7772 section, the var could be private in the outer
7773 context e.g. on orphaned loop construct. Pretend this
7774 is private variable's outer reference. */
7775 ccode
= OMP_CLAUSE_PRIVATE
;
7776 if (TREE_CODE (var
) == INDIRECT_REF
)
7777 var
= TREE_OPERAND (var
, 0);
7780 if (is_variable_sized (var
))
7782 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
7783 var
= DECL_VALUE_EXPR (var
);
7784 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
7785 var
= TREE_OPERAND (var
, 0);
7786 gcc_assert (DECL_P (var
));
7789 new_var
= lookup_decl (var
, ctx
);
7790 if (var
== OMP_CLAUSE_DECL (c
)
7791 && omp_privatize_by_reference (var
))
7792 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7793 ref
= build_outer_var_ref (var
, ctx
, ccode
);
7794 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
7796 /* reduction(-:var) sums up the partial results, so it acts
7797 identically to reduction(+:var). */
7798 if (code
== MINUS_EXPR
)
7801 bool is_truth_op
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
);
7804 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
7806 addr
= save_expr (addr
);
7807 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
7808 tree new_var2
= new_var
;
7812 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7813 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7814 boolean_type_node
, new_var
, zero
);
7815 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, boolean_type_node
,
7818 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (new_var2
), ref2
,
7821 x
= fold_convert (TREE_TYPE (new_var
), x
);
7822 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
7823 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
7824 gimplify_and_add (x
, stmt_seqp
);
7827 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7829 tree d
= OMP_CLAUSE_DECL (c
);
7830 tree type
= TREE_TYPE (d
);
7831 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7832 tree i
= create_tmp_var (TREE_TYPE (v
));
7833 tree ptype
= build_pointer_type (TREE_TYPE (type
));
7834 tree bias
= TREE_OPERAND (d
, 1);
7835 d
= TREE_OPERAND (d
, 0);
7836 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
7838 tree b
= TREE_OPERAND (d
, 1);
7839 b
= maybe_lookup_decl (b
, ctx
);
7842 b
= TREE_OPERAND (d
, 1);
7843 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
7845 if (integer_zerop (bias
))
7849 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
7850 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
7851 TREE_TYPE (b
), b
, bias
);
7853 d
= TREE_OPERAND (d
, 0);
7855 /* For ref build_outer_var_ref already performs this, so
7856 only new_var needs a dereference. */
7857 if (TREE_CODE (d
) == INDIRECT_REF
)
7859 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7860 gcc_assert (omp_privatize_by_reference (var
)
7861 && var
== orig_var
);
7863 else if (TREE_CODE (d
) == ADDR_EXPR
)
7865 if (orig_var
== var
)
7867 new_var
= build_fold_addr_expr (new_var
);
7868 ref
= build_fold_addr_expr (ref
);
7873 gcc_assert (orig_var
== var
);
7874 if (omp_privatize_by_reference (var
))
7875 ref
= build_fold_addr_expr (ref
);
7879 tree t
= maybe_lookup_decl (v
, ctx
);
7883 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
7884 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
7886 if (!integer_zerop (bias
))
7888 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
7889 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7890 TREE_TYPE (new_var
), new_var
,
7891 unshare_expr (bias
));
7892 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7893 TREE_TYPE (ref
), ref
, bias
);
7895 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
7896 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
7897 tree m
= create_tmp_var (ptype
);
7898 gimplify_assign (m
, new_var
, stmt_seqp
);
7900 m
= create_tmp_var (ptype
);
7901 gimplify_assign (m
, ref
, stmt_seqp
);
7903 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
7904 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7905 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7906 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
7907 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7908 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
7909 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7911 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7912 tree decl_placeholder
7913 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
7914 SET_DECL_VALUE_EXPR (placeholder
, out
);
7915 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7916 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
7917 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
7918 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7919 gimple_seq_add_seq (&sub_seq
,
7920 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7921 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7922 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7923 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
7931 tree zero
= build_zero_cst (TREE_TYPE (out
));
7932 out2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7933 boolean_type_node
, out
, zero
);
7934 priv2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7935 boolean_type_node
, priv
, zero
);
7937 x
= build2 (code
, TREE_TYPE (out2
), out2
, priv2
);
7939 x
= fold_convert (TREE_TYPE (out
), x
);
7940 out
= unshare_expr (out
);
7941 gimplify_assign (out
, x
, &sub_seq
);
7943 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
7944 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7945 gimple_seq_add_stmt (&sub_seq
, g
);
7946 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
7947 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7948 gimple_seq_add_stmt (&sub_seq
, g
);
7949 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
7950 build_int_cst (TREE_TYPE (i
), 1));
7951 gimple_seq_add_stmt (&sub_seq
, g
);
7952 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
7953 gimple_seq_add_stmt (&sub_seq
, g
);
7954 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
7956 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7958 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7960 if (omp_privatize_by_reference (var
)
7961 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7963 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
7964 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7965 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7966 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7967 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7968 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7969 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7973 tree new_var2
= new_var
;
7977 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7978 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7979 boolean_type_node
, new_var
, zero
);
7980 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, boolean_type_node
,
7983 x
= build2 (code
, TREE_TYPE (ref
), ref2
, new_var2
);
7985 x
= fold_convert (TREE_TYPE (new_var
), x
);
7986 ref
= build_outer_var_ref (var
, ctx
);
7987 gimplify_assign (ref
, x
, &sub_seq
);
7991 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
7993 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7995 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
7999 gimple_seq_add_seq (stmt_seqp
, *clist
);
8003 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
8005 gimple_seq_add_stmt (stmt_seqp
, stmt
);
8009 /* Generate code to implement the COPYPRIVATE clauses. */
8012 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
8017 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8019 tree var
, new_var
, ref
, x
;
8021 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8023 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
8026 var
= OMP_CLAUSE_DECL (c
);
8027 by_ref
= use_pointer_for_field (var
, NULL
);
8029 ref
= build_sender_ref (var
, ctx
);
8030 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
8033 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
8034 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
8036 gimplify_assign (ref
, x
, slist
);
8038 ref
= build_receiver_ref (var
, false, ctx
);
8041 ref
= fold_convert_loc (clause_loc
,
8042 build_pointer_type (TREE_TYPE (new_var
)),
8044 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
8046 if (omp_privatize_by_reference (var
))
8048 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
8049 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
8050 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
8052 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
8053 gimplify_and_add (x
, rlist
);
8058 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
8059 and REDUCTION from the sender (aka parent) side. */
8062 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
8066 int ignored_looptemp
= 0;
8067 bool is_taskloop
= false;
8069 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
8070 by GOMP_taskloop. */
8071 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
8073 ignored_looptemp
= 2;
8077 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8079 tree val
, ref
, x
, var
;
8080 bool by_ref
, do_in
= false, do_out
= false;
8081 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8083 switch (OMP_CLAUSE_CODE (c
))
8085 case OMP_CLAUSE_PRIVATE
:
8086 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
8089 case OMP_CLAUSE_FIRSTPRIVATE
:
8090 case OMP_CLAUSE_COPYIN
:
8091 case OMP_CLAUSE_LASTPRIVATE
:
8092 case OMP_CLAUSE_IN_REDUCTION
:
8093 case OMP_CLAUSE__REDUCTEMP_
:
8095 case OMP_CLAUSE_REDUCTION
:
8096 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
8099 case OMP_CLAUSE_SHARED
:
8100 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
8103 case OMP_CLAUSE__LOOPTEMP_
:
8104 if (ignored_looptemp
)
8114 val
= OMP_CLAUSE_DECL (c
);
8115 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
8116 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
8117 && TREE_CODE (val
) == MEM_REF
)
8119 val
= TREE_OPERAND (val
, 0);
8120 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
8121 val
= TREE_OPERAND (val
, 0);
8122 if (TREE_CODE (val
) == INDIRECT_REF
8123 || TREE_CODE (val
) == ADDR_EXPR
)
8124 val
= TREE_OPERAND (val
, 0);
8125 if (is_variable_sized (val
))
8129 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8130 outer taskloop region. */
8131 omp_context
*ctx_for_o
= ctx
;
8133 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
8134 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
8135 ctx_for_o
= ctx
->outer
;
8137 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
8139 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
8140 && is_global_var (var
)
8141 && (val
== OMP_CLAUSE_DECL (c
)
8142 || !is_task_ctx (ctx
)
8143 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
8144 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
8145 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
8146 != POINTER_TYPE
)))))
8149 t
= omp_member_access_dummy_var (var
);
8152 var
= DECL_VALUE_EXPR (var
);
8153 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
8155 var
= unshare_and_remap (var
, t
, o
);
8157 var
= unshare_expr (var
);
8160 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
8162 /* Handle taskloop firstprivate/lastprivate, where the
8163 lastprivate on GIMPLE_OMP_TASK is represented as
8164 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8165 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
8166 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
8167 if (use_pointer_for_field (val
, ctx
))
8168 var
= build_fold_addr_expr (var
);
8169 gimplify_assign (x
, var
, ilist
);
8170 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
8174 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
8175 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
8176 || val
== OMP_CLAUSE_DECL (c
))
8177 && is_variable_sized (val
))
8179 by_ref
= use_pointer_for_field (val
, NULL
);
8181 switch (OMP_CLAUSE_CODE (c
))
8183 case OMP_CLAUSE_FIRSTPRIVATE
:
8184 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
8186 && is_task_ctx (ctx
))
8187 suppress_warning (var
);
8191 case OMP_CLAUSE_PRIVATE
:
8192 case OMP_CLAUSE_COPYIN
:
8193 case OMP_CLAUSE__LOOPTEMP_
:
8194 case OMP_CLAUSE__REDUCTEMP_
:
8198 case OMP_CLAUSE_LASTPRIVATE
:
8199 if (by_ref
|| omp_privatize_by_reference (val
))
8201 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
8208 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
8213 case OMP_CLAUSE_REDUCTION
:
8214 case OMP_CLAUSE_IN_REDUCTION
:
8216 if (val
== OMP_CLAUSE_DECL (c
))
8218 if (is_task_ctx (ctx
))
8219 by_ref
= use_pointer_for_field (val
, ctx
);
8221 do_out
= !(by_ref
|| omp_privatize_by_reference (val
));
8224 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
8233 ref
= build_sender_ref (val
, ctx
);
8234 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
8235 gimplify_assign (ref
, x
, ilist
);
8236 if (is_task_ctx (ctx
))
8237 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
8242 ref
= build_sender_ref (val
, ctx
);
8243 gimplify_assign (var
, ref
, olist
);
8248 /* Generate code to implement SHARED from the sender (aka parent)
8249 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8250 list things that got automatically shared. */
8253 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
8255 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
8257 if (ctx
->record_type
== NULL
)
8260 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
8261 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
8263 ovar
= DECL_ABSTRACT_ORIGIN (f
);
8264 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
8267 nvar
= maybe_lookup_decl (ovar
, ctx
);
8269 || !DECL_HAS_VALUE_EXPR_P (nvar
)
8270 || (ctx
->allocate_map
8271 && ctx
->allocate_map
->get (ovar
)))
8274 /* If CTX is a nested parallel directive. Find the immediately
8275 enclosing parallel or workshare construct that contains a
8276 mapping for OVAR. */
8277 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
8279 t
= omp_member_access_dummy_var (var
);
8282 var
= DECL_VALUE_EXPR (var
);
8283 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
8285 var
= unshare_and_remap (var
, t
, o
);
8287 var
= unshare_expr (var
);
8290 if (use_pointer_for_field (ovar
, ctx
))
8292 x
= build_sender_ref (ovar
, ctx
);
8293 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
8294 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
8296 gcc_assert (is_parallel_ctx (ctx
)
8297 && DECL_ARTIFICIAL (ovar
));
8298 /* _condtemp_ clause. */
8299 var
= build_constructor (TREE_TYPE (x
), NULL
);
8302 var
= build_fold_addr_expr (var
);
8303 gimplify_assign (x
, var
, ilist
);
8307 x
= build_sender_ref (ovar
, ctx
);
8308 gimplify_assign (x
, var
, ilist
);
8310 if (!TREE_READONLY (var
)
8311 /* We don't need to receive a new reference to a result
8312 or parm decl. In fact we may not store to it as we will
8313 invalidate any pending RSO and generate wrong gimple
8315 && !((TREE_CODE (var
) == RESULT_DECL
8316 || TREE_CODE (var
) == PARM_DECL
)
8317 && DECL_BY_REFERENCE (var
)))
8319 x
= build_sender_ref (ovar
, ctx
);
8320 gimplify_assign (var
, x
, olist
);
8326 /* Emit an OpenACC head marker call, encapulating the partitioning and
8327 other information that must be processed by the target compiler.
8328 Return the maximum number of dimensions the associated loop might
8329 be partitioned over. */
8332 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
8333 gimple_seq
*seq
, omp_context
*ctx
)
8335 unsigned levels
= 0;
8337 tree gang_static
= NULL_TREE
;
8338 auto_vec
<tree
, 5> args
;
8340 args
.quick_push (build_int_cst
8341 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
8342 args
.quick_push (ddvar
);
8343 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8345 switch (OMP_CLAUSE_CODE (c
))
8347 case OMP_CLAUSE_GANG
:
8348 tag
|= OLF_DIM_GANG
;
8349 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
8350 /* static:* is represented by -1, and we can ignore it, as
8351 scheduling is always static. */
8352 if (gang_static
&& integer_minus_onep (gang_static
))
8353 gang_static
= NULL_TREE
;
8357 case OMP_CLAUSE_WORKER
:
8358 tag
|= OLF_DIM_WORKER
;
8362 case OMP_CLAUSE_VECTOR
:
8363 tag
|= OLF_DIM_VECTOR
;
8367 case OMP_CLAUSE_SEQ
:
8371 case OMP_CLAUSE_AUTO
:
8375 case OMP_CLAUSE_INDEPENDENT
:
8376 tag
|= OLF_INDEPENDENT
;
8379 case OMP_CLAUSE_TILE
:
8383 case OMP_CLAUSE_REDUCTION
:
8384 tag
|= OLF_REDUCTION
;
8394 if (DECL_P (gang_static
))
8395 gang_static
= build_outer_var_ref (gang_static
, ctx
);
8396 tag
|= OLF_GANG_STATIC
;
8399 omp_context
*tgt
= enclosing_target_ctx (ctx
);
8400 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
8402 else if (is_oacc_kernels (tgt
))
8403 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8405 else if (is_oacc_kernels_decomposed_part (tgt
))
8410 /* In a parallel region, loops are implicitly INDEPENDENT. */
8411 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
8412 tag
|= OLF_INDEPENDENT
;
8414 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8415 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8416 if (tgt
&& is_oacc_kernels_decomposed_part (tgt
))
8418 gcc_assert (tag
& (OLF_SEQ
| OLF_INDEPENDENT
));
8419 gcc_assert (!(tag
& OLF_AUTO
));
8423 /* Tiling could use all 3 levels. */
8427 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8428 Ensure at least one level, or 2 for possible auto
8430 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
8431 << OLF_DIM_BASE
) | OLF_SEQ
));
8433 if (levels
< 1u + maybe_auto
)
8434 levels
= 1u + maybe_auto
;
8437 args
.quick_push (build_int_cst (integer_type_node
, levels
));
8438 args
.quick_push (build_int_cst (integer_type_node
, tag
));
8440 args
.quick_push (gang_static
);
8442 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
8443 gimple_set_location (call
, loc
);
8444 gimple_set_lhs (call
, ddvar
);
8445 gimple_seq_add_stmt (seq
, call
);
8450 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
8451 partitioning level of the enclosed region. */
8454 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
8455 tree tofollow
, gimple_seq
*seq
)
8457 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
8458 : IFN_UNIQUE_OACC_TAIL_MARK
);
8459 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
8460 int nargs
= 2 + (tofollow
!= NULL_TREE
);
8461 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
8462 marker
, ddvar
, tofollow
);
8463 gimple_set_location (call
, loc
);
8464 gimple_set_lhs (call
, ddvar
);
8465 gimple_seq_add_stmt (seq
, call
);
8468 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8469 the loop clauses, from which we extract reductions. Initialize
8473 lower_oacc_head_tail (location_t loc
, tree clauses
, gcall
*private_marker
,
8474 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
8477 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
8478 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
8480 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
8484 gimple_set_location (private_marker
, loc
);
8485 gimple_call_set_lhs (private_marker
, ddvar
);
8486 gimple_call_set_arg (private_marker
, 1, ddvar
);
8489 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
8490 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
8493 for (unsigned done
= 1; count
; count
--, done
++)
8495 gimple_seq fork_seq
= NULL
;
8496 gimple_seq join_seq
= NULL
;
8498 tree place
= build_int_cst (integer_type_node
, -1);
8499 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
8500 fork_kind
, ddvar
, place
);
8501 gimple_set_location (fork
, loc
);
8502 gimple_set_lhs (fork
, ddvar
);
8504 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
8505 join_kind
, ddvar
, place
);
8506 gimple_set_location (join
, loc
);
8507 gimple_set_lhs (join
, ddvar
);
8509 /* Mark the beginning of this level sequence. */
8511 lower_oacc_loop_marker (loc
, ddvar
, true,
8512 build_int_cst (integer_type_node
, count
),
8514 lower_oacc_loop_marker (loc
, ddvar
, false,
8515 build_int_cst (integer_type_node
, done
),
8518 lower_oacc_reductions (loc
, clauses
, place
, inner
,
8519 fork
, (count
== 1) ? private_marker
: NULL
,
8520 join
, &fork_seq
, &join_seq
, ctx
);
8522 /* Append this level to head. */
8523 gimple_seq_add_seq (head
, fork_seq
);
8524 /* Prepend it to tail. */
8525 gimple_seq_add_seq (&join_seq
, *tail
);
8531 /* Mark the end of the sequence. */
8532 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
8533 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
8536 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8537 catch handler and return it. This prevents programs from violating the
8538 structured block semantics with throws. */
8541 maybe_catch_exception (gimple_seq body
)
8546 if (!flag_exceptions
)
8549 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
8550 decl
= lang_hooks
.eh_protect_cleanup_actions ();
8552 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
8554 g
= gimple_build_eh_must_not_throw (decl
);
8555 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
8558 return gimple_seq_alloc_with_stmt (g
);
8562 /* Routines to lower OMP directives into OMP-GIMPLE. */
8564 /* If ctx is a worksharing context inside of a cancellable parallel
8565 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8566 and conditional branch to parallel's cancel_label to handle
8567 cancellation in the implicit barrier. */
8570 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
8573 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
8574 if (gimple_omp_return_nowait_p (omp_return
))
8576 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
8577 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
8578 && outer
->cancellable
)
8580 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
8581 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
8582 tree lhs
= create_tmp_var (c_bool_type
);
8583 gimple_omp_return_set_lhs (omp_return
, lhs
);
8584 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
8585 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
8586 fold_convert (c_bool_type
,
8587 boolean_false_node
),
8588 outer
->cancel_label
, fallthru_label
);
8589 gimple_seq_add_stmt (body
, g
);
8590 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
8592 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
8593 && gimple_code (outer
->stmt
) != GIMPLE_OMP_SCOPE
)
8597 /* Find the first task_reduction or reduction clause or return NULL
8598 if there are none. */
8601 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
8602 enum omp_clause_code ccode
)
8606 clauses
= omp_find_clause (clauses
, ccode
);
8607 if (clauses
== NULL_TREE
)
8609 if (ccode
!= OMP_CLAUSE_REDUCTION
8610 || code
== OMP_TASKLOOP
8611 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
8613 clauses
= OMP_CLAUSE_CHAIN (clauses
);
8617 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
8618 gimple_seq
*, gimple_seq
*);
8620 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8621 CTX is the enclosing OMP context for the current statement. */
8624 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8626 tree block
, control
;
8627 gimple_stmt_iterator tgsi
;
8628 gomp_sections
*stmt
;
8630 gbind
*new_stmt
, *bind
;
8631 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
8633 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
8635 push_gimplify_context ();
8641 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
8642 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
8643 tree rtmp
= NULL_TREE
;
8646 tree type
= build_pointer_type (pointer_sized_int_node
);
8647 tree temp
= create_tmp_var (type
);
8648 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8649 OMP_CLAUSE_DECL (c
) = temp
;
8650 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
8651 gimple_omp_sections_set_clauses (stmt
, c
);
8652 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
8653 gimple_omp_sections_clauses (stmt
),
8654 &ilist
, &tred_dlist
);
8656 rtmp
= make_ssa_name (type
);
8657 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
8660 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
8661 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
8663 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
8664 &ilist
, &dlist
, ctx
, NULL
);
8666 control
= create_tmp_var (unsigned_type_node
, ".section");
8667 gimple_omp_sections_set_control (stmt
, control
);
8669 new_body
= gimple_omp_body (stmt
);
8670 gimple_omp_set_body (stmt
, NULL
);
8671 tgsi
= gsi_start (new_body
);
8672 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
8677 sec_start
= gsi_stmt (tgsi
);
8678 sctx
= maybe_lookup_ctx (sec_start
);
8681 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
8682 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
8683 GSI_CONTINUE_LINKING
);
8684 gimple_omp_set_body (sec_start
, NULL
);
8686 if (gsi_one_before_end_p (tgsi
))
8688 gimple_seq l
= NULL
;
8689 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
8690 &ilist
, &l
, &clist
, ctx
);
8691 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
8692 gimple_omp_section_set_last (sec_start
);
8695 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
8696 GSI_CONTINUE_LINKING
);
8699 block
= make_node (BLOCK
);
8700 bind
= gimple_build_bind (NULL
, new_body
, block
);
8703 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
8707 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8708 gcall
*g
= gimple_build_call (fndecl
, 0);
8709 gimple_seq_add_stmt (&olist
, g
);
8710 gimple_seq_add_seq (&olist
, clist
);
8711 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8712 g
= gimple_build_call (fndecl
, 0);
8713 gimple_seq_add_stmt (&olist
, g
);
8716 block
= make_node (BLOCK
);
8717 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
8718 gsi_replace (gsi_p
, new_stmt
, true);
8720 pop_gimplify_context (new_stmt
);
8721 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
8722 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
8723 if (BLOCK_VARS (block
))
8724 TREE_USED (block
) = 1;
8727 gimple_seq_add_seq (&new_body
, ilist
);
8728 gimple_seq_add_stmt (&new_body
, stmt
);
8729 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
8730 gimple_seq_add_stmt (&new_body
, bind
);
8732 t
= gimple_build_omp_continue (control
, control
);
8733 gimple_seq_add_stmt (&new_body
, t
);
8735 gimple_seq_add_seq (&new_body
, olist
);
8736 if (ctx
->cancellable
)
8737 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
8738 gimple_seq_add_seq (&new_body
, dlist
);
8740 new_body
= maybe_catch_exception (new_body
);
8742 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
8743 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8744 t
= gimple_build_omp_return (nowait
);
8745 gimple_seq_add_stmt (&new_body
, t
);
8746 gimple_seq_add_seq (&new_body
, tred_dlist
);
8747 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
8750 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
8752 gimple_bind_set_body (new_stmt
, new_body
);
8756 /* A subroutine of lower_omp_single. Expand the simple form of
8757 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8759 if (GOMP_single_start ())
8761 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8763 FIXME. It may be better to delay expanding the logic of this until
8764 pass_expand_omp. The expanded logic may make the job more difficult
8765 to a synchronization analysis pass. */
8768 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
8770 location_t loc
= gimple_location (single_stmt
);
8771 tree tlabel
= create_artificial_label (loc
);
8772 tree flabel
= create_artificial_label (loc
);
8773 gimple
*call
, *cond
;
8776 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
8777 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
8778 call
= gimple_build_call (decl
, 0);
8779 gimple_call_set_lhs (call
, lhs
);
8780 gimple_seq_add_stmt (pre_p
, call
);
8782 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
8783 fold_convert_loc (loc
, TREE_TYPE (lhs
),
8786 gimple_seq_add_stmt (pre_p
, cond
);
8787 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
8788 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8789 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
8793 /* A subroutine of lower_omp_single. Expand the simple form of
8794 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8796 #pragma omp single copyprivate (a, b, c)
8798 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8801 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8807 GOMP_single_copy_end (©out);
8818 FIXME. It may be better to delay expanding the logic of this until
8819 pass_expand_omp. The expanded logic may make the job more difficult
8820 to a synchronization analysis pass. */
8823 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
8826 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
8827 gimple_seq copyin_seq
;
8828 location_t loc
= gimple_location (single_stmt
);
8830 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
8832 ptr_type
= build_pointer_type (ctx
->record_type
);
8833 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
8835 l0
= create_artificial_label (loc
);
8836 l1
= create_artificial_label (loc
);
8837 l2
= create_artificial_label (loc
);
8839 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
8840 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
8841 t
= fold_convert_loc (loc
, ptr_type
, t
);
8842 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
8844 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
8845 build_int_cst (ptr_type
, 0));
8846 t
= build3 (COND_EXPR
, void_type_node
, t
,
8847 build_and_jump (&l0
), build_and_jump (&l1
));
8848 gimplify_and_add (t
, pre_p
);
8850 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
8852 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8855 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
8858 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
8859 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
8860 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
8861 gimplify_and_add (t
, pre_p
);
8863 t
= build_and_jump (&l2
);
8864 gimplify_and_add (t
, pre_p
);
8866 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
8868 gimple_seq_add_seq (pre_p
, copyin_seq
);
8870 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
8874 /* Expand code for an OpenMP single directive. */
8877 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8880 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
8882 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8884 push_gimplify_context ();
8886 block
= make_node (BLOCK
);
8887 bind
= gimple_build_bind (NULL
, NULL
, block
);
8888 gsi_replace (gsi_p
, bind
, true);
8891 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
8892 &bind_body
, &dlist
, ctx
, NULL
);
8893 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
8895 gimple_seq_add_stmt (&bind_body
, single_stmt
);
8897 if (ctx
->record_type
)
8898 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
8900 lower_omp_single_simple (single_stmt
, &bind_body
);
8902 gimple_omp_set_body (single_stmt
, NULL
);
8904 gimple_seq_add_seq (&bind_body
, dlist
);
8906 bind_body
= maybe_catch_exception (bind_body
);
8908 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
8909 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8910 gimple
*g
= gimple_build_omp_return (nowait
);
8911 gimple_seq_add_stmt (&bind_body_tail
, g
);
8912 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
8913 if (ctx
->record_type
)
8915 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
8916 tree clobber
= build_clobber (ctx
->record_type
);
8917 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
8918 clobber
), GSI_SAME_STMT
);
8920 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
8921 gimple_bind_set_body (bind
, bind_body
);
8923 pop_gimplify_context (bind
);
8925 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8926 BLOCK_VARS (block
) = ctx
->block_vars
;
8927 if (BLOCK_VARS (block
))
8928 TREE_USED (block
) = 1;
8932 /* Lower code for an OMP scope directive. */
8935 lower_omp_scope (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8938 gimple
*scope_stmt
= gsi_stmt (*gsi_p
);
8940 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8941 gimple_seq tred_dlist
= NULL
;
8943 push_gimplify_context ();
8945 block
= make_node (BLOCK
);
8946 bind
= gimple_build_bind (NULL
, NULL
, block
);
8947 gsi_replace (gsi_p
, bind
, true);
8952 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt
),
8953 OMP_SCOPE
, OMP_CLAUSE_REDUCTION
);
8956 tree type
= build_pointer_type (pointer_sized_int_node
);
8957 tree temp
= create_tmp_var (type
);
8958 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8959 OMP_CLAUSE_DECL (c
) = temp
;
8960 OMP_CLAUSE_CHAIN (c
) = gimple_omp_scope_clauses (scope_stmt
);
8961 gimple_omp_scope_set_clauses (scope_stmt
, c
);
8962 lower_omp_task_reductions (ctx
, OMP_SCOPE
,
8963 gimple_omp_scope_clauses (scope_stmt
),
8964 &bind_body
, &tred_dlist
);
8966 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START
);
8967 gimple
*stmt
= gimple_build_call (fndecl
, 1, temp
);
8968 gimple_seq_add_stmt (&bind_body
, stmt
);
8971 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt
),
8972 &bind_body
, &dlist
, ctx
, NULL
);
8973 lower_omp (gimple_omp_body_ptr (scope_stmt
), ctx
);
8975 gimple_seq_add_stmt (&bind_body
, scope_stmt
);
8977 gimple_seq_add_seq (&bind_body
, gimple_omp_body (scope_stmt
));
8979 gimple_omp_set_body (scope_stmt
, NULL
);
8981 gimple_seq clist
= NULL
;
8982 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt
),
8983 &bind_body
, &clist
, ctx
);
8986 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8987 gcall
*g
= gimple_build_call (fndecl
, 0);
8988 gimple_seq_add_stmt (&bind_body
, g
);
8989 gimple_seq_add_seq (&bind_body
, clist
);
8990 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8991 g
= gimple_build_call (fndecl
, 0);
8992 gimple_seq_add_stmt (&bind_body
, g
);
8995 gimple_seq_add_seq (&bind_body
, dlist
);
8997 bind_body
= maybe_catch_exception (bind_body
);
8999 bool nowait
= omp_find_clause (gimple_omp_scope_clauses (scope_stmt
),
9000 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
9001 gimple
*g
= gimple_build_omp_return (nowait
);
9002 gimple_seq_add_stmt (&bind_body_tail
, g
);
9003 gimple_seq_add_seq (&bind_body_tail
, tred_dlist
);
9004 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
9005 if (ctx
->record_type
)
9007 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
9008 tree clobber
= build_clobber (ctx
->record_type
);
9009 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
9010 clobber
), GSI_SAME_STMT
);
9012 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
9014 gimple_bind_set_body (bind
, bind_body
);
9016 pop_gimplify_context (bind
);
9018 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9019 BLOCK_VARS (block
) = ctx
->block_vars
;
9020 if (BLOCK_VARS (block
))
9021 TREE_USED (block
) = 1;
9023 /* Expand code for an OpenMP master or masked directive. */
/* NOTE(review): mangled extraction -- the return type, `bind`/`tseq`
   declarations, braces and the `if (filter) ... else` lines around 9040-9046
   are missing per the embedded line numbers.  The visible code guards the
   body with `omp_get_thread_num () == filter` (filter is 0 for `master`,
   the FILTER clause value for `masked`), jumping past the body otherwise.  */
9026 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9028 tree block
, lab
= NULL
, x
, bfn_decl
;
9029 gimple
*stmt
= gsi_stmt (*gsi_p
);
9031 location_t loc
= gimple_location (stmt
);
/* Default filter: thread 0, i.e. classic `master` semantics.  */
9033 tree filter
= integer_zero_node
;
9035 push_gimplify_context ();
/* `masked` may carry a FILTER clause selecting a different thread.  */
9037 if (gimple_code (stmt
) == GIMPLE_OMP_MASKED
)
9039 filter
= omp_find_clause (gimple_omp_masked_clauses (stmt
),
9042 filter
= fold_convert (integer_type_node
,
9043 OMP_CLAUSE_FILTER_EXPR (filter
));
9045 filter
= integer_zero_node
;
/* Wrap the directive in a new GIMPLE_BIND.  */
9047 block
= make_node (BLOCK
)
9048 bind
= gimple_build_bind (NULL
, NULL
, block
);
9049 gsi_replace (gsi_p
, bind
, true);
9050 gimple_bind_add_stmt (bind
, stmt
);
/* if (omp_get_thread_num () != filter) goto lab;  */
9052 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9053 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
9054 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, filter
);
9055 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
9057 gimplify_and_add (x
, &tseq
);
9058 gimple_bind_add_seq (bind
, tseq
);
/* Lower the body and splice it in, followed by the skip label.  */
9060 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9061 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9062 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9063 gimple_omp_set_body (stmt
, NULL
);
9065 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
/* OMP return with nowait=true: master/masked has no implied barrier.  */
9067 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9069 pop_gimplify_context (bind
);
9071 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9072 BLOCK_VARS (block
) = ctx
->block_vars
;
9075 /* Helper function for lower_omp_task_reductions. For a specific PASS
9076 find out the current clause it should be processed, or return false
9077 if all have been processed already. */
/* NOTE(review): mangled extraction -- the return type (a bool, per the
   comment above), braces, `continue`s and the `return` statements around
   lines 9093-9098 and 9102-9110 are missing.  Visible logic: walk the
   clause chain via omp_find_clause, skip non-task REDUCTION clauses
   (unless taskloop), derive *decl/*type, and match the clause's
   constant-vs-variable size against PASS before accepting it.  */
9080 omp_task_reduction_iterate (int pass
, enum tree_code code
,
9081 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
9082 tree
*type
, tree
*next
)
9084 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
/* Plain (non-task) reductions are handled elsewhere except for taskloop.  */
9086 if (ccode
== OMP_CLAUSE_REDUCTION
9087 && code
!= OMP_TASKLOOP
9088 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
9090 *decl
= OMP_CLAUSE_DECL (*c
);
9091 *type
= TREE_TYPE (*decl
);
9092 if (TREE_CODE (*decl
) == MEM_REF
)
/* By-reference privatization: the element type is one level down.  */
9099 if (omp_privatize_by_reference (*decl
))
9100 *type
= TREE_TYPE (*type
);
/* Pass 0 handles constant-size types, pass 1 variable-size ones.  */
9101 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
9104 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
9113 /* Lower task_reduction and reduction clauses (the latter unless CODE is
9114 OMP_TASKGROUP only with task modifier). Register mapping of those in
9115 START sequence and reducing them and unregister them in the END sequence. */
/* NOTE(review): mangled extraction of a very large function -- the return
   type, many local declarations (cnt, cachesz, bind, ptr, new_var, cond,
   arg, g, ...), braces, `else` arms and whole statements are missing per
   the embedded line numbers.  Comments below mark only the major phases
   that the visible calls establish; confirm details against upstream
   omp-low.cc before relying on them.  */
9118 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
9119 gimple_seq
*start
, gimple_seq
*end
)
9121 enum omp_clause_code ccode
9122 = (code
== OMP_TASKGROUP
9123 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
9124 tree cancellable
= NULL_TREE
;
9125 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
9126 if (clauses
== NULL_TREE
)
/* For worksharing/scope inside a cancellable parallel, remember that
   cancellation must be checked while merging.  */
9128 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9130 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
9131 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
9132 && outer
->cancellable
)
9134 cancellable
= error_mark_node
;
9137 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
9138 && gimple_code (outer
->stmt
) != GIMPLE_OMP_SCOPE
)
/* Phase 1: build a RECORD_TYPE holding, per reduction, the private copy
   plus a bool "initialized" flag (fields built in the pass loop below).  */
9141 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
9142 tree
*last
= &TYPE_FIELDS (record_type
);
9146 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
9148 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
9151 DECL_CHAIN (field
) = ifield
;
9152 last
= &DECL_CHAIN (ifield
);
9153 DECL_CONTEXT (field
) = record_type
;
9154 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
9155 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
9156 DECL_CONTEXT (ifield
) = record_type
;
9157 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
9158 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
/* Two passes: constant-size types first, then variable-size (see
   omp_task_reduction_iterate).  */
9160 for (int pass
= 0; pass
< 2; pass
++)
9162 tree decl
, type
, next
;
9163 for (tree c
= clauses
;
9164 omp_task_reduction_iterate (pass
, code
, ccode
,
9165 &c
, &decl
, &type
, &next
); c
= next
)
9168 tree new_type
= type
;
9170 new_type
= remap_type (type
, &ctx
->outer
->cb
);
9172 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
9173 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
9175 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
9177 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
9178 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
9179 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
9182 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
9183 DECL_CONTEXT (field
) = record_type
;
9184 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
9185 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
9187 last
= &DECL_CHAIN (field
);
9189 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
9191 DECL_CONTEXT (bfield
) = record_type
;
9192 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
9193 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
9195 last
= &DECL_CHAIN (bfield
);
9199 layout_type (record_type
);
9201 /* Build up an array which registers with the runtime all the reductions
9202 and deregisters them at the end. Format documented in libgomp/task.c. */
9203 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
9204 tree avar
= create_tmp_var_raw (atype
);
9205 gimple_add_tmp_var (avar
);
9206 TREE_ADDRESSABLE (avar
) = 1;
/* avar[0] = number of reductions.  */
9207 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
9208 NULL_TREE
, NULL_TREE
);
9209 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
9210 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9211 gimple_seq seq
= NULL
;
/* avar[1] = per-thread chunk size, rounded up to `cachesz` (declared on a
   missing line -- presumably a cache-line constant; confirm upstream).  */
9212 tree sz
= fold_convert (pointer_sized_int_node
,
9213 TYPE_SIZE_UNIT (record_type
));
9215 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
9216 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
9217 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
9218 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
9219 ctx
->task_reductions
.create (1 + cnt
);
9220 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
9221 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
9223 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
9224 gimple_seq_add_seq (start
, seq
);
9225 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
9226 NULL_TREE
, NULL_TREE
);
9227 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
/* avar[2] = alignment; avar[3] = -1; avar[4] = 0 (runtime-owned slots).  */
9228 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
9229 NULL_TREE
, NULL_TREE
);
9230 t
= build_int_cst (pointer_sized_int_node
,
9231 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
9232 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9233 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
9234 NULL_TREE
, NULL_TREE
);
9235 t
= build_int_cst (pointer_sized_int_node
, -1);
9236 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9237 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
9238 NULL_TREE
, NULL_TREE
);
9239 t
= build_int_cst (pointer_sized_int_node
, 0);
9240 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9242 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9243 and for each task reduction checks a bool right after the private variable
9244 within that thread's chunk; if the bool is clear, it hasn't been
9245 initialized and thus isn't going to be reduced nor destructed, otherwise
9246 reduce and destruct it. */
9247 tree idx
= create_tmp_var (size_type_node
);
9248 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
9249 tree num_thr_sz
= create_tmp_var (size_type_node
);
9250 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
9251 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
9252 tree lab3
= NULL_TREE
, lab7
= NULL_TREE
;
9254 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9256 /* For worksharing constructs or scope, only perform it in the master
9257 thread, with the exception of cancelled implicit barriers - then only
9258 handle the current thread. */
9259 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
9260 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9261 tree thr_num
= create_tmp_var (integer_type_node
);
9262 g
= gimple_build_call (t
, 0);
9263 gimple_call_set_lhs (g
, thr_num
);
9264 gimple_seq_add_stmt (end
, g
);
9268 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9269 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9270 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9271 if (code
== OMP_FOR
)
9272 c
= gimple_omp_for_clauses (ctx
->stmt
);
9273 else if (code
== OMP_SECTIONS
)
9274 c
= gimple_omp_sections_clauses (ctx
->stmt
);
9275 else /* if (code == OMP_SCOPE) */
9276 c
= gimple_omp_scope_clauses (ctx
->stmt
);
9277 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
/* Cancelled case: only the current thread's chunk is processed.  */
9279 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
9281 gimple_seq_add_stmt (end
, g
);
9282 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9283 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
9284 gimple_seq_add_stmt (end
, g
);
9285 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
9286 build_one_cst (TREE_TYPE (idx
)));
9287 gimple_seq_add_stmt (end
, g
);
9288 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
9289 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
/* Non-master threads skip straight past the reduction loop.  */
9291 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
9292 gimple_seq_add_stmt (end
, g
);
9293 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
9295 if (code
!= OMP_PARALLEL
)
9297 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
9298 tree num_thr
= create_tmp_var (integer_type_node
);
9299 g
= gimple_build_call (t
, 0);
9300 gimple_call_set_lhs (g
, num_thr
);
9301 gimple_seq_add_stmt (end
, g
);
9302 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
9303 gimple_seq_add_stmt (end
, g
);
9305 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
/* OMP_PARALLEL path: the thread count comes from the _reductemp_ value.  */
9309 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
9310 OMP_CLAUSE__REDUCTEMP_
);
9311 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
9312 t
= fold_convert (size_type_node
, t
);
9313 gimplify_assign (num_thr_sz
, t
, end
);
9315 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
9316 NULL_TREE
, NULL_TREE
);
9317 tree data
= create_tmp_var (pointer_sized_int_node
);
9318 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
9319 if (code
== OMP_TASKLOOP
)
/* Taskloop: skip everything if the runtime never allocated the data.  */
9321 lab7
= create_artificial_label (UNKNOWN_LOCATION
);
9322 g
= gimple_build_cond (NE_EXPR
, data
,
9323 build_zero_cst (pointer_sized_int_node
),
9325 gimple_seq_add_stmt (end
, g
);
/* Per-thread loop head (lab1) over each thread's chunk.  */
9327 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
9329 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
9330 ptr
= create_tmp_var (build_pointer_type (record_type
));
9332 ptr
= create_tmp_var (ptr_type_node
);
9333 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
/* Skip the two bookkeeping fields built before the pass loop.  */
9335 tree field
= TYPE_FIELDS (record_type
);
9338 field
= DECL_CHAIN (DECL_CHAIN (field
));
/* Phase 2: per clause, register address/offset in avar (START) and emit
   the merge + optional destructor code (END).  */
9339 for (int pass
= 0; pass
< 2; pass
++)
9341 tree decl
, type
, next
;
9342 for (tree c
= clauses
;
9343 omp_task_reduction_iterate (pass
, code
, ccode
,
9344 &c
, &decl
, &type
, &next
); c
= next
)
9346 tree var
= decl
, ref
;
9347 if (TREE_CODE (decl
) == MEM_REF
)
9349 var
= TREE_OPERAND (var
, 0);
9350 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
9351 var
= TREE_OPERAND (var
, 0);
9353 if (TREE_CODE (var
) == ADDR_EXPR
)
9354 var
= TREE_OPERAND (var
, 0);
9355 else if (TREE_CODE (var
) == INDIRECT_REF
)
9356 var
= TREE_OPERAND (var
, 0);
9357 tree orig_var
= var
;
9358 if (is_variable_sized (var
))
9360 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
9361 var
= DECL_VALUE_EXPR (var
);
9362 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
9363 var
= TREE_OPERAND (var
, 0);
9364 gcc_assert (DECL_P (var
));
9366 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
/* `v` below is declared on a missing line -- TODO confirm upstream.  */
9367 if (orig_var
!= var
)
9368 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
9369 else if (TREE_CODE (v
) == ADDR_EXPR
)
9370 t
= build_fold_addr_expr (t
);
9371 else if (TREE_CODE (v
) == INDIRECT_REF
)
9372 t
= build_fold_indirect_ref (t
);
9373 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
9375 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
9376 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
9377 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
9379 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
9380 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
9381 fold_convert (size_type_node
,
9382 TREE_OPERAND (decl
, 1)));
9386 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
9387 if (!omp_privatize_by_reference (decl
))
9388 t
= build_fold_addr_expr (t
);
9390 t
= fold_convert (pointer_sized_int_node
, t
);
9392 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
9393 gimple_seq_add_seq (start
, seq
);
/* avar[7 + cnt*3] = address of the original variable.  */
9394 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9395 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
9396 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9397 t
= unshare_expr (byte_position (field
));
9398 t
= fold_convert (pointer_sized_int_node
, t
);
9399 ctx
->task_reduction_map
->put (c
, cnt
);
9400 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
9403 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
9404 gimple_seq_add_seq (start
, seq
);
/* avar[7 + cnt*3 + 1] = byte offset of the field within the record.  */
9405 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9406 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
9407 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9409 tree bfield
= DECL_CHAIN (field
);
/* END side: decide whether this thread's copy was ever initialized.  */
9411 if (code
== OMP_PARALLEL
9413 || code
== OMP_SECTIONS
9414 || code
== OMP_SCOPE
)
9415 /* In parallel, worksharing or scope all threads unconditionally
9416 initialize all their task reduction private variables. */
9417 cond
= boolean_true_node
;
9418 else if (TREE_TYPE (ptr
) == ptr_type_node
)
9420 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
9421 unshare_expr (byte_position (bfield
)));
9423 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
9424 gimple_seq_add_seq (end
, seq
);
9425 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
9426 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
9427 build_int_cst (pbool
, 0));
9430 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
9431 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
9432 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9433 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
9434 tree condv
= create_tmp_var (boolean_type_node
);
9435 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
9436 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
9438 gimple_seq_add_stmt (end
, g
);
9439 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
9440 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
9442 /* If this reduction doesn't need destruction and parallel
9443 has been cancelled, there is nothing to do for this
9444 reduction, so jump around the merge operation. */
9445 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9446 g
= gimple_build_cond (NE_EXPR
, cancellable
,
9447 build_zero_cst (TREE_TYPE (cancellable
)),
9449 gimple_seq_add_stmt (end
, g
);
9450 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
/* Address of this thread's private copy inside the chunk.  */
9454 if (TREE_TYPE (ptr
) == ptr_type_node
)
9456 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
9457 unshare_expr (byte_position (field
)));
9459 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
9460 gimple_seq_add_seq (end
, seq
);
9461 tree pbool
= build_pointer_type (TREE_TYPE (field
));
9462 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
9463 build_int_cst (pbool
, 0));
9466 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
9467 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
9469 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
9470 if (TREE_CODE (decl
) != MEM_REF
9471 && omp_privatize_by_reference (decl
))
9472 ref
= build_simple_mem_ref (ref
);
9473 /* reduction(-:var) sums up the partial results, so it acts
9474 identically to reduction(+:var). */
9475 if (rcode
== MINUS_EXPR
)
/* Array-section reductions (MEM_REF decl): loop element by element.  */
9477 if (TREE_CODE (decl
) == MEM_REF
)
9479 tree type
= TREE_TYPE (new_var
);
9480 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
9481 tree i
= create_tmp_var (TREE_TYPE (v
));
9482 tree ptype
= build_pointer_type (TREE_TYPE (type
));
9485 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
9486 tree vv
= create_tmp_var (TREE_TYPE (v
));
9487 gimplify_assign (vv
, v
, start
);
9490 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9491 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
9492 new_var
= build_fold_addr_expr (new_var
);
9493 new_var
= fold_convert (ptype
, new_var
);
9494 ref
= fold_convert (ptype
, ref
);
9495 tree m
= create_tmp_var (ptype
);
9496 gimplify_assign (m
, new_var
, end
);
9498 m
= create_tmp_var (ptype
);
9499 gimplify_assign (m
, ref
, end
);
9501 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
9502 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
9503 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
9504 gimple_seq_add_stmt (end
, gimple_build_label (body
));
9505 tree priv
= build_simple_mem_ref (new_var
);
9506 tree out
= build_simple_mem_ref (ref
);
9507 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
/* User-defined reduction: splice in the lowered combiner, using the
   placeholders as value-exprs for out/priv.  */
9509 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9510 tree decl_placeholder
9511 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
9512 tree lab6
= NULL_TREE
;
9515 /* If this reduction needs destruction and parallel
9516 has been cancelled, jump around the merge operation
9517 to the destruction. */
9518 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9519 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9520 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
9521 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
9523 gimple_seq_add_stmt (end
, g
);
9524 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9526 SET_DECL_VALUE_EXPR (placeholder
, out
);
9527 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9528 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
9529 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
9530 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
9531 gimple_seq_add_seq (end
,
9532 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9533 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9534 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9536 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
9537 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
9540 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9541 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
9544 gimple_seq tseq
= NULL
;
9545 gimplify_stmt (&x
, &tseq
);
9546 gimple_seq_add_seq (end
, tseq
);
/* Built-in reduction operator: out = out <rcode> priv.  */
9551 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
9552 out
= unshare_expr (out
);
9553 gimplify_assign (out
, x
, end
);
/* Advance element pointers and loop until i > max index.  */
9556 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
9557 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9558 gimple_seq_add_stmt (end
, g
);
9559 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
9560 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9561 gimple_seq_add_stmt (end
, g
);
9562 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
9563 build_int_cst (TREE_TYPE (i
), 1));
9564 gimple_seq_add_stmt (end
, g
);
9565 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
9566 gimple_seq_add_stmt (end
, g
);
9567 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
9569 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
/* Scalar user-defined reduction (non-MEM_REF decl).  */
9571 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9572 tree oldv
= NULL_TREE
;
9573 tree lab6
= NULL_TREE
;
9576 /* If this reduction needs destruction and parallel
9577 has been cancelled, jump around the merge operation
9578 to the destruction. */
9579 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9580 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9581 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
9582 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
9584 gimple_seq_add_stmt (end
, g
);
9585 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9587 if (omp_privatize_by_reference (decl
)
9588 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
9590 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
9591 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
9592 tree refv
= create_tmp_var (TREE_TYPE (ref
));
9593 gimplify_assign (refv
, ref
, end
);
9594 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
9595 SET_DECL_VALUE_EXPR (placeholder
, ref
);
9596 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9597 tree d
= maybe_lookup_decl (decl
, ctx
);
9599 if (DECL_HAS_VALUE_EXPR_P (d
))
9600 oldv
= DECL_VALUE_EXPR (d
);
9601 if (omp_privatize_by_reference (var
))
9603 tree v
= fold_convert (TREE_TYPE (d
),
9604 build_fold_addr_expr (new_var
));
9605 SET_DECL_VALUE_EXPR (d
, v
);
9608 SET_DECL_VALUE_EXPR (d
, new_var
);
9609 DECL_HAS_VALUE_EXPR_P (d
) = 1;
9610 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
/* Restore the decl's previous value-expr (or clear it).  */
9612 SET_DECL_VALUE_EXPR (d
, oldv
);
9615 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
9616 DECL_HAS_VALUE_EXPR_P (d
) = 0;
9618 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9619 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9620 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9621 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
9623 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9624 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
9627 gimple_seq tseq
= NULL
;
9628 gimplify_stmt (&x
, &tseq
);
9629 gimple_seq_add_seq (end
, tseq
);
/* Scalar built-in operator: ref = ref <rcode> new_var.  */
9634 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
9635 ref
= unshare_expr (ref
);
9636 gimplify_assign (ref
, x
, end
);
9638 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
9640 field
= DECL_CHAIN (bfield
);
/* Phase 3 (START): hand avar to the runtime.  */
9644 if (code
== OMP_TASKGROUP
)
9646 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
9647 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
9648 gimple_seq_add_stmt (start
, g
);
9653 if (code
== OMP_FOR
)
9654 c
= gimple_omp_for_clauses (ctx
->stmt
);
9655 else if (code
== OMP_SECTIONS
)
9656 c
= gimple_omp_sections_clauses (ctx
->stmt
);
9657 else if (code
== OMP_SCOPE
)
9658 c
= gimple_omp_scope_clauses (ctx
->stmt
);
9660 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
9661 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
9662 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
9663 build_fold_addr_expr (avar
));
9664 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
/* Phase 3 (END): advance to the next thread's chunk and loop.  */
9667 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
9668 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
9670 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
9671 gimple_seq_add_stmt (end
, g
);
9672 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
/* Unregister with the matching runtime entry point.  */
9673 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9675 enum built_in_function bfn
9676 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
9677 t
= builtin_decl_explicit (bfn
);
9678 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
9682 arg
= create_tmp_var (c_bool_type
);
9683 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
9687 arg
= build_int_cst (c_bool_type
, 0);
9688 g
= gimple_build_call (t
, 1, arg
);
9692 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
9693 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
9695 gimple_seq_add_stmt (end
, g
);
9697 gimple_seq_add_stmt (end
, gimple_build_label (lab7
));
/* Finally clobber avar (volatile empty constructor) to end its life.  */
9698 t
= build_constructor (atype
, NULL
);
9699 TREE_THIS_VOLATILE (t
) = 1;
9700 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
9703 /* Expand code for an OpenMP taskgroup directive. */
/* NOTE(review): mangled extraction -- return type, `bind`/`x` declarations,
   braces and the GOMP_taskgroup_start call arguments (original lines
   9704-9707, 9709-9710, 9713, 9721) are missing.  Visible flow: wrap the
   directive in a GIMPLE_BIND, call GOMP_taskgroup_start, register any
   task reductions, lower the body, then append the reduction teardown.  */
9706 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9708 gimple
*stmt
= gsi_stmt (*gsi_p
);
9711 gimple_seq dseq
= NULL
;
9712 tree block
= make_node (BLOCK
);
9714 bind
= gimple_build_bind (NULL
, NULL
, block
);
9715 gsi_replace (gsi_p
, bind
, true);
9716 gimple_bind_add_stmt (bind
, stmt
);
9718 push_gimplify_context ();
9720 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
9722 gimple_bind_add_stmt (bind
, x
);
/* Registration goes into the bind body; teardown collects into dseq.  */
9724 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
9725 gimple_omp_taskgroup_clauses (stmt
),
9726 gimple_bind_body_ptr (bind
), &dseq
);
9728 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9729 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9730 gimple_omp_set_body (stmt
, NULL
);
9732 gimple_bind_add_seq (bind
, dseq
);
9734 pop_gimplify_context (bind
);
9736 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9737 BLOCK_VARS (block
) = ctx
->block_vars
;
9741 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
/* NOTE(review): mangled extraction -- the return type, the trailing
   `omp_context *ctx` parameter text, braces, several `continue`/`break`
   statements and parts of conditions are missing per the embedded line
   numbers.  Comments below mark only the phases the visible code shows.  */
9744 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
9747 struct omp_for_data fd
;
/* Only meaningful directly inside a GIMPLE_OMP_FOR.  */
9748 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
9751 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
9752 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
9753 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
9757 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9758 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
9759 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
9760 && OMP_CLAUSE_DOACROSS_KIND (c
) == OMP_CLAUSE_DOACROSS_SINK
)
9762 /* Merge depend clauses from multiple adjacent
9763 #pragma omp ordered depend(sink:...) constructs
9764 into one #pragma omp ordered depend(sink:...), so that
9765 we can optimize them together. */
9766 gimple_stmt_iterator gsi
= *gsi_p
;
9768 while (!gsi_end_p (gsi
))
9770 gimple
*stmt
= gsi_stmt (gsi
);
9771 if (is_gimple_debug (stmt
)
9772 || gimple_code (stmt
) == GIMPLE_NOP
)
9777 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
9779 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
9780 c
= gimple_omp_ordered_clauses (ord_stmt2
);
9782 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DOACROSS
9783 || OMP_CLAUSE_DOACROSS_KIND (c
) != OMP_CLAUSE_DOACROSS_SINK
)
/* Splice the neighbour's clauses onto ours and drop its statement.  */
9786 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
9788 gsi_remove (&gsi
, true);
9792 /* Canonicalize sink dependence clauses into one folded clause if
9795 The basic algorithm is to create a sink vector whose first
9796 element is the GCD of all the first elements, and whose remaining
9797 elements are the minimum of the subsequent columns.
9799 We ignore dependence vectors whose first element is zero because
9800 such dependencies are known to be executed by the same thread.
9802 We take into account the direction of the loop, so a minimum
9803 becomes a maximum if the loop is iterating forwards. We also
9804 ignore sink clauses where the loop direction is unknown, or where
9805 the offsets are clearly invalid because they are not a multiple
9806 of the loop increment.
9810 #pragma omp for ordered(2)
9811 for (i=0; i < N; ++i)
9812 for (j=0; j < M; ++j)
9814 #pragma omp ordered \
9815 depend(sink:i-8,j-2) \
9816 depend(sink:i,j-1) \ // Completely ignored because i+0.
9817 depend(sink:i-4,j-3) \
9818 depend(sink:i-6,j-4)
9819 #pragma omp ordered depend(source)
9824 depend(sink:-gcd(8,4,6),-min(2,3,4))
9829 /* FIXME: Computing GCD's where the first element is zero is
9830 non-trivial in the presence of collapsed loops. Do this later. */
9831 if (fd
.collapse
> 1)
9834 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
9836 /* wide_int is not a POD so it must be default-constructed. */
9837 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
9838 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
9840 tree folded_dep
= NULL_TREE
;
9841 /* TRUE if the first dimension's offset is negative. */
9842 bool neg_offset_p
= false;
9844 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
/* Fold every sink clause into folded_dep / folded_deps.  */
9846 while ((c
= *list_p
) != NULL
)
9848 bool remove
= false;
9850 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
);
9851 if (OMP_CLAUSE_DOACROSS_KIND (c
) != OMP_CLAUSE_DOACROSS_SINK
)
9852 goto next_ordered_clause
;
/* `vec`/`i` declared on missing lines -- TODO confirm upstream.  */
9855 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
9856 vec
&& TREE_CODE (vec
) == TREE_LIST
;
9857 vec
= TREE_CHAIN (vec
), ++i
)
9859 gcc_assert (i
< len
);
9861 /* omp_extract_for_data has canonicalized the condition. */
9862 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
9863 || fd
.loops
[i
].cond_code
== GT_EXPR
);
9864 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
9865 bool maybe_lexically_later
= true;
9867 /* While the committee makes up its mind, bail if we have any
9868 non-constant steps. */
9869 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
9870 goto lower_omp_ordered_ret
;
9872 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
9873 if (POINTER_TYPE_P (itype
))
9875 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
9876 TYPE_PRECISION (itype
),
9879 /* Ignore invalid offsets that are not multiples of the step. */
9880 if (!wi::multiple_of_p (wi::abs (offset
),
9881 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
9884 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
9885 "ignoring sink clause with offset that is not "
9886 "a multiple of the loop step");
9888 goto next_ordered_clause
;
9891 /* Calculate the first dimension. The first dimension of
9892 the folded dependency vector is the GCD of the first
9893 elements, while ignoring any first elements whose offset
9897 /* Ignore dependence vectors whose first dimension is 0. */
9901 goto next_ordered_clause
;
9905 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
9907 error_at (OMP_CLAUSE_LOCATION (c
),
9908 "first offset must be in opposite direction "
9909 "of loop iterations");
9910 goto lower_omp_ordered_ret
;
9914 neg_offset_p
= forward
;
9915 /* Initialize the first time around. */
9916 if (folded_dep
== NULL_TREE
)
9919 folded_deps
[0] = offset
;
9922 folded_deps
[0] = wi::gcd (folded_deps
[0],
9926 /* Calculate minimum for the remaining dimensions. */
9929 folded_deps
[len
+ i
- 1] = offset
;
9930 if (folded_dep
== c
)
9931 folded_deps
[i
] = offset
;
9932 else if (maybe_lexically_later
9933 && !wi::eq_p (folded_deps
[i
], offset
))
9935 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
9939 for (j
= 1; j
<= i
; j
++)
9940 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
9943 maybe_lexically_later
= false;
9947 gcc_assert (i
== len
);
9951 next_ordered_clause
:
/* Either unlink the folded-away clause or step over a kept one
   (the `if (remove)` guard is on a missing line -- confirm upstream).  */
9953 *list_p
= OMP_CLAUSE_CHAIN (c
);
9955 list_p
= &OMP_CLAUSE_CHAIN (c
);
/* Write the folded vector back into the surviving clause.  */
9961 folded_deps
[0] = -folded_deps
[0];
9963 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
9964 if (POINTER_TYPE_P (itype
))
9967 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
9968 = wide_int_to_tree (itype
, folded_deps
[0]);
9969 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
9970 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
9973 lower_omp_ordered_ret
:
9975 /* Ordered without clauses is #pragma omp threads, while we want
9976 a nop instead if we remove all clauses. */
9977 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
9978 gsi_replace (gsi_p
, gimple_build_nop (), true);
9982 /* Expand code for an OpenMP ordered directive. */
9985 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9988 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
9989 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
9992 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9994 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9997 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
9998 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9999 OMP_CLAUSE_THREADS
);
10001 if (gimple_omp_ordered_standalone_p (ord_stmt
))
10003 /* FIXME: This is needs to be moved to the expansion to verify various
10004 conditions only testable on cfg with dominators computed, and also
10005 all the depend clauses to be merged still might need to be available
10006 for the runtime checks. */
10008 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
10012 push_gimplify_context ();
10014 block
= make_node (BLOCK
);
10015 bind
= gimple_build_bind (NULL
, NULL
, block
);
10016 gsi_replace (gsi_p
, bind
, true);
10017 gimple_bind_add_stmt (bind
, stmt
);
10021 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
10022 build_int_cst (NULL_TREE
, threads
));
10023 cfun
->has_simduid_loops
= true;
10026 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
10028 gimple_bind_add_stmt (bind
, x
);
10030 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
10033 counter
= create_tmp_var (integer_type_node
);
10034 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
10035 gimple_call_set_lhs (g
, counter
);
10036 gimple_bind_add_stmt (bind
, g
);
10038 body
= create_artificial_label (UNKNOWN_LOCATION
);
10039 test
= create_artificial_label (UNKNOWN_LOCATION
);
10040 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
10042 tree simt_pred
= create_tmp_var (integer_type_node
);
10043 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
10044 gimple_call_set_lhs (g
, simt_pred
);
10045 gimple_bind_add_stmt (bind
, g
);
10047 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
10048 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
10049 gimple_bind_add_stmt (bind
, g
);
10051 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
10053 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10054 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
10055 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
10056 gimple_omp_set_body (stmt
, NULL
);
10060 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
10061 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
10062 gimple_bind_add_stmt (bind
, g
);
10064 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
10065 tree nonneg
= create_tmp_var (integer_type_node
);
10066 gimple_seq tseq
= NULL
;
10067 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
10068 gimple_bind_add_seq (bind
, tseq
);
10070 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
10071 gimple_call_set_lhs (g
, nonneg
);
10072 gimple_bind_add_stmt (bind
, g
);
10074 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
10075 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
10076 gimple_bind_add_stmt (bind
, g
);
10078 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
10081 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
10082 build_int_cst (NULL_TREE
, threads
));
10084 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
10086 gimple_bind_add_stmt (bind
, x
);
10088 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
10090 pop_gimplify_context (bind
);
10092 gimple_bind_append_vars (bind
, ctx
->block_vars
);
10093 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
10097 /* Expand code for an OpenMP scan directive and the structured block
10098 before the scan directive. */
10101 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10103 gimple
*stmt
= gsi_stmt (*gsi_p
);
10105 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
10106 tree lane
= NULL_TREE
;
10107 gimple_seq before
= NULL
;
10108 omp_context
*octx
= ctx
->outer
;
10110 if (octx
->scan_exclusive
&& !has_clauses
)
10112 gimple_stmt_iterator gsi2
= *gsi_p
;
10114 gimple
*stmt2
= gsi_stmt (gsi2
);
10115 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10116 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10117 the one with exclusive clause(s), comes first. */
10119 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
10120 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
10122 gsi_remove (gsi_p
, false);
10123 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
10124 ctx
= maybe_lookup_ctx (stmt2
);
10126 lower_omp_scan (gsi_p
, ctx
);
10131 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
10132 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
10133 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
10134 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
10135 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
10136 && !gimple_omp_for_combined_p (octx
->stmt
));
10137 bool is_for_simd
= is_simd
&& gimple_omp_for_combined_into_p (octx
->stmt
);
10138 if (is_for_simd
&& octx
->for_simd_scan_phase
)
10141 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
10142 OMP_CLAUSE__SIMDUID_
))
10144 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
10145 lane
= create_tmp_var (unsigned_type_node
);
10146 tree t
= build_int_cst (integer_type_node
,
10148 : octx
->scan_inclusive
? 2 : 3);
10150 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
10151 gimple_call_set_lhs (g
, lane
);
10152 gimple_seq_add_stmt (&before
, g
);
10155 if (is_simd
|| is_for
)
10157 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
10158 c
; c
= OMP_CLAUSE_CHAIN (c
))
10159 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10160 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
10162 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10163 tree var
= OMP_CLAUSE_DECL (c
);
10164 tree new_var
= lookup_decl (var
, octx
);
10165 tree val
= new_var
;
10166 tree var2
= NULL_TREE
;
10167 tree var3
= NULL_TREE
;
10168 tree var4
= NULL_TREE
;
10169 tree lane0
= NULL_TREE
;
10170 tree new_vard
= new_var
;
10171 if (omp_privatize_by_reference (var
))
10173 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
10176 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
10178 val
= DECL_VALUE_EXPR (new_vard
);
10179 if (new_vard
!= new_var
)
10181 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
10182 val
= TREE_OPERAND (val
, 0);
10184 if (TREE_CODE (val
) == ARRAY_REF
10185 && VAR_P (TREE_OPERAND (val
, 0)))
10187 tree v
= TREE_OPERAND (val
, 0);
10188 if (lookup_attribute ("omp simd array",
10189 DECL_ATTRIBUTES (v
)))
10191 val
= unshare_expr (val
);
10192 lane0
= TREE_OPERAND (val
, 1);
10193 TREE_OPERAND (val
, 1) = lane
;
10194 var2
= lookup_decl (v
, octx
);
10195 if (octx
->scan_exclusive
)
10196 var4
= lookup_decl (var2
, octx
);
10198 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10199 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
10202 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
10203 var2
, lane
, NULL_TREE
, NULL_TREE
);
10204 TREE_THIS_NOTRAP (var2
) = 1;
10205 if (octx
->scan_exclusive
)
10207 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
10208 var4
, lane
, NULL_TREE
,
10210 TREE_THIS_NOTRAP (var4
) = 1;
10221 var2
= build_outer_var_ref (var
, octx
);
10222 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10224 var3
= maybe_lookup_decl (new_vard
, octx
);
10225 if (var3
== new_vard
|| var3
== NULL_TREE
)
10227 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
10229 var4
= maybe_lookup_decl (var3
, octx
);
10230 if (var4
== var3
|| var4
== NULL_TREE
)
10232 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
10243 && octx
->scan_exclusive
10245 && var4
== NULL_TREE
)
10246 var4
= create_tmp_var (TREE_TYPE (val
));
10248 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10250 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
10255 /* If we've added a separate identity element
10256 variable, copy it over into val. */
10257 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10259 gimplify_and_add (x
, &before
);
10261 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
10263 /* Otherwise, assign to it the identity element. */
10264 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
10266 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10267 tree ref
= build_outer_var_ref (var
, octx
);
10268 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10269 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10272 if (new_vard
!= new_var
)
10273 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10274 SET_DECL_VALUE_EXPR (new_vard
, val
);
10276 SET_DECL_VALUE_EXPR (placeholder
, ref
);
10277 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10278 lower_omp (&tseq
, octx
);
10280 SET_DECL_VALUE_EXPR (new_vard
, x
);
10281 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10282 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10283 gimple_seq_add_seq (&before
, tseq
);
10285 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
10291 if (octx
->scan_exclusive
)
10293 tree v4
= unshare_expr (var4
);
10294 tree v2
= unshare_expr (var2
);
10295 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
10296 gimplify_and_add (x
, &before
);
10298 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10299 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10300 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10302 if (x
&& new_vard
!= new_var
)
10303 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
10305 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
10306 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10307 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10308 lower_omp (&tseq
, octx
);
10309 gimple_seq_add_seq (&before
, tseq
);
10310 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
10312 SET_DECL_VALUE_EXPR (new_vard
, x
);
10313 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10314 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10315 if (octx
->scan_inclusive
)
10317 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10319 gimplify_and_add (x
, &before
);
10321 else if (lane0
== NULL_TREE
)
10323 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10325 gimplify_and_add (x
, &before
);
10333 /* input phase. Set val to initializer before
10335 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
10336 gimplify_assign (val
, x
, &before
);
10341 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
10342 if (code
== MINUS_EXPR
)
10345 tree x
= build2 (code
, TREE_TYPE (var2
),
10346 unshare_expr (var2
), unshare_expr (val
));
10347 if (octx
->scan_inclusive
)
10349 gimplify_assign (unshare_expr (var2
), x
, &before
);
10350 gimplify_assign (val
, var2
, &before
);
10354 gimplify_assign (unshare_expr (var4
),
10355 unshare_expr (var2
), &before
);
10356 gimplify_assign (var2
, x
, &before
);
10357 if (lane0
== NULL_TREE
)
10358 gimplify_assign (val
, var4
, &before
);
10362 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
10364 tree vexpr
= unshare_expr (var4
);
10365 TREE_OPERAND (vexpr
, 1) = lane0
;
10366 if (new_vard
!= new_var
)
10367 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
10368 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
10372 if (is_simd
&& !is_for_simd
)
10374 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
10375 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
10376 gsi_replace (gsi_p
, gimple_build_nop (), true);
10379 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
10382 gimple_stmt_iterator gsi
= gsi_start (*gimple_omp_body_ptr (stmt
));
10383 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
10388 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10389 substitution of a couple of function calls. But in the NAMED case,
10390 requires that languages coordinate a symbol name. It is therefore
10391 best put here in common code. */
10393 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
10396 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10399 tree name
, lock
, unlock
;
10400 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
10402 location_t loc
= gimple_location (stmt
);
10405 name
= gimple_omp_critical_name (stmt
);
10410 if (!critical_name_mutexes
)
10411 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
10413 tree
*n
= critical_name_mutexes
->get (name
);
10418 decl
= create_tmp_var_raw (ptr_type_node
);
10420 new_str
= ACONCAT ((".gomp_critical_user_",
10421 IDENTIFIER_POINTER (name
), NULL
));
10422 DECL_NAME (decl
) = get_identifier (new_str
);
10423 TREE_PUBLIC (decl
) = 1;
10424 TREE_STATIC (decl
) = 1;
10425 DECL_COMMON (decl
) = 1;
10426 DECL_ARTIFICIAL (decl
) = 1;
10427 DECL_IGNORED_P (decl
) = 1;
10429 varpool_node::finalize_decl (decl
);
10431 critical_name_mutexes
->put (name
, decl
);
10436 /* If '#pragma omp critical' is inside offloaded region or
10437 inside function marked as offloadable, the symbol must be
10438 marked as offloadable too. */
10440 if (cgraph_node::get (current_function_decl
)->offloadable
)
10441 varpool_node::get_create (decl
)->offloadable
= 1;
10443 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
10444 if (is_gimple_omp_offloaded (octx
->stmt
))
10446 varpool_node::get_create (decl
)->offloadable
= 1;
10450 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
10451 lock
= build_call_expr_loc (loc
, lock
, 1,
10452 build_fold_addr_expr_loc (loc
, decl
));
10454 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
10455 unlock
= build_call_expr_loc (loc
, unlock
, 1,
10456 build_fold_addr_expr_loc (loc
, decl
));
10460 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
10461 lock
= build_call_expr_loc (loc
, lock
, 0);
10463 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
10464 unlock
= build_call_expr_loc (loc
, unlock
, 0);
10467 push_gimplify_context ();
10469 block
= make_node (BLOCK
);
10470 bind
= gimple_build_bind (NULL
, NULL
, block
);
10471 gsi_replace (gsi_p
, bind
, true);
10472 gimple_bind_add_stmt (bind
, stmt
);
10474 tbody
= gimple_bind_body (bind
);
10475 gimplify_and_add (lock
, &tbody
);
10476 gimple_bind_set_body (bind
, tbody
);
10478 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10479 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
10480 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
10481 gimple_omp_set_body (stmt
, NULL
);
10483 tbody
= gimple_bind_body (bind
);
10484 gimplify_and_add (unlock
, &tbody
);
10485 gimple_bind_set_body (bind
, tbody
);
10487 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
10489 pop_gimplify_context (bind
);
10490 gimple_bind_append_vars (bind
, ctx
->block_vars
);
10491 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
10494 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10495 for a lastprivate clause. Given a loop control predicate of (V
10496 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10497 is appended to *DLIST, iterator initialization is appended to
10498 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10499 to be emitted in a critical section. */
10502 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
10503 gimple_seq
*dlist
, gimple_seq
*clist
,
10504 struct omp_context
*ctx
)
10506 tree clauses
, cond
, vinit
;
10507 enum tree_code cond_code
;
10510 cond_code
= fd
->loop
.cond_code
;
10511 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
10513 /* When possible, use a strict equality expression. This can let VRP
10514 type optimizations deduce the value and remove a copy. */
10515 if (tree_fits_shwi_p (fd
->loop
.step
))
10517 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
10518 if (step
== 1 || step
== -1)
10519 cond_code
= EQ_EXPR
;
10522 tree n2
= fd
->loop
.n2
;
10523 if (fd
->collapse
> 1
10524 && TREE_CODE (n2
) != INTEGER_CST
10525 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
10527 struct omp_context
*taskreg_ctx
= NULL
;
10528 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
10530 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
10531 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
10532 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
10534 if (gimple_omp_for_combined_into_p (gfor
))
10536 gcc_assert (ctx
->outer
->outer
10537 && is_parallel_ctx (ctx
->outer
->outer
));
10538 taskreg_ctx
= ctx
->outer
->outer
;
10542 struct omp_for_data outer_fd
;
10543 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
10544 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
10547 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
10548 taskreg_ctx
= ctx
->outer
->outer
;
10550 else if (is_taskreg_ctx (ctx
->outer
))
10551 taskreg_ctx
= ctx
->outer
;
10555 tree taskreg_clauses
10556 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
10557 tree innerc
= omp_find_clause (taskreg_clauses
,
10558 OMP_CLAUSE__LOOPTEMP_
);
10559 gcc_assert (innerc
);
10560 int count
= fd
->collapse
;
10562 && fd
->last_nonrect
== fd
->first_nonrect
+ 1)
10563 if (tree v
= gimple_omp_for_index (fd
->for_stmt
, fd
->last_nonrect
))
10564 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
10566 for (i
= 0; i
< count
; i
++)
10568 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10569 OMP_CLAUSE__LOOPTEMP_
);
10570 gcc_assert (innerc
);
10572 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10573 OMP_CLAUSE__LOOPTEMP_
);
10575 n2
= fold_convert (TREE_TYPE (n2
),
10576 lookup_decl (OMP_CLAUSE_DECL (innerc
),
10580 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
10582 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
10584 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
10585 if (!gimple_seq_empty_p (stmts
))
10587 gimple_seq_add_seq (&stmts
, *dlist
);
10590 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10591 vinit
= fd
->loop
.n1
;
10592 if (cond_code
== EQ_EXPR
10593 && tree_fits_shwi_p (fd
->loop
.n2
)
10594 && ! integer_zerop (fd
->loop
.n2
))
10595 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
10597 vinit
= unshare_expr (vinit
);
10599 /* Initialize the iterator variable, so that threads that don't execute
10600 any iterations don't execute the lastprivate clauses by accident. */
10601 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
/* OpenACC privatization.

   Or, in other words, *sharing* at the respective OpenACC level of
   parallelism.

   From a correctness perspective, a non-addressable variable can't be accessed
   outside the current thread, so it can go in a (faster than shared memory)
   register -- though that register may need to be broadcast in some
   circumstances.  A variable can only meaningfully be "shared" across workers
   or vector lanes if its address is taken, e.g. by a call to an atomic
   builtin.

   From an optimisation perspective, the answer might be fuzzier: maybe
   sometimes, using shared memory directly would be faster than
   broadcast.  */
10622 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags
,
10623 const location_t loc
, const tree c
,
10626 const dump_user_location_t d_u_loc
10627 = dump_user_location_t::from_location_t (loc
);
10628 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10630 # pragma GCC diagnostic push
10631 # pragma GCC diagnostic ignored "-Wformat"
10633 dump_printf_loc (l_dump_flags
, d_u_loc
,
10634 "variable %<%T%> ", decl
);
10636 # pragma GCC diagnostic pop
10639 dump_printf (l_dump_flags
,
10641 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
10643 dump_printf (l_dump_flags
,
10644 "declared in block ");
10648 oacc_privatization_candidate_p (const location_t loc
, const tree c
,
10651 dump_flags_t l_dump_flags
= get_openacc_privatization_dump_flags ();
10653 /* There is some differentiation depending on block vs. clause. */
10658 if (res
&& !VAR_P (decl
))
10660 /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
10661 privatized into a new VAR_DECL. */
10662 gcc_checking_assert (TREE_CODE (decl
) != PARM_DECL
);
10666 if (dump_enabled_p ())
10668 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10669 dump_printf (l_dump_flags
,
10670 "potentially has improper OpenACC privatization level: %qs\n",
10671 get_tree_code_name (TREE_CODE (decl
)));
10675 if (res
&& block
&& TREE_STATIC (decl
))
10679 if (dump_enabled_p ())
10681 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10682 dump_printf (l_dump_flags
,
10683 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10688 if (res
&& block
&& DECL_EXTERNAL (decl
))
10692 if (dump_enabled_p ())
10694 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10695 dump_printf (l_dump_flags
,
10696 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10701 if (res
&& !TREE_ADDRESSABLE (decl
))
10705 if (dump_enabled_p ())
10707 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10708 dump_printf (l_dump_flags
,
10709 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10710 "not addressable");
10714 /* If an artificial variable has been added to a bind, e.g.
10715 a compiler-generated temporary structure used by the Fortran front-end, do
10716 not consider it as a privatization candidate. Note that variables on
10717 the stack are private per-thread by default: making them "gang-private"
10718 for OpenACC actually means to share a single instance of a variable
10719 amongst all workers and threads spawned within each gang.
10720 At present, no compiler-generated artificial variables require such
10721 sharing semantics, so this is safe. */
10723 if (res
&& block
&& DECL_ARTIFICIAL (decl
))
10727 if (dump_enabled_p ())
10729 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10730 dump_printf (l_dump_flags
,
10731 "isn%'t candidate for adjusting OpenACC privatization "
10732 "level: %s\n", "artificial");
10738 if (dump_enabled_p ())
10740 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10741 dump_printf (l_dump_flags
,
10742 "is candidate for adjusting OpenACC privatization level\n");
10746 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10748 print_generic_decl (dump_file
, decl
, dump_flags
);
10749 fprintf (dump_file
, "\n");
10755 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10759 oacc_privatization_scan_clause_chain (omp_context
*ctx
, tree clauses
)
10761 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10762 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
)
10764 tree decl
= OMP_CLAUSE_DECL (c
);
10766 tree new_decl
= lookup_decl (decl
, ctx
);
10768 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c
), c
,
10772 gcc_checking_assert
10773 (!ctx
->oacc_privatization_candidates
.contains (new_decl
));
10774 ctx
->oacc_privatization_candidates
.safe_push (new_decl
);
10778 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10782 oacc_privatization_scan_decl_chain (omp_context
*ctx
, tree decls
)
10784 for (tree decl
= decls
; decl
; decl
= DECL_CHAIN (decl
))
10786 tree new_decl
= lookup_decl (decl
, ctx
);
10787 gcc_checking_assert (new_decl
== decl
);
10789 if (!oacc_privatization_candidate_p (gimple_location (ctx
->stmt
), NULL
,
10793 gcc_checking_assert
10794 (!ctx
->oacc_privatization_candidates
.contains (new_decl
));
10795 ctx
->oacc_privatization_candidates
.safe_push (new_decl
);
10799 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10802 omp_find_scan (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
10803 struct walk_stmt_info
*wi
)
10805 gimple
*stmt
= gsi_stmt (*gsi_p
);
10807 *handled_ops_p
= true;
10808 switch (gimple_code (stmt
))
10812 case GIMPLE_OMP_FOR
:
10813 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
10814 && gimple_omp_for_combined_into_p (stmt
))
10815 *handled_ops_p
= false;
10818 case GIMPLE_OMP_SCAN
:
10819 *(gimple_stmt_iterator
*) (wi
->info
) = *gsi_p
;
10820 return integer_zero_node
;
10827 /* Helper function for lower_omp_for, add transformations for a worksharing
10828 loop with scan directives inside of it.
10829 For worksharing loop not combined with simd, transform:
10830 #pragma omp for reduction(inscan,+:r) private(i)
10831 for (i = 0; i < n; i = i + 1)
10836 #pragma omp scan inclusive(r)
10842 into two worksharing loops + code to merge results:
10844 num_threads = omp_get_num_threads ();
10845 thread_num = omp_get_thread_num ();
10846 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10851 // For UDRs this is UDR init, or if ctors are needed, copy from
10852 // var3 that has been constructed to contain the neutral element.
10856 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10857 // a shared array with num_threads elements and rprivb to a local array
10858 // number of elements equal to the number of (contiguous) iterations the
10859 // current thread will perform. controlb and controlp variables are
10860 // temporaries to handle deallocation of rprivb at the end of second
10862 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10863 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10864 for (i = 0; i < n; i = i + 1)
10867 // For UDRs this is UDR init or copy from var3.
10869 // This is the input phase from user code.
10873 // For UDRs this is UDR merge.
10875 // Rather than handing it over to the user, save to local thread's
10877 rprivb[ivar] = var2;
10878 // For exclusive scan, the above two statements are swapped.
10882 // And remember the final value from this thread's into the shared
10884 rpriva[(sizetype) thread_num] = var2;
10885 // If more than one thread, compute using Work-Efficient prefix sum
10886 // the inclusive parallel scan of the rpriva array.
10887 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10892 num_threadsu = (unsigned int) num_threads;
10893 thread_numup1 = (unsigned int) thread_num + 1;
10896 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10900 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10905 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10906 mul = REALPART_EXPR <cplx>;
10907 ovf = IMAGPART_EXPR <cplx>;
10908 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10911 andvm1 = andv + 4294967295;
10913 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10915 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10916 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10917 rpriva[l] = rpriva[l - k] + rpriva[l];
10919 if (down == 0) goto <D.2121>; else goto <D.2122>;
10927 if (k != 0) goto <D.2108>; else goto <D.2103>;
10929 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10931 // For UDRs this is UDR init or copy from var3.
10935 var2 = rpriva[thread_num - 1];
10938 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10939 reduction(inscan,+:r) private(i)
10940 for (i = 0; i < n; i = i + 1)
10943 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10944 r = var2 + rprivb[ivar];
10947 // This is the scan phase from user code.
10949 // Plus a bump of the iterator.
10955 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
10956 struct omp_for_data
*fd
, omp_context
*ctx
)
10958 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
10959 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
10961 gimple_seq body
= gimple_omp_body (stmt
);
10962 gimple_stmt_iterator input1_gsi
= gsi_none ();
10963 struct walk_stmt_info wi
;
10964 memset (&wi
, 0, sizeof (wi
));
10965 wi
.val_only
= true;
10966 wi
.info
= (void *) &input1_gsi
;
10967 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
10968 gcc_assert (!gsi_end_p (input1_gsi
));
10970 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
10971 gimple_stmt_iterator gsi
= input1_gsi
;
10973 gimple_stmt_iterator scan1_gsi
= gsi
;
10974 gimple
*scan_stmt1
= gsi_stmt (gsi
);
10975 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
10977 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
10978 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
10979 gimple_omp_set_body (input_stmt1
, NULL
);
10980 gimple_omp_set_body (scan_stmt1
, NULL
);
10981 gimple_omp_set_body (stmt
, NULL
);
10983 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
10984 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
10985 gimple_omp_set_body (stmt
, body
);
10986 gimple_omp_set_body (input_stmt1
, input_body
);
10988 gimple_stmt_iterator input2_gsi
= gsi_none ();
10989 memset (&wi
, 0, sizeof (wi
));
10990 wi
.val_only
= true;
10991 wi
.info
= (void *) &input2_gsi
;
10992 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
10993 gcc_assert (!gsi_end_p (input2_gsi
));
10995 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
10998 gimple_stmt_iterator scan2_gsi
= gsi
;
10999 gimple
*scan_stmt2
= gsi_stmt (gsi
);
11000 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
11001 gimple_omp_set_body (scan_stmt2
, scan_body
);
11003 gimple_stmt_iterator input3_gsi
= gsi_none ();
11004 gimple_stmt_iterator scan3_gsi
= gsi_none ();
11005 gimple_stmt_iterator input4_gsi
= gsi_none ();
11006 gimple_stmt_iterator scan4_gsi
= gsi_none ();
11007 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
11008 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
11009 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
11012 memset (&wi
, 0, sizeof (wi
));
11013 wi
.val_only
= true;
11014 wi
.info
= (void *) &input3_gsi
;
11015 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
11016 gcc_assert (!gsi_end_p (input3_gsi
));
11018 input_stmt3
= gsi_stmt (input3_gsi
);
11022 scan_stmt3
= gsi_stmt (gsi
);
11023 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
11025 memset (&wi
, 0, sizeof (wi
));
11026 wi
.val_only
= true;
11027 wi
.info
= (void *) &input4_gsi
;
11028 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
11029 gcc_assert (!gsi_end_p (input4_gsi
));
11031 input_stmt4
= gsi_stmt (input4_gsi
);
11035 scan_stmt4
= gsi_stmt (gsi
);
11036 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
11038 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
11039 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
11042 tree num_threads
= create_tmp_var (integer_type_node
);
11043 tree thread_num
= create_tmp_var (integer_type_node
);
11044 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
11045 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
11046 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
11047 gimple_call_set_lhs (g
, num_threads
);
11048 gimple_seq_add_stmt (body_p
, g
);
11049 g
= gimple_build_call (threadnum_decl
, 0);
11050 gimple_call_set_lhs (g
, thread_num
);
11051 gimple_seq_add_stmt (body_p
, g
);
11053 tree ivar
= create_tmp_var (sizetype
);
11054 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
11055 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
11056 tree k
= create_tmp_var (unsigned_type_node
);
11057 tree l
= create_tmp_var (unsigned_type_node
);
11059 gimple_seq clist
= NULL
, mdlist
= NULL
;
11060 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
11061 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
11062 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
11063 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
11064 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11065 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11066 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
11068 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
11069 tree var
= OMP_CLAUSE_DECL (c
);
11070 tree new_var
= lookup_decl (var
, ctx
);
11071 tree var3
= NULL_TREE
;
11072 tree new_vard
= new_var
;
11073 if (omp_privatize_by_reference (var
))
11074 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
11075 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
11077 var3
= maybe_lookup_decl (new_vard
, ctx
);
11078 if (var3
== new_vard
)
11082 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
11083 tree rpriva
= create_tmp_var (ptype
);
11084 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
11085 OMP_CLAUSE_DECL (nc
) = rpriva
;
11087 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11089 tree rprivb
= create_tmp_var (ptype
);
11090 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
11091 OMP_CLAUSE_DECL (nc
) = rprivb
;
11092 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
11094 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11096 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
11097 if (new_vard
!= new_var
)
11098 TREE_ADDRESSABLE (var2
) = 1;
11099 gimple_add_tmp_var (var2
);
11101 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
11102 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11103 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11104 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11105 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11107 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
11108 thread_num
, integer_minus_one_node
);
11109 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
11110 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11111 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11112 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11113 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11115 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
11116 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11117 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11118 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11119 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11121 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
11122 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
11123 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11124 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11125 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11126 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11128 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
11129 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11130 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
11131 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11133 tree var4
= is_for_simd
? new_var
: var2
;
11134 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
11137 var5
= lookup_decl (var
, input_simd_ctx
);
11138 var6
= lookup_decl (var
, scan_simd_ctx
);
11139 if (new_vard
!= new_var
)
11141 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
11142 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
11145 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
11147 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
11150 x
= lang_hooks
.decls
.omp_clause_default_ctor
11151 (c
, var2
, build_outer_var_ref (var
, ctx
));
11153 gimplify_and_add (x
, &clist
);
11155 x
= build_outer_var_ref (var
, ctx
);
11156 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
11158 gimplify_and_add (x
, &thr01_list
);
11160 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
11161 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
11164 x
= unshare_expr (var4
);
11165 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11166 gimplify_and_add (x
, &thrn1_list
);
11167 x
= unshare_expr (var4
);
11168 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11169 gimplify_and_add (x
, &thr02_list
);
11171 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
11173 /* Otherwise, assign to it the identity element. */
11174 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11175 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11178 if (new_vard
!= new_var
)
11179 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11180 SET_DECL_VALUE_EXPR (new_vard
, val
);
11181 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11183 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
11184 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11185 lower_omp (&tseq
, ctx
);
11186 gimple_seq_add_seq (&thrn1_list
, tseq
);
11187 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11188 lower_omp (&tseq
, ctx
);
11189 gimple_seq_add_seq (&thr02_list
, tseq
);
11190 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11191 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11192 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
11194 SET_DECL_VALUE_EXPR (new_vard
, y
);
11197 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11198 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11202 x
= unshare_expr (var4
);
11203 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
11204 gimplify_and_add (x
, &thrn2_list
);
11208 x
= unshare_expr (rprivb_ref
);
11209 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
11210 gimplify_and_add (x
, &scan1_list
);
11214 if (ctx
->scan_exclusive
)
11216 x
= unshare_expr (rprivb_ref
);
11217 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11218 gimplify_and_add (x
, &scan1_list
);
11221 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11222 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11223 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11224 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11225 lower_omp (&tseq
, ctx
);
11226 gimple_seq_add_seq (&scan1_list
, tseq
);
11228 if (ctx
->scan_inclusive
)
11230 x
= unshare_expr (rprivb_ref
);
11231 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11232 gimplify_and_add (x
, &scan1_list
);
11236 x
= unshare_expr (rpriva_ref
);
11237 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
11238 unshare_expr (var4
));
11239 gimplify_and_add (x
, &mdlist
);
11241 x
= unshare_expr (is_for_simd
? var6
: new_var
);
11242 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
11243 gimplify_and_add (x
, &input2_list
);
11246 if (new_vard
!= new_var
)
11247 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11249 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11250 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11251 SET_DECL_VALUE_EXPR (new_vard
, val
);
11252 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11255 SET_DECL_VALUE_EXPR (placeholder
, var6
);
11256 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11259 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11260 lower_omp (&tseq
, ctx
);
11262 SET_DECL_VALUE_EXPR (new_vard
, y
);
11265 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11266 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11270 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
11271 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11272 lower_omp (&tseq
, ctx
);
11274 gimple_seq_add_seq (&input2_list
, tseq
);
11276 x
= build_outer_var_ref (var
, ctx
);
11277 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
11278 gimplify_and_add (x
, &last_list
);
11280 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
11281 gimplify_and_add (x
, &reduc_list
);
11282 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11283 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11285 if (new_vard
!= new_var
)
11286 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11287 SET_DECL_VALUE_EXPR (new_vard
, val
);
11288 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11289 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11290 lower_omp (&tseq
, ctx
);
11291 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
11292 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11293 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11295 SET_DECL_VALUE_EXPR (new_vard
, y
);
11298 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11299 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11301 gimple_seq_add_seq (&reduc_list
, tseq
);
11302 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
11303 gimplify_and_add (x
, &reduc_list
);
11305 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
11307 gimplify_and_add (x
, dlist
);
11311 x
= build_outer_var_ref (var
, ctx
);
11312 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
11314 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
11315 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
11317 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
11319 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
11321 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
11322 if (code
== MINUS_EXPR
)
11326 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
11329 if (ctx
->scan_exclusive
)
11330 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11332 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
11333 gimplify_assign (var2
, x
, &scan1_list
);
11334 if (ctx
->scan_inclusive
)
11335 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11339 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
11342 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
11343 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
11345 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
11348 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
11349 unshare_expr (rprival_ref
));
11350 gimplify_assign (rprival_ref
, x
, &reduc_list
);
11354 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11355 gimple_seq_add_stmt (&scan1_list
, g
);
11356 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11357 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11358 ? scan_stmt4
: scan_stmt2
), g
);
11360 tree controlb
= create_tmp_var (boolean_type_node
);
11361 tree controlp
= create_tmp_var (ptr_type_node
);
11362 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11363 OMP_CLAUSE_DECL (nc
) = controlb
;
11364 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11366 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11367 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11368 OMP_CLAUSE_DECL (nc
) = controlp
;
11369 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11371 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11372 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11373 OMP_CLAUSE_DECL (nc
) = controlb
;
11374 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11376 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11377 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11378 OMP_CLAUSE_DECL (nc
) = controlp
;
11379 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11381 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11383 *cp1
= gimple_omp_for_clauses (stmt
);
11384 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
11385 *cp2
= gimple_omp_for_clauses (new_stmt
);
11386 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
11390 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
11391 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
11393 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
11395 gsi_remove (&input3_gsi
, true);
11396 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
11398 gsi_remove (&scan3_gsi
, true);
11399 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
11401 gsi_remove (&input4_gsi
, true);
11402 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
11404 gsi_remove (&scan4_gsi
, true);
11408 gimple_omp_set_body (scan_stmt1
, scan1_list
);
11409 gimple_omp_set_body (input_stmt2
, input2_list
);
11412 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
11414 gsi_remove (&input1_gsi
, true);
11415 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
11417 gsi_remove (&scan1_gsi
, true);
11418 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
11420 gsi_remove (&input2_gsi
, true);
11421 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
11423 gsi_remove (&scan2_gsi
, true);
11425 gimple_seq_add_seq (body_p
, clist
);
11427 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11428 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11429 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11430 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11431 gimple_seq_add_stmt (body_p
, g
);
11432 g
= gimple_build_label (lab1
);
11433 gimple_seq_add_stmt (body_p
, g
);
11434 gimple_seq_add_seq (body_p
, thr01_list
);
11435 g
= gimple_build_goto (lab3
);
11436 gimple_seq_add_stmt (body_p
, g
);
11437 g
= gimple_build_label (lab2
);
11438 gimple_seq_add_stmt (body_p
, g
);
11439 gimple_seq_add_seq (body_p
, thrn1_list
);
11440 g
= gimple_build_label (lab3
);
11441 gimple_seq_add_stmt (body_p
, g
);
11443 g
= gimple_build_assign (ivar
, size_zero_node
);
11444 gimple_seq_add_stmt (body_p
, g
);
11446 gimple_seq_add_stmt (body_p
, stmt
);
11447 gimple_seq_add_seq (body_p
, body
);
11448 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
11451 g
= gimple_build_omp_return (true);
11452 gimple_seq_add_stmt (body_p
, g
);
11453 gimple_seq_add_seq (body_p
, mdlist
);
11455 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11456 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11457 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
11458 gimple_seq_add_stmt (body_p
, g
);
11459 g
= gimple_build_label (lab1
);
11460 gimple_seq_add_stmt (body_p
, g
);
11462 g
= omp_build_barrier (NULL
);
11463 gimple_seq_add_stmt (body_p
, g
);
11465 tree down
= create_tmp_var (unsigned_type_node
);
11466 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
11467 gimple_seq_add_stmt (body_p
, g
);
11469 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
11470 gimple_seq_add_stmt (body_p
, g
);
11472 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
11473 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
11474 gimple_seq_add_stmt (body_p
, g
);
11476 tree thread_numu
= create_tmp_var (unsigned_type_node
);
11477 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
11478 gimple_seq_add_stmt (body_p
, g
);
11480 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
11481 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
11482 build_int_cst (unsigned_type_node
, 1));
11483 gimple_seq_add_stmt (body_p
, g
);
11485 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11486 g
= gimple_build_label (lab3
);
11487 gimple_seq_add_stmt (body_p
, g
);
11489 tree twok
= create_tmp_var (unsigned_type_node
);
11490 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11491 gimple_seq_add_stmt (body_p
, g
);
11493 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
11494 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
11495 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
11496 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
11497 gimple_seq_add_stmt (body_p
, g
);
11498 g
= gimple_build_label (lab4
);
11499 gimple_seq_add_stmt (body_p
, g
);
11500 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
11501 gimple_seq_add_stmt (body_p
, g
);
11502 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11503 gimple_seq_add_stmt (body_p
, g
);
11505 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
11506 gimple_seq_add_stmt (body_p
, g
);
11507 g
= gimple_build_label (lab6
);
11508 gimple_seq_add_stmt (body_p
, g
);
11510 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11511 gimple_seq_add_stmt (body_p
, g
);
11513 g
= gimple_build_label (lab5
);
11514 gimple_seq_add_stmt (body_p
, g
);
11516 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11517 gimple_seq_add_stmt (body_p
, g
);
11519 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
11520 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
11521 gimple_call_set_lhs (g
, cplx
);
11522 gimple_seq_add_stmt (body_p
, g
);
11523 tree mul
= create_tmp_var (unsigned_type_node
);
11524 g
= gimple_build_assign (mul
, REALPART_EXPR
,
11525 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
11526 gimple_seq_add_stmt (body_p
, g
);
11527 tree ovf
= create_tmp_var (unsigned_type_node
);
11528 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
11529 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
11530 gimple_seq_add_stmt (body_p
, g
);
11532 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
11533 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
11534 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
11536 gimple_seq_add_stmt (body_p
, g
);
11537 g
= gimple_build_label (lab7
);
11538 gimple_seq_add_stmt (body_p
, g
);
11540 tree andv
= create_tmp_var (unsigned_type_node
);
11541 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
11542 gimple_seq_add_stmt (body_p
, g
);
11543 tree andvm1
= create_tmp_var (unsigned_type_node
);
11544 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
11545 build_minus_one_cst (unsigned_type_node
));
11546 gimple_seq_add_stmt (body_p
, g
);
11548 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
11549 gimple_seq_add_stmt (body_p
, g
);
11551 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
11552 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
11553 gimple_seq_add_stmt (body_p
, g
);
11554 g
= gimple_build_label (lab9
);
11555 gimple_seq_add_stmt (body_p
, g
);
11556 gimple_seq_add_seq (body_p
, reduc_list
);
11557 g
= gimple_build_label (lab8
);
11558 gimple_seq_add_stmt (body_p
, g
);
11560 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
11561 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
11562 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
11563 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
11565 gimple_seq_add_stmt (body_p
, g
);
11566 g
= gimple_build_label (lab10
);
11567 gimple_seq_add_stmt (body_p
, g
);
11568 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
11569 gimple_seq_add_stmt (body_p
, g
);
11570 g
= gimple_build_goto (lab12
);
11571 gimple_seq_add_stmt (body_p
, g
);
11572 g
= gimple_build_label (lab11
);
11573 gimple_seq_add_stmt (body_p
, g
);
11574 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11575 gimple_seq_add_stmt (body_p
, g
);
11576 g
= gimple_build_label (lab12
);
11577 gimple_seq_add_stmt (body_p
, g
);
11579 g
= omp_build_barrier (NULL
);
11580 gimple_seq_add_stmt (body_p
, g
);
11582 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
11584 gimple_seq_add_stmt (body_p
, g
);
11586 g
= gimple_build_label (lab2
);
11587 gimple_seq_add_stmt (body_p
, g
);
11589 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11590 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11591 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11592 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11593 gimple_seq_add_stmt (body_p
, g
);
11594 g
= gimple_build_label (lab1
);
11595 gimple_seq_add_stmt (body_p
, g
);
11596 gimple_seq_add_seq (body_p
, thr02_list
);
11597 g
= gimple_build_goto (lab3
);
11598 gimple_seq_add_stmt (body_p
, g
);
11599 g
= gimple_build_label (lab2
);
11600 gimple_seq_add_stmt (body_p
, g
);
11601 gimple_seq_add_seq (body_p
, thrn2_list
);
11602 g
= gimple_build_label (lab3
);
11603 gimple_seq_add_stmt (body_p
, g
);
11605 g
= gimple_build_assign (ivar
, size_zero_node
);
11606 gimple_seq_add_stmt (body_p
, g
);
11607 gimple_seq_add_stmt (body_p
, new_stmt
);
11608 gimple_seq_add_seq (body_p
, new_body
);
11610 gimple_seq new_dlist
= NULL
;
11611 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11612 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11613 tree num_threadsm1
= create_tmp_var (integer_type_node
);
11614 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
11615 integer_minus_one_node
);
11616 gimple_seq_add_stmt (&new_dlist
, g
);
11617 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
11618 gimple_seq_add_stmt (&new_dlist
, g
);
11619 g
= gimple_build_label (lab1
);
11620 gimple_seq_add_stmt (&new_dlist
, g
);
11621 gimple_seq_add_seq (&new_dlist
, last_list
);
11622 g
= gimple_build_label (lab2
);
11623 gimple_seq_add_stmt (&new_dlist
, g
);
11624 gimple_seq_add_seq (&new_dlist
, *dlist
);
11625 *dlist
= new_dlist
;
11628 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11629 the addresses of variables to be made private at the surrounding
11630 parallelism level. Such functions appear in the gimple code stream in two
11631 forms, e.g. for a partitioned loop:
11633 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11634 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11635 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11636 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11638 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11639 not as part of a HEAD_MARK sequence:
11641 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11643 For such stand-alone appearances, the 3rd argument is always 0, denoting
11644 gang partitioning. */
11647 lower_oacc_private_marker (omp_context
*ctx
)
11649 if (ctx
->oacc_privatization_candidates
.length () == 0)
11652 auto_vec
<tree
, 5> args
;
11654 args
.quick_push (build_int_cst (integer_type_node
, IFN_UNIQUE_OACC_PRIVATE
));
11655 args
.quick_push (integer_zero_node
);
11656 args
.quick_push (integer_minus_one_node
);
11660 FOR_EACH_VEC_ELT (ctx
->oacc_privatization_candidates
, i
, decl
)
11662 gcc_checking_assert (TREE_ADDRESSABLE (decl
));
11663 tree addr
= build_fold_addr_expr (decl
);
11664 args
.safe_push (addr
);
11667 return gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
11670 /* Lower code for an OMP loop directive. */
11673 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11675 tree
*rhs_p
, block
;
11676 struct omp_for_data fd
, *fdp
= NULL
;
11677 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
11679 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
11680 gimple_seq cnt_list
= NULL
, clist
= NULL
;
11681 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
11684 push_gimplify_context ();
11686 if (is_gimple_omp_oacc (ctx
->stmt
))
11687 oacc_privatization_scan_clause_chain (ctx
, gimple_omp_for_clauses (stmt
));
11689 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
11691 block
= make_node (BLOCK
);
11692 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
11693 /* Replace at gsi right away, so that 'stmt' is no member
11694 of a sequence anymore as we're going to add to a different
11696 gsi_replace (gsi_p
, new_stmt
, true);
11698 /* Move declaration of temporaries in the loop body before we make
11700 omp_for_body
= gimple_omp_body (stmt
);
11701 if (!gimple_seq_empty_p (omp_for_body
)
11702 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
11705 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
11706 tree vars
= gimple_bind_vars (inner_bind
);
11707 if (is_gimple_omp_oacc (ctx
->stmt
))
11708 oacc_privatization_scan_decl_chain (ctx
, vars
);
11709 gimple_bind_append_vars (new_stmt
, vars
);
11710 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11711 keep them on the inner_bind and it's block. */
11712 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
11713 if (gimple_bind_block (inner_bind
))
11714 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
11717 if (gimple_omp_for_combined_into_p (stmt
))
11719 omp_extract_for_data (stmt
, &fd
, NULL
);
11722 /* We need two temporaries with fd.loop.v type (istart/iend)
11723 and then (fd.collapse - 1) temporaries with the same
11724 type for count2 ... countN-1 vars if not constant. */
11726 tree type
= fd
.iter_type
;
11727 if (fd
.collapse
> 1
11728 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
11729 count
+= fd
.collapse
- 1;
11731 tree type2
= NULL_TREE
;
11733 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
11734 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
11735 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
11737 tree clauses
= *pc
;
11738 if (fd
.collapse
> 1
11740 && fd
.last_nonrect
== fd
.first_nonrect
+ 1
11741 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
11742 if (tree v
= gimple_omp_for_index (stmt
, fd
.last_nonrect
))
11743 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
11745 v
= gimple_omp_for_index (stmt
, fd
.first_nonrect
);
11746 type2
= TREE_TYPE (v
);
11752 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
11753 OMP_CLAUSE__LOOPTEMP_
);
11754 if (ctx
->simt_stmt
)
11755 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
11756 OMP_CLAUSE__LOOPTEMP_
);
11757 for (i
= 0; i
< count
+ count2
; i
++)
11762 gcc_assert (outerc
);
11763 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
11764 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
11765 OMP_CLAUSE__LOOPTEMP_
);
11769 /* If there are 2 adjacent SIMD stmts, one with _simt_
11770 clause, another without, make sure they have the same
11771 decls in _looptemp_ clauses, because the outer stmt
11772 they are combined into will look up just one inner_stmt. */
11773 if (ctx
->simt_stmt
)
11774 temp
= OMP_CLAUSE_DECL (simtc
);
11776 temp
= create_tmp_var (i
>= count
? type2
: type
);
11777 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
11779 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
11780 OMP_CLAUSE_DECL (*pc
) = temp
;
11781 pc
= &OMP_CLAUSE_CHAIN (*pc
);
11782 if (ctx
->simt_stmt
)
11783 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
11784 OMP_CLAUSE__LOOPTEMP_
);
11789 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11793 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
11794 OMP_CLAUSE_REDUCTION
);
11795 tree rtmp
= NULL_TREE
;
11798 tree type
= build_pointer_type (pointer_sized_int_node
);
11799 tree temp
= create_tmp_var (type
);
11800 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
11801 OMP_CLAUSE_DECL (c
) = temp
;
11802 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
11803 gimple_omp_for_set_clauses (stmt
, c
);
11804 lower_omp_task_reductions (ctx
, OMP_FOR
,
11805 gimple_omp_for_clauses (stmt
),
11806 &tred_ilist
, &tred_dlist
);
11808 rtmp
= make_ssa_name (type
);
11809 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
11812 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
11815 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
11817 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
11818 gimple_omp_for_pre_body (stmt
));
11820 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
11822 gcall
*private_marker
= NULL
;
11823 if (is_gimple_omp_oacc (ctx
->stmt
)
11824 && !gimple_seq_empty_p (omp_for_body
))
11825 private_marker
= lower_oacc_private_marker (ctx
);
11827 /* Lower the header expressions. At this point, we can assume that
11828 the header is of the form:
11830 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11832 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11833 using the .omp_data_s mapping, if needed. */
11834 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
11836 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
11837 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
11839 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
11840 TREE_VEC_ELT (*rhs_p
, 1)
11841 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
11842 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
11843 TREE_VEC_ELT (*rhs_p
, 2)
11844 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
11846 else if (!is_gimple_min_invariant (*rhs_p
))
11847 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11848 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
11849 recompute_tree_invariant_for_addr_expr (*rhs_p
);
11851 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
11852 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
11854 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
11855 TREE_VEC_ELT (*rhs_p
, 1)
11856 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
11857 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
11858 TREE_VEC_ELT (*rhs_p
, 2)
11859 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
11861 else if (!is_gimple_min_invariant (*rhs_p
))
11862 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11863 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
11864 recompute_tree_invariant_for_addr_expr (*rhs_p
);
11866 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
11867 if (!is_gimple_min_invariant (*rhs_p
))
11868 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11871 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
11873 gimple_seq_add_seq (&body
, cnt_list
);
11875 /* Once lowered, extract the bounds and clauses. */
11876 omp_extract_for_data (stmt
, &fd
, NULL
);
11878 if (is_gimple_omp_oacc (ctx
->stmt
)
11879 && !ctx_in_oacc_kernels_region (ctx
))
11880 lower_oacc_head_tail (gimple_location (stmt
),
11881 gimple_omp_for_clauses (stmt
), private_marker
,
11882 &oacc_head
, &oacc_tail
, ctx
);
11884 /* Add OpenACC partitioning and reduction markers just before the loop. */
11886 gimple_seq_add_seq (&body
, oacc_head
);
11888 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
11890 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
11891 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11892 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11893 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
11895 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
11896 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
11897 OMP_CLAUSE_LINEAR_STEP (c
)
11898 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
11902 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
11903 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
11904 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
11907 gimple_seq_add_stmt (&body
, stmt
);
11908 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
11911 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
11914 /* After the loop, add exit clauses. */
11915 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
11919 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
11920 gcall
*g
= gimple_build_call (fndecl
, 0);
11921 gimple_seq_add_stmt (&body
, g
);
11922 gimple_seq_add_seq (&body
, clist
);
11923 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
11924 g
= gimple_build_call (fndecl
, 0);
11925 gimple_seq_add_stmt (&body
, g
);
11928 if (ctx
->cancellable
)
11929 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
11931 gimple_seq_add_seq (&body
, dlist
);
11935 gimple_seq_add_seq (&tred_ilist
, body
);
11939 body
= maybe_catch_exception (body
);
11941 /* Region exit marker goes at the end of the loop body. */
11942 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
11943 gimple_seq_add_stmt (&body
, g
);
11945 gimple_seq_add_seq (&body
, tred_dlist
);
11947 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
11950 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
11952 /* Add OpenACC joining and reduction markers just after the loop. */
11954 gimple_seq_add_seq (&body
, oacc_tail
);
11956 pop_gimplify_context (new_stmt
);
11958 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
11959 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
11960 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
11961 if (BLOCK_VARS (block
))
11962 TREE_USED (block
) = 1;
11964 gimple_bind_set_body (new_stmt
, body
);
11965 gimple_omp_set_body (stmt
, NULL
);
11966 gimple_omp_for_set_pre_body (stmt
, NULL
);
11969 /* Callback for walk_stmts. Check if the current statement only contains
11970 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11973 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
11974 bool *handled_ops_p
,
11975 struct walk_stmt_info
*wi
)
11977 int *info
= (int *) wi
->info
;
11978 gimple
*stmt
= gsi_stmt (*gsi_p
);
11980 *handled_ops_p
= true;
11981 switch (gimple_code (stmt
))
11987 case GIMPLE_OMP_FOR
:
11988 case GIMPLE_OMP_SECTIONS
:
11989 *info
= *info
== 0 ? 1 : -1;
11998 struct omp_taskcopy_context
12000 /* This field must be at the beginning, as we do "inheritance": Some
12001 callback functions for tree-inline.cc (e.g., omp_copy_decl)
12002 receive a copy_body_data pointer that is up-casted to an
12003 omp_context pointer. */
12009 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
12011 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
12013 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
12014 return create_tmp_var (TREE_TYPE (var
));
12020 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
12022 tree name
, new_fields
= NULL
, type
, f
;
12024 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
12025 name
= DECL_NAME (TYPE_NAME (orig_type
));
12026 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
12027 TYPE_DECL
, name
, type
);
12028 TYPE_NAME (type
) = name
;
12030 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
12032 tree new_f
= copy_node (f
);
12033 DECL_CONTEXT (new_f
) = type
;
12034 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
12035 TREE_CHAIN (new_f
) = new_fields
;
12036 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
12037 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
12038 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
12040 new_fields
= new_f
;
12041 tcctx
->cb
.decl_map
->put (f
, new_f
);
12043 TYPE_FIELDS (type
) = nreverse (new_fields
);
12044 layout_type (type
);
12048 /* Create task copyfn. */
12051 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
12053 struct function
*child_cfun
;
12054 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
12055 tree record_type
, srecord_type
, bind
, list
;
12056 bool record_needs_remap
= false, srecord_needs_remap
= false;
12058 struct omp_taskcopy_context tcctx
;
12059 location_t loc
= gimple_location (task_stmt
);
12060 size_t looptempno
= 0;
12062 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
12063 task_cpyfns
.safe_push (task_stmt
);
12064 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
12065 gcc_assert (child_cfun
->cfg
== NULL
);
12066 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
12068 /* Reset DECL_CONTEXT on function arguments. */
12069 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
12070 DECL_CONTEXT (t
) = child_fn
;
12072 /* Populate the function. */
12073 push_gimplify_context ();
12074 push_cfun (child_cfun
);
12076 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
12077 TREE_SIDE_EFFECTS (bind
) = 1;
12079 DECL_SAVED_TREE (child_fn
) = bind
;
12080 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
12082 /* Remap src and dst argument types if needed. */
12083 record_type
= ctx
->record_type
;
12084 srecord_type
= ctx
->srecord_type
;
12085 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
12086 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
12088 record_needs_remap
= true;
12091 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
12092 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
12094 srecord_needs_remap
= true;
12098 if (record_needs_remap
|| srecord_needs_remap
)
12100 memset (&tcctx
, '\0', sizeof (tcctx
));
12101 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
12102 tcctx
.cb
.dst_fn
= child_fn
;
12103 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
12104 gcc_checking_assert (tcctx
.cb
.src_node
);
12105 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
12106 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
12107 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
12108 tcctx
.cb
.eh_lp_nr
= 0;
12109 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
12110 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
12113 if (record_needs_remap
)
12114 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
12115 if (srecord_needs_remap
)
12116 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
12119 tcctx
.cb
.decl_map
= NULL
;
12121 arg
= DECL_ARGUMENTS (child_fn
);
12122 TREE_TYPE (arg
) = build_pointer_type (record_type
);
12123 sarg
= DECL_CHAIN (arg
);
12124 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
12126 /* First pass: initialize temporaries used in record_type and srecord_type
12127 sizes and field offsets. */
12128 if (tcctx
.cb
.decl_map
)
12129 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12130 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12134 decl
= OMP_CLAUSE_DECL (c
);
12135 p
= tcctx
.cb
.decl_map
->get (decl
);
12138 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12139 sf
= (tree
) n
->value
;
12140 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12141 src
= build_simple_mem_ref_loc (loc
, sarg
);
12142 src
= omp_build_component_ref (src
, sf
);
12143 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
12144 append_to_statement_list (t
, &list
);
12147 /* Second pass: copy shared var pointers and copy construct non-VLA
12148 firstprivate vars. */
12149 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12150 switch (OMP_CLAUSE_CODE (c
))
12152 splay_tree_key key
;
12153 case OMP_CLAUSE_SHARED
:
12154 decl
= OMP_CLAUSE_DECL (c
);
12155 key
= (splay_tree_key
) decl
;
12156 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
12157 key
= (splay_tree_key
) &DECL_UID (decl
);
12158 n
= splay_tree_lookup (ctx
->field_map
, key
);
12161 f
= (tree
) n
->value
;
12162 if (tcctx
.cb
.decl_map
)
12163 f
= *tcctx
.cb
.decl_map
->get (f
);
12164 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
12165 sf
= (tree
) n
->value
;
12166 if (tcctx
.cb
.decl_map
)
12167 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12168 src
= build_simple_mem_ref_loc (loc
, sarg
);
12169 src
= omp_build_component_ref (src
, sf
);
12170 dst
= build_simple_mem_ref_loc (loc
, arg
);
12171 dst
= omp_build_component_ref (dst
, f
);
12172 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12173 append_to_statement_list (t
, &list
);
12175 case OMP_CLAUSE_REDUCTION
:
12176 case OMP_CLAUSE_IN_REDUCTION
:
12177 decl
= OMP_CLAUSE_DECL (c
);
12178 if (TREE_CODE (decl
) == MEM_REF
)
12180 decl
= TREE_OPERAND (decl
, 0);
12181 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
12182 decl
= TREE_OPERAND (decl
, 0);
12183 if (TREE_CODE (decl
) == INDIRECT_REF
12184 || TREE_CODE (decl
) == ADDR_EXPR
)
12185 decl
= TREE_OPERAND (decl
, 0);
12187 key
= (splay_tree_key
) decl
;
12188 n
= splay_tree_lookup (ctx
->field_map
, key
);
12191 f
= (tree
) n
->value
;
12192 if (tcctx
.cb
.decl_map
)
12193 f
= *tcctx
.cb
.decl_map
->get (f
);
12194 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
12195 sf
= (tree
) n
->value
;
12196 if (tcctx
.cb
.decl_map
)
12197 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12198 src
= build_simple_mem_ref_loc (loc
, sarg
);
12199 src
= omp_build_component_ref (src
, sf
);
12200 if (decl
!= OMP_CLAUSE_DECL (c
)
12201 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
12202 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
12203 src
= build_simple_mem_ref_loc (loc
, src
);
12204 dst
= build_simple_mem_ref_loc (loc
, arg
);
12205 dst
= omp_build_component_ref (dst
, f
);
12206 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12207 append_to_statement_list (t
, &list
);
12209 case OMP_CLAUSE__LOOPTEMP_
:
12210 /* Fields for first two _looptemp_ clauses are initialized by
12211 GOMP_taskloop*, the rest are handled like firstprivate. */
12212 if (looptempno
< 2)
12218 case OMP_CLAUSE__REDUCTEMP_
:
12219 case OMP_CLAUSE_FIRSTPRIVATE
:
12220 decl
= OMP_CLAUSE_DECL (c
);
12221 if (is_variable_sized (decl
))
12223 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12226 f
= (tree
) n
->value
;
12227 if (tcctx
.cb
.decl_map
)
12228 f
= *tcctx
.cb
.decl_map
->get (f
);
12229 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12232 sf
= (tree
) n
->value
;
12233 if (tcctx
.cb
.decl_map
)
12234 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12235 src
= build_simple_mem_ref_loc (loc
, sarg
);
12236 src
= omp_build_component_ref (src
, sf
);
12237 if (use_pointer_for_field (decl
, NULL
)
12238 || omp_privatize_by_reference (decl
))
12239 src
= build_simple_mem_ref_loc (loc
, src
);
12243 dst
= build_simple_mem_ref_loc (loc
, arg
);
12244 dst
= omp_build_component_ref (dst
, f
);
12245 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
12246 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12249 if (ctx
->allocate_map
)
12250 if (tree
*allocatorp
= ctx
->allocate_map
->get (decl
))
12252 tree allocator
= *allocatorp
;
12253 HOST_WIDE_INT ialign
= 0;
12254 if (TREE_CODE (allocator
) == TREE_LIST
)
12256 ialign
= tree_to_uhwi (TREE_VALUE (allocator
));
12257 allocator
= TREE_PURPOSE (allocator
);
12259 if (TREE_CODE (allocator
) != INTEGER_CST
)
12261 n
= splay_tree_lookup (ctx
->sfield_map
,
12262 (splay_tree_key
) allocator
);
12263 allocator
= (tree
) n
->value
;
12264 if (tcctx
.cb
.decl_map
)
12265 allocator
= *tcctx
.cb
.decl_map
->get (allocator
);
12266 tree a
= build_simple_mem_ref_loc (loc
, sarg
);
12267 allocator
= omp_build_component_ref (a
, allocator
);
12269 allocator
= fold_convert (pointer_sized_int_node
, allocator
);
12270 tree a
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
12271 tree align
= build_int_cst (size_type_node
,
12273 DECL_ALIGN_UNIT (decl
)));
12274 tree sz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst
)));
12275 tree ptr
= build_call_expr_loc (loc
, a
, 3, align
, sz
,
12277 ptr
= fold_convert (TREE_TYPE (dst
), ptr
);
12278 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, ptr
);
12279 append_to_statement_list (t
, &list
);
12280 dst
= build_simple_mem_ref_loc (loc
, dst
);
12282 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
12284 append_to_statement_list (t
, &list
);
12286 case OMP_CLAUSE_PRIVATE
:
12287 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
12289 decl
= OMP_CLAUSE_DECL (c
);
12290 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12291 f
= (tree
) n
->value
;
12292 if (tcctx
.cb
.decl_map
)
12293 f
= *tcctx
.cb
.decl_map
->get (f
);
12294 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12297 sf
= (tree
) n
->value
;
12298 if (tcctx
.cb
.decl_map
)
12299 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12300 src
= build_simple_mem_ref_loc (loc
, sarg
);
12301 src
= omp_build_component_ref (src
, sf
);
12302 if (use_pointer_for_field (decl
, NULL
))
12303 src
= build_simple_mem_ref_loc (loc
, src
);
12307 dst
= build_simple_mem_ref_loc (loc
, arg
);
12308 dst
= omp_build_component_ref (dst
, f
);
12309 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12310 append_to_statement_list (t
, &list
);
12316 /* Last pass: handle VLA firstprivates. */
12317 if (tcctx
.cb
.decl_map
)
12318 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12319 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12323 decl
= OMP_CLAUSE_DECL (c
);
12324 if (!is_variable_sized (decl
))
12326 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12329 f
= (tree
) n
->value
;
12330 f
= *tcctx
.cb
.decl_map
->get (f
);
12331 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
12332 ind
= DECL_VALUE_EXPR (decl
);
12333 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
12334 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
12335 n
= splay_tree_lookup (ctx
->sfield_map
,
12336 (splay_tree_key
) TREE_OPERAND (ind
, 0));
12337 sf
= (tree
) n
->value
;
12338 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12339 src
= build_simple_mem_ref_loc (loc
, sarg
);
12340 src
= omp_build_component_ref (src
, sf
);
12341 src
= build_simple_mem_ref_loc (loc
, src
);
12342 dst
= build_simple_mem_ref_loc (loc
, arg
);
12343 dst
= omp_build_component_ref (dst
, f
);
12344 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
12345 append_to_statement_list (t
, &list
);
12346 n
= splay_tree_lookup (ctx
->field_map
,
12347 (splay_tree_key
) TREE_OPERAND (ind
, 0));
12348 df
= (tree
) n
->value
;
12349 df
= *tcctx
.cb
.decl_map
->get (df
);
12350 ptr
= build_simple_mem_ref_loc (loc
, arg
);
12351 ptr
= omp_build_component_ref (ptr
, df
);
12352 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
12353 build_fold_addr_expr_loc (loc
, dst
));
12354 append_to_statement_list (t
, &list
);
12357 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
12358 append_to_statement_list (t
, &list
);
12360 if (tcctx
.cb
.decl_map
)
12361 delete tcctx
.cb
.decl_map
;
12362 pop_gimplify_context (NULL
);
12363 BIND_EXPR_BODY (bind
) = list
;
12368 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
12372 size_t cnt
[5] = { 0, 0, 0, 0, 0 }, idx
= 2, i
;
12374 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
12375 gcc_assert (clauses
);
12376 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12377 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
12378 switch (OMP_CLAUSE_DEPEND_KIND (c
))
12380 case OMP_CLAUSE_DEPEND_LAST
:
12381 /* Lowering already done at gimplification. */
12383 case OMP_CLAUSE_DEPEND_IN
:
12386 case OMP_CLAUSE_DEPEND_OUT
:
12387 case OMP_CLAUSE_DEPEND_INOUT
:
12390 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
12393 case OMP_CLAUSE_DEPEND_DEPOBJ
:
12396 case OMP_CLAUSE_DEPEND_INOUTSET
:
12400 gcc_unreachable ();
12402 if (cnt
[1] || cnt
[3] || cnt
[4])
12404 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3] + cnt
[4];
12405 size_t inoutidx
= total
+ idx
;
12406 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
+ 2 * cnt
[4]);
12407 tree array
= create_tmp_var (type
);
12408 TREE_ADDRESSABLE (array
) = 1;
12409 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
12413 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
12414 gimple_seq_add_stmt (iseq
, g
);
12415 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
12418 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
12419 gimple_seq_add_stmt (iseq
, g
);
12420 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
12422 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
12423 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
12424 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
12425 gimple_seq_add_stmt (iseq
, g
);
12427 for (i
= 0; i
< 5; i
++)
12431 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12432 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
12436 switch (OMP_CLAUSE_DEPEND_KIND (c
))
12438 case OMP_CLAUSE_DEPEND_IN
:
12442 case OMP_CLAUSE_DEPEND_OUT
:
12443 case OMP_CLAUSE_DEPEND_INOUT
:
12447 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
12451 case OMP_CLAUSE_DEPEND_DEPOBJ
:
12455 case OMP_CLAUSE_DEPEND_INOUTSET
:
12460 gcc_unreachable ();
12462 tree t
= OMP_CLAUSE_DECL (c
);
12465 t
= build4 (ARRAY_REF
, ptr_type_node
, array
,
12466 size_int (inoutidx
), NULL_TREE
, NULL_TREE
);
12467 t
= build_fold_addr_expr (t
);
12470 t
= fold_convert (ptr_type_node
, t
);
12471 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
12472 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12473 NULL_TREE
, NULL_TREE
);
12474 g
= gimple_build_assign (r
, t
);
12475 gimple_seq_add_stmt (iseq
, g
);
12479 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12480 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
12481 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_INOUTSET
)
12483 tree t
= OMP_CLAUSE_DECL (c
);
12484 t
= fold_convert (ptr_type_node
, t
);
12485 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
12486 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12487 NULL_TREE
, NULL_TREE
);
12488 g
= gimple_build_assign (r
, t
);
12489 gimple_seq_add_stmt (iseq
, g
);
12490 t
= build_int_cst (ptr_type_node
, GOMP_DEPEND_INOUTSET
);
12491 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12492 NULL_TREE
, NULL_TREE
);
12493 g
= gimple_build_assign (r
, t
);
12494 gimple_seq_add_stmt (iseq
, g
);
12497 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
12498 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
12499 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
12500 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
12502 tree clobber
= build_clobber (type
);
12503 g
= gimple_build_assign (array
, clobber
);
12504 gimple_seq_add_stmt (oseq
, g
);
12507 /* Lower the OpenMP parallel or task directive in the current statement
12508 in GSI_P. CTX holds context information for the directive. */
12511 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12515 gimple
*stmt
= gsi_stmt (*gsi_p
);
12516 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
12517 gimple_seq par_body
;
12518 location_t loc
= gimple_location (stmt
);
12520 clauses
= gimple_omp_taskreg_clauses (stmt
);
12521 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12522 && gimple_omp_task_taskwait_p (stmt
))
12530 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
12531 par_body
= gimple_bind_body (par_bind
);
12533 child_fn
= ctx
->cb
.dst_fn
;
12534 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
12535 && !gimple_omp_parallel_combined_p (stmt
))
12537 struct walk_stmt_info wi
;
12540 memset (&wi
, 0, sizeof (wi
));
12542 wi
.val_only
= true;
12543 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
12545 gimple_omp_parallel_set_combined_p (stmt
, true);
12547 gimple_seq dep_ilist
= NULL
;
12548 gimple_seq dep_olist
= NULL
;
12549 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12550 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
12552 push_gimplify_context ();
12553 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12554 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
12555 &dep_ilist
, &dep_olist
);
12558 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12559 && gimple_omp_task_taskwait_p (stmt
))
12563 gsi_replace (gsi_p
, dep_bind
, true);
12564 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12565 gimple_bind_add_stmt (dep_bind
, stmt
);
12566 gimple_bind_add_seq (dep_bind
, dep_olist
);
12567 pop_gimplify_context (dep_bind
);
12572 if (ctx
->srecord_type
)
12573 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
12575 gimple_seq tskred_ilist
= NULL
;
12576 gimple_seq tskred_olist
= NULL
;
12577 if ((is_task_ctx (ctx
)
12578 && gimple_omp_task_taskloop_p (ctx
->stmt
)
12579 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
12580 OMP_CLAUSE_REDUCTION
))
12581 || (is_parallel_ctx (ctx
)
12582 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
12583 OMP_CLAUSE__REDUCTEMP_
)))
12585 if (dep_bind
== NULL
)
12587 push_gimplify_context ();
12588 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12590 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
12592 gimple_omp_taskreg_clauses (ctx
->stmt
),
12593 &tskred_ilist
, &tskred_olist
);
12596 push_gimplify_context ();
12598 gimple_seq par_olist
= NULL
;
12599 gimple_seq par_ilist
= NULL
;
12600 gimple_seq par_rlist
= NULL
;
12601 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
12602 lower_omp (&par_body
, ctx
);
12603 if (gimple_code (stmt
) != GIMPLE_OMP_TASK
)
12604 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
12606 /* Declare all the variables created by mapping and the variables
12607 declared in the scope of the parallel body. */
12608 record_vars_into (ctx
->block_vars
, child_fn
);
12609 maybe_remove_omp_member_access_dummy_vars (par_bind
);
12610 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
12612 if (ctx
->record_type
)
12615 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
12616 : ctx
->record_type
, ".omp_data_o");
12617 DECL_NAMELESS (ctx
->sender_decl
) = 1;
12618 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
12619 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
12622 gimple_seq olist
= NULL
;
12623 gimple_seq ilist
= NULL
;
12624 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
12625 lower_send_shared_vars (&ilist
, &olist
, ctx
);
12627 if (ctx
->record_type
)
12629 tree clobber
= build_clobber (TREE_TYPE (ctx
->sender_decl
));
12630 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
12634 /* Once all the expansions are done, sequence all the different
12635 fragments inside gimple_omp_body. */
12637 gimple_seq new_body
= NULL
;
12639 if (ctx
->record_type
)
12641 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
12642 /* fixup_child_record_type might have changed receiver_decl's type. */
12643 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
12644 gimple_seq_add_stmt (&new_body
,
12645 gimple_build_assign (ctx
->receiver_decl
, t
));
12648 gimple_seq_add_seq (&new_body
, par_ilist
);
12649 gimple_seq_add_seq (&new_body
, par_body
);
12650 gimple_seq_add_seq (&new_body
, par_rlist
);
12651 if (ctx
->cancellable
)
12652 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
12653 gimple_seq_add_seq (&new_body
, par_olist
);
12654 new_body
= maybe_catch_exception (new_body
);
12655 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
12656 gimple_seq_add_stmt (&new_body
,
12657 gimple_build_omp_continue (integer_zero_node
,
12658 integer_zero_node
));
12659 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
12660 gimple_omp_set_body (stmt
, new_body
);
12662 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
12663 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12665 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
12666 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
12667 gimple_bind_add_seq (bind
, ilist
);
12668 gimple_bind_add_stmt (bind
, stmt
);
12669 gimple_bind_add_seq (bind
, olist
);
12671 pop_gimplify_context (NULL
);
12675 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12676 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
12677 gimple_bind_add_stmt (dep_bind
, bind
);
12678 gimple_bind_add_seq (dep_bind
, tskred_olist
);
12679 gimple_bind_add_seq (dep_bind
, dep_olist
);
12680 pop_gimplify_context (dep_bind
);
12684 /* Lower the GIMPLE_OMP_TARGET in the current statement
12685 in GSI_P. CTX holds context information for the directive. */
12688 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12691 tree child_fn
, t
, c
;
12692 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
12693 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
12694 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
12695 location_t loc
= gimple_location (stmt
);
12696 bool offloaded
, data_region
;
12697 unsigned int map_cnt
= 0;
12698 tree in_reduction_clauses
= NULL_TREE
;
12700 offloaded
= is_gimple_omp_offloaded (stmt
);
12701 switch (gimple_omp_target_kind (stmt
))
12703 case GF_OMP_TARGET_KIND_REGION
:
12705 q
= &in_reduction_clauses
;
12706 for (p
= gimple_omp_target_clauses_ptr (stmt
); *p
; )
12707 if (OMP_CLAUSE_CODE (*p
) == OMP_CLAUSE_IN_REDUCTION
)
12710 q
= &OMP_CLAUSE_CHAIN (*q
);
12711 *p
= OMP_CLAUSE_CHAIN (*p
);
12714 p
= &OMP_CLAUSE_CHAIN (*p
);
12716 *p
= in_reduction_clauses
;
12718 case GF_OMP_TARGET_KIND_UPDATE
:
12719 case GF_OMP_TARGET_KIND_ENTER_DATA
:
12720 case GF_OMP_TARGET_KIND_EXIT_DATA
:
12721 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
12722 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
12723 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
12724 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
12725 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
12726 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
12727 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
12728 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
12729 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
12730 data_region
= false;
12732 case GF_OMP_TARGET_KIND_DATA
:
12733 case GF_OMP_TARGET_KIND_OACC_DATA
:
12734 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
12735 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
12736 data_region
= true;
12739 gcc_unreachable ();
12742 /* Ensure that requires map is written via output_offload_tables, even if only
12743 'target (enter/exit) data' is used in the translation unit. */
12744 if (ENABLE_OFFLOADING
&& (omp_requires_mask
& OMP_REQUIRES_TARGET_USED
))
12745 g
->have_offload
= true;
12747 clauses
= gimple_omp_target_clauses (stmt
);
12749 gimple_seq dep_ilist
= NULL
;
12750 gimple_seq dep_olist
= NULL
;
12751 bool has_depend
= omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
) != NULL_TREE
;
12752 if (has_depend
|| in_reduction_clauses
)
12754 push_gimplify_context ();
12755 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12757 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
12758 &dep_ilist
, &dep_olist
);
12759 if (in_reduction_clauses
)
12760 lower_rec_input_clauses (in_reduction_clauses
, &dep_ilist
, &dep_olist
,
12768 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
12769 tgt_body
= gimple_bind_body (tgt_bind
);
12771 else if (data_region
)
12772 tgt_body
= gimple_omp_body (stmt
);
12773 child_fn
= ctx
->cb
.dst_fn
;
12775 push_gimplify_context ();
12778 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12779 switch (OMP_CLAUSE_CODE (c
))
12785 case OMP_CLAUSE_MAP
:
12787 /* First check what we're prepared to handle in the following. */
12788 switch (OMP_CLAUSE_MAP_KIND (c
))
12790 case GOMP_MAP_ALLOC
:
12792 case GOMP_MAP_FROM
:
12793 case GOMP_MAP_TOFROM
:
12794 case GOMP_MAP_POINTER
:
12795 case GOMP_MAP_TO_PSET
:
12796 case GOMP_MAP_DELETE
:
12797 case GOMP_MAP_RELEASE
:
12798 case GOMP_MAP_ALWAYS_TO
:
12799 case GOMP_MAP_ALWAYS_FROM
:
12800 case GOMP_MAP_ALWAYS_TOFROM
:
12801 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
12802 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
12803 case GOMP_MAP_STRUCT
:
12804 case GOMP_MAP_ALWAYS_POINTER
:
12805 case GOMP_MAP_ATTACH
:
12806 case GOMP_MAP_DETACH
:
12807 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
12808 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
12810 case GOMP_MAP_IF_PRESENT
:
12811 case GOMP_MAP_FORCE_ALLOC
:
12812 case GOMP_MAP_FORCE_TO
:
12813 case GOMP_MAP_FORCE_FROM
:
12814 case GOMP_MAP_FORCE_TOFROM
:
12815 case GOMP_MAP_FORCE_PRESENT
:
12816 case GOMP_MAP_FORCE_DEVICEPTR
:
12817 case GOMP_MAP_DEVICE_RESIDENT
:
12818 case GOMP_MAP_LINK
:
12819 case GOMP_MAP_FORCE_DETACH
:
12820 gcc_assert (is_gimple_omp_oacc (stmt
));
12823 gcc_unreachable ();
12827 case OMP_CLAUSE_TO
:
12828 case OMP_CLAUSE_FROM
:
12830 var
= OMP_CLAUSE_DECL (c
);
12833 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
12834 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12835 && (OMP_CLAUSE_MAP_KIND (c
)
12836 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
12841 if (DECL_SIZE (var
)
12842 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12844 tree var2
= DECL_VALUE_EXPR (var
);
12845 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12846 var2
= TREE_OPERAND (var2
, 0);
12847 gcc_assert (DECL_P (var2
));
12852 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12853 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12854 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
12856 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12858 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
12859 && varpool_node::get_create (var
)->offloadable
)
12862 tree type
= build_pointer_type (TREE_TYPE (var
));
12863 tree new_var
= lookup_decl (var
, ctx
);
12864 x
= create_tmp_var_raw (type
, get_name (new_var
));
12865 gimple_add_tmp_var (x
);
12866 x
= build_simple_mem_ref (x
);
12867 SET_DECL_VALUE_EXPR (new_var
, x
);
12868 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12873 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12874 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12875 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
12876 && is_omp_target (stmt
))
12878 gcc_assert (maybe_lookup_field (c
, ctx
));
12883 if (!maybe_lookup_field (var
, ctx
))
12886 /* Don't remap compute constructs' reduction variables, because the
12887 intermediate result must be local to each gang. */
12888 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12889 && is_gimple_omp_oacc (ctx
->stmt
)
12890 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
12892 x
= build_receiver_ref (var
, true, ctx
);
12893 tree new_var
= lookup_decl (var
, ctx
);
12895 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12896 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
12897 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12898 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12899 x
= build_simple_mem_ref (x
);
12900 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12902 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
12903 if (omp_privatize_by_reference (new_var
)
12904 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
12905 || DECL_BY_REFERENCE (var
)))
12907 /* Create a local object to hold the instance
12909 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
12910 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
12911 tree inst
= create_tmp_var (type
, id
);
12912 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
12913 x
= build_fold_addr_expr (inst
);
12915 gimplify_assign (new_var
, x
, &fplist
);
12917 else if (DECL_P (new_var
))
12919 SET_DECL_VALUE_EXPR (new_var
, x
);
12920 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12923 gcc_unreachable ();
12928 case OMP_CLAUSE_FIRSTPRIVATE
:
12929 omp_firstprivate_recv
:
12930 gcc_checking_assert (offloaded
);
12931 if (is_gimple_omp_oacc (ctx
->stmt
))
12933 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12934 gcc_checking_assert (!is_oacc_kernels (ctx
));
12935 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12936 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12938 goto oacc_firstprivate
;
12941 var
= OMP_CLAUSE_DECL (c
);
12942 if (!omp_privatize_by_reference (var
)
12943 && !is_gimple_reg_type (TREE_TYPE (var
)))
12945 tree new_var
= lookup_decl (var
, ctx
);
12946 if (is_variable_sized (var
))
12948 tree pvar
= DECL_VALUE_EXPR (var
);
12949 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12950 pvar
= TREE_OPERAND (pvar
, 0);
12951 gcc_assert (DECL_P (pvar
));
12952 tree new_pvar
= lookup_decl (pvar
, ctx
);
12953 x
= build_fold_indirect_ref (new_pvar
);
12954 TREE_THIS_NOTRAP (x
) = 1;
12957 x
= build_receiver_ref (var
, true, ctx
);
12958 SET_DECL_VALUE_EXPR (new_var
, x
);
12959 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12961 /* Fortran array descriptors: firstprivate of data + attach. */
12962 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
12963 && lang_hooks
.decls
.omp_array_data (var
, true))
12967 case OMP_CLAUSE_PRIVATE
:
12968 gcc_checking_assert (offloaded
);
12969 if (is_gimple_omp_oacc (ctx
->stmt
))
12971 /* No 'private' clauses on OpenACC 'kernels'. */
12972 gcc_checking_assert (!is_oacc_kernels (ctx
));
12973 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12974 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12978 var
= OMP_CLAUSE_DECL (c
);
12979 if (is_variable_sized (var
))
12981 tree new_var
= lookup_decl (var
, ctx
);
12982 tree pvar
= DECL_VALUE_EXPR (var
);
12983 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12984 pvar
= TREE_OPERAND (pvar
, 0);
12985 gcc_assert (DECL_P (pvar
));
12986 tree new_pvar
= lookup_decl (pvar
, ctx
);
12987 x
= build_fold_indirect_ref (new_pvar
);
12988 TREE_THIS_NOTRAP (x
) = 1;
12989 SET_DECL_VALUE_EXPR (new_var
, x
);
12990 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12994 case OMP_CLAUSE_USE_DEVICE_PTR
:
12995 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12996 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
12997 case OMP_CLAUSE_IS_DEVICE_PTR
:
12998 var
= OMP_CLAUSE_DECL (c
);
12999 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13001 while (TREE_CODE (var
) == INDIRECT_REF
13002 || TREE_CODE (var
) == ARRAY_REF
)
13003 var
= TREE_OPERAND (var
, 0);
13004 if (lang_hooks
.decls
.omp_array_data (var
, true))
13005 goto omp_firstprivate_recv
;
13008 if (is_variable_sized (var
))
13010 tree new_var
= lookup_decl (var
, ctx
);
13011 tree pvar
= DECL_VALUE_EXPR (var
);
13012 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13013 pvar
= TREE_OPERAND (pvar
, 0);
13014 gcc_assert (DECL_P (pvar
));
13015 tree new_pvar
= lookup_decl (pvar
, ctx
);
13016 x
= build_fold_indirect_ref (new_pvar
);
13017 TREE_THIS_NOTRAP (x
) = 1;
13018 SET_DECL_VALUE_EXPR (new_var
, x
);
13019 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
13021 else if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13022 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13023 && !omp_privatize_by_reference (var
)
13024 && !omp_is_allocatable_or_ptr (var
)
13025 && !lang_hooks
.decls
.omp_array_data (var
, true))
13026 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
13028 tree new_var
= lookup_decl (var
, ctx
);
13029 tree type
= build_pointer_type (TREE_TYPE (var
));
13030 x
= create_tmp_var_raw (type
, get_name (new_var
));
13031 gimple_add_tmp_var (x
);
13032 x
= build_simple_mem_ref (x
);
13033 SET_DECL_VALUE_EXPR (new_var
, x
);
13034 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
13038 tree new_var
= lookup_decl (var
, ctx
);
13039 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
13040 gimple_add_tmp_var (x
);
13041 SET_DECL_VALUE_EXPR (new_var
, x
);
13042 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
13049 target_nesting_level
++;
13050 lower_omp (&tgt_body
, ctx
);
13051 target_nesting_level
--;
13053 else if (data_region
)
13054 lower_omp (&tgt_body
, ctx
);
13058 /* Declare all the variables created by mapping and the variables
13059 declared in the scope of the target body. */
13060 record_vars_into (ctx
->block_vars
, child_fn
);
13061 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
13062 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
13067 if (ctx
->record_type
)
13070 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
13071 DECL_NAMELESS (ctx
->sender_decl
) = 1;
13072 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
13073 t
= make_tree_vec (3);
13074 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
13075 TREE_VEC_ELT (t
, 1)
13076 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
13077 ".omp_data_sizes");
13078 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
13079 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
13080 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
13081 tree tkind_type
= short_unsigned_type_node
;
13082 int talign_shift
= 8;
13083 TREE_VEC_ELT (t
, 2)
13084 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
13085 ".omp_data_kinds");
13086 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
13087 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
13088 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
13089 gimple_omp_target_set_data_arg (stmt
, t
);
13091 vec
<constructor_elt
, va_gc
> *vsize
;
13092 vec
<constructor_elt
, va_gc
> *vkind
;
13093 vec_alloc (vsize
, map_cnt
);
13094 vec_alloc (vkind
, map_cnt
);
13095 unsigned int map_idx
= 0;
13097 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13098 switch (OMP_CLAUSE_CODE (c
))
13100 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
13101 unsigned int talign
;
13106 case OMP_CLAUSE_MAP
:
13107 case OMP_CLAUSE_TO
:
13108 case OMP_CLAUSE_FROM
:
13109 oacc_firstprivate_map
:
13111 ovar
= OMP_CLAUSE_DECL (c
);
13112 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13113 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13114 || (OMP_CLAUSE_MAP_KIND (c
)
13115 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
13117 if (!DECL_P (ovar
))
13119 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13120 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
13122 nc
= OMP_CLAUSE_CHAIN (c
);
13123 gcc_checking_assert (OMP_CLAUSE_DECL (nc
)
13124 == get_base_address (ovar
));
13125 ovar
= OMP_CLAUSE_DECL (nc
);
13129 tree x
= build_sender_ref (ovar
, ctx
);
13131 if (in_reduction_clauses
13132 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13133 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
13135 v
= unshare_expr (v
);
13137 while (handled_component_p (*p
)
13138 || TREE_CODE (*p
) == INDIRECT_REF
13139 || TREE_CODE (*p
) == ADDR_EXPR
13140 || TREE_CODE (*p
) == MEM_REF
13141 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
13142 p
= &TREE_OPERAND (*p
, 0);
13144 if (is_variable_sized (d
))
13146 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
13147 d
= DECL_VALUE_EXPR (d
);
13148 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
13149 d
= TREE_OPERAND (d
, 0);
13150 gcc_assert (DECL_P (d
));
13153 = (splay_tree_key
) &DECL_CONTEXT (d
);
13154 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
13159 *p
= build_fold_indirect_ref (nd
);
13161 v
= build_fold_addr_expr_with_type (v
, ptr_type_node
);
13162 gimplify_assign (x
, v
, &ilist
);
13168 if (DECL_SIZE (ovar
)
13169 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
13171 tree ovar2
= DECL_VALUE_EXPR (ovar
);
13172 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
13173 ovar2
= TREE_OPERAND (ovar2
, 0);
13174 gcc_assert (DECL_P (ovar2
));
13177 if (!maybe_lookup_field (ovar
, ctx
)
13178 && !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13179 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13180 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)))
13184 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
13185 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
13186 talign
= DECL_ALIGN_UNIT (ovar
);
13191 if (in_reduction_clauses
13192 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13193 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
13196 if (is_variable_sized (d
))
13198 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
13199 d
= DECL_VALUE_EXPR (d
);
13200 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
13201 d
= TREE_OPERAND (d
, 0);
13202 gcc_assert (DECL_P (d
));
13205 = (splay_tree_key
) &DECL_CONTEXT (d
);
13206 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
13211 var
= build_fold_indirect_ref (nd
);
13214 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13217 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13218 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13219 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
13220 && is_omp_target (stmt
))
13222 x
= build_sender_ref (c
, ctx
);
13223 gimplify_assign (x
, build_fold_addr_expr (var
), &ilist
);
13227 x
= build_sender_ref (ovar
, ctx
);
13229 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13230 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
13231 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
13232 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
13234 gcc_assert (offloaded
);
13236 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
13237 mark_addressable (avar
);
13238 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
13239 talign
= DECL_ALIGN_UNIT (avar
);
13240 avar
= build_fold_addr_expr (avar
);
13241 gimplify_assign (x
, avar
, &ilist
);
13243 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13245 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
13246 if (!omp_privatize_by_reference (var
))
13248 if (is_gimple_reg (var
)
13249 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13250 suppress_warning (var
);
13251 var
= build_fold_addr_expr (var
);
13254 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13255 gimplify_assign (x
, var
, &ilist
);
13257 else if (is_gimple_reg (var
))
13259 gcc_assert (offloaded
);
13260 tree avar
= create_tmp_var (TREE_TYPE (var
));
13261 mark_addressable (avar
);
13262 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
13263 if (GOMP_MAP_COPY_TO_P (map_kind
)
13264 || map_kind
== GOMP_MAP_POINTER
13265 || map_kind
== GOMP_MAP_TO_PSET
13266 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13268 /* If we need to initialize a temporary
13269 with VAR because it is not addressable, and
13270 the variable hasn't been initialized yet, then
13271 we'll get a warning for the store to avar.
13272 Don't warn in that case, the mapping might
13274 suppress_warning (var
, OPT_Wuninitialized
);
13275 gimplify_assign (avar
, var
, &ilist
);
13277 avar
= build_fold_addr_expr (avar
);
13278 gimplify_assign (x
, avar
, &ilist
);
13279 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
13280 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13281 && !TYPE_READONLY (TREE_TYPE (var
)))
13283 x
= unshare_expr (x
);
13284 x
= build_simple_mem_ref (x
);
13285 gimplify_assign (var
, x
, &olist
);
13290 /* While MAP is handled explicitly by the FE,
13291 for 'target update', only the identified is passed. */
13292 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
13293 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
13294 && (omp_is_allocatable_or_ptr (var
)
13295 && omp_check_optional_argument (var
, false)))
13296 var
= build_fold_indirect_ref (var
);
13297 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
13298 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
13299 || (!omp_is_allocatable_or_ptr (var
)
13300 && !omp_check_optional_argument (var
, false)))
13301 var
= build_fold_addr_expr (var
);
13302 gimplify_assign (x
, var
, &ilist
);
13306 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13308 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13309 s
= TREE_TYPE (ovar
);
13310 if (TREE_CODE (s
) == REFERENCE_TYPE
13311 || omp_check_optional_argument (ovar
, false))
13313 s
= TYPE_SIZE_UNIT (s
);
13316 s
= OMP_CLAUSE_SIZE (c
);
13317 if (s
== NULL_TREE
)
13318 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13319 s
= fold_convert (size_type_node
, s
);
13320 purpose
= size_int (map_idx
++);
13321 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13322 if (TREE_CODE (s
) != INTEGER_CST
)
13323 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13325 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
13326 switch (OMP_CLAUSE_CODE (c
))
13328 case OMP_CLAUSE_MAP
:
13329 tkind
= OMP_CLAUSE_MAP_KIND (c
);
13330 tkind_zero
= tkind
;
13331 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
13334 case GOMP_MAP_ALLOC
:
13335 case GOMP_MAP_IF_PRESENT
:
13337 case GOMP_MAP_FROM
:
13338 case GOMP_MAP_TOFROM
:
13339 case GOMP_MAP_ALWAYS_TO
:
13340 case GOMP_MAP_ALWAYS_FROM
:
13341 case GOMP_MAP_ALWAYS_TOFROM
:
13342 case GOMP_MAP_RELEASE
:
13343 case GOMP_MAP_FORCE_TO
:
13344 case GOMP_MAP_FORCE_FROM
:
13345 case GOMP_MAP_FORCE_TOFROM
:
13346 case GOMP_MAP_FORCE_PRESENT
:
13347 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
13349 case GOMP_MAP_DELETE
:
13350 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
13354 if (tkind_zero
!= tkind
)
13356 if (integer_zerop (s
))
13357 tkind
= tkind_zero
;
13358 else if (integer_nonzerop (s
))
13359 tkind_zero
= tkind
;
13361 if (tkind_zero
== tkind
13362 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c
)
13363 && (((tkind
& GOMP_MAP_FLAG_SPECIAL_BITS
)
13364 & ~GOMP_MAP_IMPLICIT
)
13367 /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
13368 bits are not interfered by other special bit encodings,
13369 then turn the GOMP_IMPLICIT_BIT flag on for the runtime
13371 tkind
|= GOMP_MAP_IMPLICIT
;
13372 tkind_zero
= tkind
;
13375 case OMP_CLAUSE_FIRSTPRIVATE
:
13376 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13377 tkind
= GOMP_MAP_TO
;
13378 tkind_zero
= tkind
;
13380 case OMP_CLAUSE_TO
:
13381 tkind
= GOMP_MAP_TO
;
13382 tkind_zero
= tkind
;
13384 case OMP_CLAUSE_FROM
:
13385 tkind
= GOMP_MAP_FROM
;
13386 tkind_zero
= tkind
;
13389 gcc_unreachable ();
13391 gcc_checking_assert (tkind
13392 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13393 gcc_checking_assert (tkind_zero
13394 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13395 talign
= ceil_log2 (talign
);
13396 tkind
|= talign
<< talign_shift
;
13397 tkind_zero
|= talign
<< talign_shift
;
13398 gcc_checking_assert (tkind
13399 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13400 gcc_checking_assert (tkind_zero
13401 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13402 if (tkind
== tkind_zero
)
13403 x
= build_int_cstu (tkind_type
, tkind
);
13406 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
13407 x
= build3 (COND_EXPR
, tkind_type
,
13408 fold_build2 (EQ_EXPR
, boolean_type_node
,
13409 unshare_expr (s
), size_zero_node
),
13410 build_int_cstu (tkind_type
, tkind_zero
),
13411 build_int_cstu (tkind_type
, tkind
));
13413 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
13418 case OMP_CLAUSE_FIRSTPRIVATE
:
13419 omp_has_device_addr_descr
:
13420 if (is_gimple_omp_oacc (ctx
->stmt
))
13421 goto oacc_firstprivate_map
;
13422 ovar
= OMP_CLAUSE_DECL (c
);
13423 if (omp_privatize_by_reference (ovar
))
13424 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13426 talign
= DECL_ALIGN_UNIT (ovar
);
13427 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13428 x
= build_sender_ref (ovar
, ctx
);
13429 tkind
= GOMP_MAP_FIRSTPRIVATE
;
13430 type
= TREE_TYPE (ovar
);
13431 if (omp_privatize_by_reference (ovar
))
13432 type
= TREE_TYPE (type
);
13433 if ((INTEGRAL_TYPE_P (type
)
13434 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
13435 || TREE_CODE (type
) == POINTER_TYPE
)
13437 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13439 if (omp_privatize_by_reference (var
))
13440 t
= build_simple_mem_ref (var
);
13441 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13442 suppress_warning (var
);
13443 if (TREE_CODE (type
) != POINTER_TYPE
)
13444 t
= fold_convert (pointer_sized_int_node
, t
);
13445 t
= fold_convert (TREE_TYPE (x
), t
);
13446 gimplify_assign (x
, t
, &ilist
);
13448 else if (omp_privatize_by_reference (var
))
13449 gimplify_assign (x
, var
, &ilist
);
13450 else if (is_gimple_reg (var
))
13452 tree avar
= create_tmp_var (TREE_TYPE (var
));
13453 mark_addressable (avar
);
13454 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13455 suppress_warning (var
);
13456 gimplify_assign (avar
, var
, &ilist
);
13457 avar
= build_fold_addr_expr (avar
);
13458 gimplify_assign (x
, avar
, &ilist
);
13462 var
= build_fold_addr_expr (var
);
13463 gimplify_assign (x
, var
, &ilist
);
13465 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
13467 else if (omp_privatize_by_reference (ovar
))
13468 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13470 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13471 s
= fold_convert (size_type_node
, s
);
13472 purpose
= size_int (map_idx
++);
13473 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13474 if (TREE_CODE (s
) != INTEGER_CST
)
13475 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13477 gcc_checking_assert (tkind
13478 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13479 talign
= ceil_log2 (talign
);
13480 tkind
|= talign
<< talign_shift
;
13481 gcc_checking_assert (tkind
13482 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13483 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13484 build_int_cstu (tkind_type
, tkind
));
13485 /* Fortran array descriptors: firstprivate of data + attach. */
13486 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
13487 && lang_hooks
.decls
.omp_array_data (ovar
, true))
13489 tree not_null_lb
, null_lb
, after_lb
;
13490 tree var1
, var2
, size1
, size2
;
13491 tree present
= omp_check_optional_argument (ovar
, true);
13494 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13495 not_null_lb
= create_artificial_label (clause_loc
);
13496 null_lb
= create_artificial_label (clause_loc
);
13497 after_lb
= create_artificial_label (clause_loc
);
13498 gimple_seq seq
= NULL
;
13499 present
= force_gimple_operand (present
, &seq
, true,
13501 gimple_seq_add_seq (&ilist
, seq
);
13502 gimple_seq_add_stmt (&ilist
,
13503 gimple_build_cond_from_tree (present
,
13504 not_null_lb
, null_lb
));
13505 gimple_seq_add_stmt (&ilist
,
13506 gimple_build_label (not_null_lb
));
13508 var1
= lang_hooks
.decls
.omp_array_data (var
, false);
13509 size1
= lang_hooks
.decls
.omp_array_size (var
, &ilist
);
13510 var2
= build_fold_addr_expr (x
);
13511 if (!POINTER_TYPE_P (TREE_TYPE (var
)))
13512 var
= build_fold_addr_expr (var
);
13513 size2
= fold_build2 (POINTER_DIFF_EXPR
, ssizetype
,
13514 build_fold_addr_expr (var1
), var
);
13515 size2
= fold_convert (sizetype
, size2
);
13518 tree tmp
= create_tmp_var (TREE_TYPE (var1
));
13519 gimplify_assign (tmp
, var1
, &ilist
);
13521 tmp
= create_tmp_var (TREE_TYPE (var2
));
13522 gimplify_assign (tmp
, var2
, &ilist
);
13524 tmp
= create_tmp_var (TREE_TYPE (size1
));
13525 gimplify_assign (tmp
, size1
, &ilist
);
13527 tmp
= create_tmp_var (TREE_TYPE (size2
));
13528 gimplify_assign (tmp
, size2
, &ilist
);
13530 gimple_seq_add_stmt (&ilist
, gimple_build_goto (after_lb
));
13531 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_lb
));
13532 gimplify_assign (var1
, null_pointer_node
, &ilist
);
13533 gimplify_assign (var2
, null_pointer_node
, &ilist
);
13534 gimplify_assign (size1
, size_zero_node
, &ilist
);
13535 gimplify_assign (size2
, size_zero_node
, &ilist
);
13536 gimple_seq_add_stmt (&ilist
, gimple_build_label (after_lb
));
13538 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
13539 gimplify_assign (x
, var1
, &ilist
);
13540 tkind
= GOMP_MAP_FIRSTPRIVATE
;
13541 talign
= DECL_ALIGN_UNIT (ovar
);
13542 talign
= ceil_log2 (talign
);
13543 tkind
|= talign
<< talign_shift
;
13544 gcc_checking_assert (tkind
13546 TYPE_MAX_VALUE (tkind_type
)));
13547 purpose
= size_int (map_idx
++);
13548 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, size1
);
13549 if (TREE_CODE (size1
) != INTEGER_CST
)
13550 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13551 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13552 build_int_cstu (tkind_type
, tkind
));
13553 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
13554 gimplify_assign (x
, var2
, &ilist
);
13555 tkind
= GOMP_MAP_ATTACH
;
13556 purpose
= size_int (map_idx
++);
13557 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, size2
);
13558 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13559 build_int_cstu (tkind_type
, tkind
));
13563 case OMP_CLAUSE_USE_DEVICE_PTR
:
13564 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13565 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
13566 case OMP_CLAUSE_IS_DEVICE_PTR
:
13567 ovar
= OMP_CLAUSE_DECL (c
);
13568 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13570 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13571 goto omp_has_device_addr_descr
;
13572 while (TREE_CODE (ovar
) == INDIRECT_REF
13573 || TREE_CODE (ovar
) == ARRAY_REF
)
13574 ovar
= TREE_OPERAND (ovar
, 0);
13576 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13578 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13580 tkind
= ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13581 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13582 ? GOMP_MAP_USE_DEVICE_PTR
: GOMP_MAP_FIRSTPRIVATE_INT
);
13583 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
13585 else if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13586 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13588 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
13589 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
13593 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13594 x
= build_sender_ref (ovar
, ctx
);
13597 if (is_gimple_omp_oacc (ctx
->stmt
))
13599 gcc_assert (tkind
== GOMP_MAP_USE_DEVICE_PTR
);
13601 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
))
13602 tkind
= GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT
;
13605 type
= TREE_TYPE (ovar
);
13606 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13607 var
= lang_hooks
.decls
.omp_array_data (var
, false);
13608 else if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13609 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13610 && !omp_privatize_by_reference (ovar
)
13611 && !omp_is_allocatable_or_ptr (ovar
))
13612 || TREE_CODE (type
) == ARRAY_TYPE
)
13613 var
= build_fold_addr_expr (var
);
13616 if (omp_privatize_by_reference (ovar
)
13617 || omp_check_optional_argument (ovar
, false)
13618 || omp_is_allocatable_or_ptr (ovar
))
13620 type
= TREE_TYPE (type
);
13621 if (POINTER_TYPE_P (type
)
13622 && TREE_CODE (type
) != ARRAY_TYPE
13623 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13624 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
13625 && !omp_is_allocatable_or_ptr (ovar
))
13626 || (omp_privatize_by_reference (ovar
)
13627 && omp_is_allocatable_or_ptr (ovar
))))
13628 var
= build_simple_mem_ref (var
);
13629 var
= fold_convert (TREE_TYPE (x
), var
);
13633 present
= omp_check_optional_argument (ovar
, true);
13636 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13637 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13638 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13639 tree new_x
= unshare_expr (x
);
13640 gimplify_expr (&present
, &ilist
, NULL
, is_gimple_val
,
13642 gcond
*cond
= gimple_build_cond_from_tree (present
,
13645 gimple_seq_add_stmt (&ilist
, cond
);
13646 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_label
));
13647 gimplify_assign (new_x
, null_pointer_node
, &ilist
);
13648 gimple_seq_add_stmt (&ilist
, gimple_build_goto (opt_arg_label
));
13649 gimple_seq_add_stmt (&ilist
,
13650 gimple_build_label (notnull_label
));
13651 gimplify_assign (x
, var
, &ilist
);
13652 gimple_seq_add_stmt (&ilist
,
13653 gimple_build_label (opt_arg_label
));
13656 gimplify_assign (x
, var
, &ilist
);
13658 purpose
= size_int (map_idx
++);
13659 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13660 gcc_checking_assert (tkind
13661 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13662 gcc_checking_assert (tkind
13663 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13664 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13665 build_int_cstu (tkind_type
, tkind
));
13669 gcc_assert (map_idx
== map_cnt
);
13671 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
13672 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
13673 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
13674 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
13675 for (int i
= 1; i
<= 2; i
++)
13676 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
13678 gimple_seq initlist
= NULL
;
13679 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
13680 TREE_VEC_ELT (t
, i
)),
13681 &initlist
, true, NULL_TREE
);
13682 gimple_seq_add_seq (&ilist
, initlist
);
13684 tree clobber
= build_clobber (TREE_TYPE (TREE_VEC_ELT (t
, i
)));
13685 gimple_seq_add_stmt (&olist
,
13686 gimple_build_assign (TREE_VEC_ELT (t
, i
),
13689 else if (omp_maybe_offloaded_ctx (ctx
->outer
))
13691 tree id
= get_identifier ("omp declare target");
13692 tree decl
= TREE_VEC_ELT (t
, i
);
13693 DECL_ATTRIBUTES (decl
)
13694 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
13695 varpool_node
*node
= varpool_node::get (decl
);
13698 node
->offloadable
= 1;
13699 if (ENABLE_OFFLOADING
)
13701 g
->have_offload
= true;
13702 vec_safe_push (offload_vars
, t
);
13707 tree clobber
= build_clobber (ctx
->record_type
);
13708 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
13712 /* Once all the expansions are done, sequence all the different
13713 fragments inside gimple_omp_body. */
13718 && ctx
->record_type
)
13720 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
13721 /* fixup_child_record_type might have changed receiver_decl's type. */
13722 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
13723 gimple_seq_add_stmt (&new_body
,
13724 gimple_build_assign (ctx
->receiver_decl
, t
));
13726 gimple_seq_add_seq (&new_body
, fplist
);
13728 if (offloaded
|| data_region
)
13730 tree prev
= NULL_TREE
;
13731 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13732 switch (OMP_CLAUSE_CODE (c
))
13737 case OMP_CLAUSE_FIRSTPRIVATE
:
13738 omp_firstprivatize_data_region
:
13739 if (is_gimple_omp_oacc (ctx
->stmt
))
13741 var
= OMP_CLAUSE_DECL (c
);
13742 if (omp_privatize_by_reference (var
)
13743 || is_gimple_reg_type (TREE_TYPE (var
)))
13745 tree new_var
= lookup_decl (var
, ctx
);
13747 type
= TREE_TYPE (var
);
13748 if (omp_privatize_by_reference (var
))
13749 type
= TREE_TYPE (type
);
13750 if ((INTEGRAL_TYPE_P (type
)
13751 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
13752 || TREE_CODE (type
) == POINTER_TYPE
)
13754 x
= build_receiver_ref (var
, false, ctx
);
13755 if (TREE_CODE (type
) != POINTER_TYPE
)
13756 x
= fold_convert (pointer_sized_int_node
, x
);
13757 x
= fold_convert (type
, x
);
13758 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13760 if (omp_privatize_by_reference (var
))
13762 tree v
= create_tmp_var_raw (type
, get_name (var
));
13763 gimple_add_tmp_var (v
);
13764 TREE_ADDRESSABLE (v
) = 1;
13765 gimple_seq_add_stmt (&new_body
,
13766 gimple_build_assign (v
, x
));
13767 x
= build_fold_addr_expr (v
);
13769 gimple_seq_add_stmt (&new_body
,
13770 gimple_build_assign (new_var
, x
));
13774 bool by_ref
= !omp_privatize_by_reference (var
);
13775 x
= build_receiver_ref (var
, by_ref
, ctx
);
13776 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13778 gimple_seq_add_stmt (&new_body
,
13779 gimple_build_assign (new_var
, x
));
13782 else if (is_variable_sized (var
))
13784 tree pvar
= DECL_VALUE_EXPR (var
);
13785 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13786 pvar
= TREE_OPERAND (pvar
, 0);
13787 gcc_assert (DECL_P (pvar
));
13788 tree new_var
= lookup_decl (pvar
, ctx
);
13789 x
= build_receiver_ref (var
, false, ctx
);
13790 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13791 gimple_seq_add_stmt (&new_body
,
13792 gimple_build_assign (new_var
, x
));
13795 case OMP_CLAUSE_PRIVATE
:
13796 if (is_gimple_omp_oacc (ctx
->stmt
))
13798 var
= OMP_CLAUSE_DECL (c
);
13799 if (omp_privatize_by_reference (var
))
13801 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13802 tree new_var
= lookup_decl (var
, ctx
);
13803 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
13804 if (TREE_CONSTANT (x
))
13806 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
13808 gimple_add_tmp_var (x
);
13809 TREE_ADDRESSABLE (x
) = 1;
13810 x
= build_fold_addr_expr_loc (clause_loc
, x
);
13815 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
13816 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13817 gimple_seq_add_stmt (&new_body
,
13818 gimple_build_assign (new_var
, x
));
13821 case OMP_CLAUSE_USE_DEVICE_PTR
:
13822 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13823 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
13824 case OMP_CLAUSE_IS_DEVICE_PTR
:
13826 gimple_seq assign_body
;
13827 bool is_array_data
;
13828 bool do_optional_check
;
13829 assign_body
= NULL
;
13830 do_optional_check
= false;
13831 var
= OMP_CLAUSE_DECL (c
);
13832 is_array_data
= lang_hooks
.decls
.omp_array_data (var
, true) != NULL
;
13833 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
&& is_array_data
)
13834 goto omp_firstprivatize_data_region
;
13836 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13837 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13838 x
= build_sender_ref (is_array_data
13839 ? (splay_tree_key
) &DECL_NAME (var
)
13840 : (splay_tree_key
) &DECL_UID (var
), ctx
);
13843 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13845 while (TREE_CODE (var
) == INDIRECT_REF
13846 || TREE_CODE (var
) == ARRAY_REF
)
13847 var
= TREE_OPERAND (var
, 0);
13849 x
= build_receiver_ref (var
, false, ctx
);
13854 bool is_ref
= omp_privatize_by_reference (var
);
13855 do_optional_check
= true;
13856 /* First, we copy the descriptor data from the host; then
13857 we update its data to point to the target address. */
13858 new_var
= lookup_decl (var
, ctx
);
13859 new_var
= DECL_VALUE_EXPR (new_var
);
13862 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
13863 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
)
13864 v2
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
13868 v2
= build_fold_indirect_ref (v2
);
13869 v
= create_tmp_var_raw (TREE_TYPE (v2
), get_name (var
));
13870 gimple_add_tmp_var (v
);
13871 TREE_ADDRESSABLE (v
) = 1;
13872 gimplify_assign (v
, v2
, &assign_body
);
13873 tree rhs
= build_fold_addr_expr (v
);
13874 gimple_seq_add_stmt (&assign_body
,
13875 gimple_build_assign (new_var
, rhs
));
13878 gimplify_assign (new_var
, v2
, &assign_body
);
13880 v2
= lang_hooks
.decls
.omp_array_data (unshare_expr (v
), false);
13882 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13883 gimple_seq_add_stmt (&assign_body
,
13884 gimple_build_assign (v2
, x
));
13886 else if (is_variable_sized (var
))
13888 tree pvar
= DECL_VALUE_EXPR (var
);
13889 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13890 pvar
= TREE_OPERAND (pvar
, 0);
13891 gcc_assert (DECL_P (pvar
));
13892 new_var
= lookup_decl (pvar
, ctx
);
13893 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13894 gimple_seq_add_stmt (&assign_body
,
13895 gimple_build_assign (new_var
, x
));
13897 else if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13898 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13899 && !omp_privatize_by_reference (var
)
13900 && !omp_is_allocatable_or_ptr (var
))
13901 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
13903 new_var
= lookup_decl (var
, ctx
);
13904 new_var
= DECL_VALUE_EXPR (new_var
);
13905 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
13906 new_var
= TREE_OPERAND (new_var
, 0);
13907 gcc_assert (DECL_P (new_var
));
13908 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13909 gimple_seq_add_stmt (&assign_body
,
13910 gimple_build_assign (new_var
, x
));
13914 tree type
= TREE_TYPE (var
);
13915 new_var
= lookup_decl (var
, ctx
);
13916 if (omp_privatize_by_reference (var
))
13918 type
= TREE_TYPE (type
);
13919 if (POINTER_TYPE_P (type
)
13920 && TREE_CODE (type
) != ARRAY_TYPE
13921 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13922 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13923 || (omp_privatize_by_reference (var
)
13924 && omp_is_allocatable_or_ptr (var
))))
13926 tree v
= create_tmp_var_raw (type
, get_name (var
));
13927 gimple_add_tmp_var (v
);
13928 TREE_ADDRESSABLE (v
) = 1;
13929 x
= fold_convert (type
, x
);
13930 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
,
13932 gimple_seq_add_stmt (&assign_body
,
13933 gimple_build_assign (v
, x
));
13934 x
= build_fold_addr_expr (v
);
13935 do_optional_check
= true;
13938 new_var
= DECL_VALUE_EXPR (new_var
);
13939 x
= fold_convert (TREE_TYPE (new_var
), x
);
13940 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13941 gimple_seq_add_stmt (&assign_body
,
13942 gimple_build_assign (new_var
, x
));
13945 present
= ((do_optional_check
13946 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
13947 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
13948 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c
), true)
13952 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13953 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13954 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13955 glabel
*null_glabel
= gimple_build_label (null_label
);
13956 glabel
*notnull_glabel
= gimple_build_label (notnull_label
);
13957 ggoto
*opt_arg_ggoto
= gimple_build_goto (opt_arg_label
);
13958 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13960 gimplify_expr (&present
, &new_body
, NULL
, is_gimple_val
,
13962 gcond
*cond
= gimple_build_cond_from_tree (present
,
13965 gimple_seq_add_stmt (&new_body
, cond
);
13966 gimple_seq_add_stmt (&new_body
, null_glabel
);
13967 gimplify_assign (new_var
, null_pointer_node
, &new_body
);
13968 gimple_seq_add_stmt (&new_body
, opt_arg_ggoto
);
13969 gimple_seq_add_stmt (&new_body
, notnull_glabel
);
13970 gimple_seq_add_seq (&new_body
, assign_body
);
13971 gimple_seq_add_stmt (&new_body
,
13972 gimple_build_label (opt_arg_label
));
13975 gimple_seq_add_seq (&new_body
, assign_body
);
13978 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13979 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13980 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13981 or references to VLAs. */
13982 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13983 switch (OMP_CLAUSE_CODE (c
))
13988 case OMP_CLAUSE_MAP
:
13989 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13990 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
13992 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13993 poly_int64 offset
= 0;
13995 var
= OMP_CLAUSE_DECL (c
);
13997 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
13998 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
14000 && varpool_node::get_create (var
)->offloadable
)
14002 if (TREE_CODE (var
) == INDIRECT_REF
14003 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
14004 var
= TREE_OPERAND (var
, 0);
14005 if (TREE_CODE (var
) == COMPONENT_REF
)
14007 var
= get_addr_base_and_unit_offset (var
, &offset
);
14008 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
14010 else if (DECL_SIZE (var
)
14011 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
14013 tree var2
= DECL_VALUE_EXPR (var
);
14014 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
14015 var2
= TREE_OPERAND (var2
, 0);
14016 gcc_assert (DECL_P (var2
));
14019 tree new_var
= lookup_decl (var
, ctx
), x
;
14020 tree type
= TREE_TYPE (new_var
);
14022 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
14023 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
14026 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
14028 new_var
= build2 (MEM_REF
, type
,
14029 build_fold_addr_expr (new_var
),
14030 build_int_cst (build_pointer_type (type
),
14033 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
14035 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
14036 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
14037 new_var
= build2 (MEM_REF
, type
,
14038 build_fold_addr_expr (new_var
),
14039 build_int_cst (build_pointer_type (type
),
14043 is_ref
= omp_privatize_by_reference (var
);
14044 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
14046 bool ref_to_array
= false;
14047 bool ref_to_ptr
= false;
14050 type
= TREE_TYPE (type
);
14051 if (TREE_CODE (type
) == ARRAY_TYPE
)
14053 type
= build_pointer_type (type
);
14054 ref_to_array
= true;
14057 else if (TREE_CODE (type
) == ARRAY_TYPE
)
14059 tree decl2
= DECL_VALUE_EXPR (new_var
);
14060 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
14061 decl2
= TREE_OPERAND (decl2
, 0);
14062 gcc_assert (DECL_P (decl2
));
14064 type
= TREE_TYPE (new_var
);
14066 else if (TREE_CODE (type
) == REFERENCE_TYPE
14067 && TREE_CODE (TREE_TYPE (type
)) == POINTER_TYPE
)
14069 type
= TREE_TYPE (type
);
14072 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
14073 x
= fold_convert_loc (clause_loc
, type
, x
);
14074 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
14076 tree bias
= OMP_CLAUSE_SIZE (c
);
14078 bias
= lookup_decl (bias
, ctx
);
14079 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
14080 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
14082 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
14083 TREE_TYPE (x
), x
, bias
);
14086 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
14087 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
14088 if ((is_ref
&& !ref_to_array
)
14091 tree t
= create_tmp_var_raw (type
, get_name (var
));
14092 gimple_add_tmp_var (t
);
14093 TREE_ADDRESSABLE (t
) = 1;
14094 gimple_seq_add_stmt (&new_body
,
14095 gimple_build_assign (t
, x
));
14096 x
= build_fold_addr_expr_loc (clause_loc
, t
);
14098 gimple_seq_add_stmt (&new_body
,
14099 gimple_build_assign (new_var
, x
));
14102 else if (OMP_CLAUSE_CHAIN (c
)
14103 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
14105 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
14106 == GOMP_MAP_FIRSTPRIVATE_POINTER
14107 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
14108 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
14111 case OMP_CLAUSE_PRIVATE
:
14112 var
= OMP_CLAUSE_DECL (c
);
14113 if (is_variable_sized (var
))
14115 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
14116 tree new_var
= lookup_decl (var
, ctx
);
14117 tree pvar
= DECL_VALUE_EXPR (var
);
14118 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
14119 pvar
= TREE_OPERAND (pvar
, 0);
14120 gcc_assert (DECL_P (pvar
));
14121 tree new_pvar
= lookup_decl (pvar
, ctx
);
14122 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
14123 tree al
= size_int (DECL_ALIGN (var
));
14124 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
14125 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
14126 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
14127 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
14128 gimple_seq_add_stmt (&new_body
,
14129 gimple_build_assign (new_pvar
, x
));
14131 else if (omp_privatize_by_reference (var
)
14132 && !is_gimple_omp_oacc (ctx
->stmt
))
14134 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
14135 tree new_var
= lookup_decl (var
, ctx
);
14136 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
14137 if (TREE_CONSTANT (x
))
14142 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
14143 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
14144 tree al
= size_int (TYPE_ALIGN (rtype
));
14145 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
14148 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
14149 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
14150 gimple_seq_add_stmt (&new_body
,
14151 gimple_build_assign (new_var
, x
));
14156 gimple_seq fork_seq
= NULL
;
14157 gimple_seq join_seq
= NULL
;
14159 if (offloaded
&& is_gimple_omp_oacc (ctx
->stmt
))
14161 /* If there are reductions on the offloaded region itself, treat
14162 them as a dummy GANG loop. */
14163 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
14165 gcall
*private_marker
= lower_oacc_private_marker (ctx
);
14167 if (private_marker
)
14168 gimple_call_set_arg (private_marker
, 2, level
);
14170 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
14171 false, NULL
, private_marker
, NULL
, &fork_seq
,
14175 gimple_seq_add_seq (&new_body
, fork_seq
);
14176 gimple_seq_add_seq (&new_body
, tgt_body
);
14177 gimple_seq_add_seq (&new_body
, join_seq
);
14181 new_body
= maybe_catch_exception (new_body
);
14182 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
14184 gimple_omp_set_body (stmt
, new_body
);
14187 bind
= gimple_build_bind (NULL
, NULL
,
14188 tgt_bind
? gimple_bind_block (tgt_bind
)
14190 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
14191 gimple_bind_add_seq (bind
, ilist
);
14192 gimple_bind_add_stmt (bind
, stmt
);
14193 gimple_bind_add_seq (bind
, olist
);
14195 pop_gimplify_context (NULL
);
14199 gimple_bind_add_seq (dep_bind
, dep_ilist
);
14200 gimple_bind_add_stmt (dep_bind
, bind
);
14201 gimple_bind_add_seq (dep_bind
, dep_olist
);
14202 pop_gimplify_context (dep_bind
);
14206 /* Expand code for an OpenMP teams directive. */
14209 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
14211 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
14212 push_gimplify_context ();
14214 tree block
= make_node (BLOCK
);
14215 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
14216 gsi_replace (gsi_p
, bind
, true);
14217 gimple_seq bind_body
= NULL
;
14218 gimple_seq dlist
= NULL
;
14219 gimple_seq olist
= NULL
;
14221 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
14222 OMP_CLAUSE_NUM_TEAMS
);
14223 tree num_teams_lower
= NULL_TREE
;
14224 if (num_teams
== NULL_TREE
)
14225 num_teams
= build_int_cst (unsigned_type_node
, 0);
14228 num_teams_lower
= OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams
);
14229 if (num_teams_lower
)
14231 num_teams_lower
= fold_convert (unsigned_type_node
, num_teams_lower
);
14232 gimplify_expr (&num_teams_lower
, &bind_body
, NULL
, is_gimple_val
,
14235 num_teams
= OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams
);
14236 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
14237 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
14239 if (num_teams_lower
== NULL_TREE
)
14240 num_teams_lower
= num_teams
;
14241 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
14242 OMP_CLAUSE_THREAD_LIMIT
);
14243 if (thread_limit
== NULL_TREE
)
14244 thread_limit
= build_int_cst (unsigned_type_node
, 0);
14247 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
14248 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
14249 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
14252 location_t loc
= gimple_location (teams_stmt
);
14253 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4
);
14254 tree rettype
= TREE_TYPE (TREE_TYPE (decl
));
14255 tree first
= create_tmp_var (rettype
);
14256 gimple_seq_add_stmt (&bind_body
,
14257 gimple_build_assign (first
, build_one_cst (rettype
)));
14258 tree llabel
= create_artificial_label (loc
);
14259 gimple_seq_add_stmt (&bind_body
, gimple_build_label (llabel
));
14261 = gimple_build_call (decl
, 4, num_teams_lower
, num_teams
, thread_limit
,
14263 gimple_set_location (call
, loc
);
14264 tree temp
= create_tmp_var (rettype
);
14265 gimple_call_set_lhs (call
, temp
);
14266 gimple_seq_add_stmt (&bind_body
, call
);
14268 tree tlabel
= create_artificial_label (loc
);
14269 tree flabel
= create_artificial_label (loc
);
14270 gimple
*cond
= gimple_build_cond (NE_EXPR
, temp
, build_zero_cst (rettype
),
14272 gimple_seq_add_stmt (&bind_body
, cond
);
14273 gimple_seq_add_stmt (&bind_body
, gimple_build_label (tlabel
));
14274 gimple_seq_add_stmt (&bind_body
,
14275 gimple_build_assign (first
, build_zero_cst (rettype
)));
14277 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
14278 &bind_body
, &dlist
, ctx
, NULL
);
14279 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
14280 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
,
14282 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
14284 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
14285 gimple_omp_set_body (teams_stmt
, NULL
);
14286 gimple_seq_add_seq (&bind_body
, olist
);
14287 gimple_seq_add_seq (&bind_body
, dlist
);
14288 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
14289 gimple_seq_add_stmt (&bind_body
, gimple_build_goto (llabel
));
14290 gimple_seq_add_stmt (&bind_body
, gimple_build_label (flabel
));
14291 gimple_bind_set_body (bind
, bind_body
);
14293 pop_gimplify_context (bind
);
14295 gimple_bind_append_vars (bind
, ctx
->block_vars
);
14296 BLOCK_VARS (block
) = ctx
->block_vars
;
14297 if (BLOCK_VARS (block
))
14298 TREE_USED (block
) = 1;
14301 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
14302 regimplified. If DATA is non-NULL, lower_omp_1 is outside
14303 of OMP context, but with make_addressable_vars set. */
14306 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
14311 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
14312 if ((VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
14314 && DECL_HAS_VALUE_EXPR_P (t
))
14317 if (make_addressable_vars
14319 && bitmap_bit_p (make_addressable_vars
, DECL_UID (t
)))
14322 /* If a global variable has been privatized, TREE_CONSTANT on
14323 ADDR_EXPR might be wrong. */
14324 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
14325 recompute_tree_invariant_for_addr_expr (t
);
14327 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
14331 /* Data to be communicated between lower_omp_regimplify_operands and
14332 lower_omp_regimplify_operands_p. */
14334 struct lower_omp_regimplify_operands_data
14340 /* Helper function for lower_omp_regimplify_operands. Find
14341 omp_member_access_dummy_var vars and adjust temporarily their
14342 DECL_VALUE_EXPRs if needed. */
14345 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
14348 tree t
= omp_member_access_dummy_var (*tp
);
14351 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
14352 lower_omp_regimplify_operands_data
*ldata
14353 = (lower_omp_regimplify_operands_data
*) wi
->info
;
14354 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
14357 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
14358 ldata
->decls
->safe_push (*tp
);
14359 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
14360 SET_DECL_VALUE_EXPR (*tp
, v
);
14363 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
14367 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14368 of omp_member_access_dummy_var vars during regimplification. */
14371 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
14372 gimple_stmt_iterator
*gsi_p
)
14374 auto_vec
<tree
, 10> decls
;
14377 struct walk_stmt_info wi
;
14378 memset (&wi
, '\0', sizeof (wi
));
14379 struct lower_omp_regimplify_operands_data data
;
14381 data
.decls
= &decls
;
14383 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
14385 gimple_regimplify_operands (stmt
, gsi_p
);
14386 while (!decls
.is_empty ())
14388 tree t
= decls
.pop ();
14389 tree v
= decls
.pop ();
14390 SET_DECL_VALUE_EXPR (t
, v
);
14395 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
14397 gimple
*stmt
= gsi_stmt (*gsi_p
);
14398 struct walk_stmt_info wi
;
14401 if (gimple_has_location (stmt
))
14402 input_location
= gimple_location (stmt
);
14404 if (make_addressable_vars
)
14405 memset (&wi
, '\0', sizeof (wi
));
14407 /* If we have issued syntax errors, avoid doing any heavy lifting.
14408 Just replace the OMP directives with a NOP to avoid
14409 confusing RTL expansion. */
14410 if (seen_error () && is_gimple_omp (stmt
))
14412 gsi_replace (gsi_p
, gimple_build_nop (), true);
14416 switch (gimple_code (stmt
))
14420 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
14421 if ((ctx
|| make_addressable_vars
)
14422 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
14423 lower_omp_regimplify_p
,
14424 ctx
? NULL
: &wi
, NULL
)
14425 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
14426 lower_omp_regimplify_p
,
14427 ctx
? NULL
: &wi
, NULL
)))
14428 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
14432 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
14434 case GIMPLE_EH_FILTER
:
14435 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
14438 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
14439 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
14441 case GIMPLE_ASSUME
:
14442 lower_omp (gimple_assume_body_ptr (stmt
), ctx
);
14444 case GIMPLE_TRANSACTION
:
14445 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
14449 if (ctx
&& is_gimple_omp_oacc (ctx
->stmt
))
14451 tree vars
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
14452 oacc_privatization_scan_decl_chain (ctx
, vars
);
14454 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
14455 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
14457 case GIMPLE_OMP_PARALLEL
:
14458 case GIMPLE_OMP_TASK
:
14459 ctx
= maybe_lookup_ctx (stmt
);
14461 if (ctx
->cancellable
)
14462 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14463 lower_omp_taskreg (gsi_p
, ctx
);
14465 case GIMPLE_OMP_FOR
:
14466 ctx
= maybe_lookup_ctx (stmt
);
14468 if (ctx
->cancellable
)
14469 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14470 lower_omp_for (gsi_p
, ctx
);
14472 case GIMPLE_OMP_SECTIONS
:
14473 ctx
= maybe_lookup_ctx (stmt
);
14475 if (ctx
->cancellable
)
14476 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14477 lower_omp_sections (gsi_p
, ctx
);
14479 case GIMPLE_OMP_SCOPE
:
14480 ctx
= maybe_lookup_ctx (stmt
);
14482 lower_omp_scope (gsi_p
, ctx
);
14484 case GIMPLE_OMP_SINGLE
:
14485 ctx
= maybe_lookup_ctx (stmt
);
14487 lower_omp_single (gsi_p
, ctx
);
14489 case GIMPLE_OMP_MASTER
:
14490 case GIMPLE_OMP_MASKED
:
14491 ctx
= maybe_lookup_ctx (stmt
);
14493 lower_omp_master (gsi_p
, ctx
);
14495 case GIMPLE_OMP_TASKGROUP
:
14496 ctx
= maybe_lookup_ctx (stmt
);
14498 lower_omp_taskgroup (gsi_p
, ctx
);
14500 case GIMPLE_OMP_ORDERED
:
14501 ctx
= maybe_lookup_ctx (stmt
);
14503 lower_omp_ordered (gsi_p
, ctx
);
14505 case GIMPLE_OMP_SCAN
:
14506 ctx
= maybe_lookup_ctx (stmt
);
14508 lower_omp_scan (gsi_p
, ctx
);
14510 case GIMPLE_OMP_CRITICAL
:
14511 ctx
= maybe_lookup_ctx (stmt
);
14513 lower_omp_critical (gsi_p
, ctx
);
14515 case GIMPLE_OMP_ATOMIC_LOAD
:
14516 if ((ctx
|| make_addressable_vars
)
14517 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14518 as_a
<gomp_atomic_load
*> (stmt
)),
14519 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
14520 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
14522 case GIMPLE_OMP_TARGET
:
14523 ctx
= maybe_lookup_ctx (stmt
);
14525 lower_omp_target (gsi_p
, ctx
);
14527 case GIMPLE_OMP_TEAMS
:
14528 ctx
= maybe_lookup_ctx (stmt
);
14530 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
14531 lower_omp_taskreg (gsi_p
, ctx
);
14533 lower_omp_teams (gsi_p
, ctx
);
14537 call_stmt
= as_a
<gcall
*> (stmt
);
14538 fndecl
= gimple_call_fndecl (call_stmt
);
14540 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
14541 switch (DECL_FUNCTION_CODE (fndecl
))
14543 case BUILT_IN_GOMP_BARRIER
:
14547 case BUILT_IN_GOMP_CANCEL
:
14548 case BUILT_IN_GOMP_CANCELLATION_POINT
:
14551 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
14552 cctx
= cctx
->outer
;
14553 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
14554 if (!cctx
->cancellable
)
14556 if (DECL_FUNCTION_CODE (fndecl
)
14557 == BUILT_IN_GOMP_CANCELLATION_POINT
)
14559 stmt
= gimple_build_nop ();
14560 gsi_replace (gsi_p
, stmt
, false);
14564 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
14566 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
14567 gimple_call_set_fndecl (call_stmt
, fndecl
);
14568 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
14571 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
14572 gimple_call_set_lhs (call_stmt
, lhs
);
14573 tree fallthru_label
;
14574 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
14576 g
= gimple_build_label (fallthru_label
);
14577 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14578 g
= gimple_build_cond (NE_EXPR
, lhs
,
14579 fold_convert (TREE_TYPE (lhs
),
14580 boolean_false_node
),
14581 cctx
->cancel_label
, fallthru_label
);
14582 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14589 case GIMPLE_ASSIGN
:
14590 for (omp_context
*up
= ctx
; up
; up
= up
->outer
)
14592 if (gimple_code (up
->stmt
) == GIMPLE_OMP_ORDERED
14593 || gimple_code (up
->stmt
) == GIMPLE_OMP_CRITICAL
14594 || gimple_code (up
->stmt
) == GIMPLE_OMP_TASKGROUP
14595 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCOPE
14596 || gimple_code (up
->stmt
) == GIMPLE_OMP_SECTION
14597 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
14598 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
14599 && (gimple_omp_target_kind (up
->stmt
)
14600 == GF_OMP_TARGET_KIND_DATA
)))
14602 else if (!up
->lastprivate_conditional_map
)
14604 tree lhs
= get_base_address (gimple_assign_lhs (stmt
));
14605 if (TREE_CODE (lhs
) == MEM_REF
14606 && DECL_P (TREE_OPERAND (lhs
, 0))
14607 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs
,
14608 0))) == REFERENCE_TYPE
)
14609 lhs
= TREE_OPERAND (lhs
, 0);
14611 if (tree
*v
= up
->lastprivate_conditional_map
->get (lhs
))
14614 if (up
->combined_into_simd_safelen1
)
14617 if (gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
)
14620 if (gimple_code (up
->stmt
) == GIMPLE_OMP_FOR
)
14621 clauses
= gimple_omp_for_clauses (up
->stmt
);
14623 clauses
= gimple_omp_sections_clauses (up
->stmt
);
14624 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__CONDTEMP_
);
14625 if (!OMP_CLAUSE__CONDTEMP__ITER (c
))
14626 c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
14627 OMP_CLAUSE__CONDTEMP_
);
14628 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c
));
14629 gimple
*g
= gimple_build_assign (*v
, OMP_CLAUSE_DECL (c
));
14630 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14637 if ((ctx
|| make_addressable_vars
)
14638 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
14641 /* Just remove clobbers, this should happen only if we have
14642 "privatized" local addressable variables in SIMD regions,
14643 the clobber isn't needed in that case and gimplifying address
14644 of the ARRAY_REF into a pointer and creating MEM_REF based
14645 clobber would create worse code than we get with the clobber
14647 if (gimple_clobber_p (stmt
))
14649 gsi_replace (gsi_p
, gimple_build_nop (), true);
14652 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
14659 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
14661 location_t saved_location
= input_location
;
14662 gimple_stmt_iterator gsi
;
14663 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
14664 lower_omp_1 (&gsi
, ctx
);
14665 /* During gimplification, we haven't folded statments inside offloading
14666 or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now. */
14667 if (target_nesting_level
|| taskreg_nesting_level
)
14668 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
14670 input_location
= saved_location
;
14673 /* Main entry point. */
14675 static unsigned int
14676 execute_lower_omp (void)
14682 /* This pass always runs, to provide PROP_gimple_lomp.
14683 But often, there is nothing to do. */
14684 if (flag_openacc
== 0 && flag_openmp
== 0
14685 && flag_openmp_simd
== 0)
14688 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
14689 delete_omp_context
);
14691 body
= gimple_body (current_function_decl
);
14693 scan_omp (&body
, NULL
);
14694 gcc_assert (taskreg_nesting_level
== 0);
14695 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
14696 finish_taskreg_scan (ctx
);
14697 taskreg_contexts
.release ();
14699 if (all_contexts
->root
)
14701 if (make_addressable_vars
)
14702 push_gimplify_context ();
14703 lower_omp (&body
, NULL
);
14704 if (make_addressable_vars
)
14705 pop_gimplify_context (NULL
);
14710 splay_tree_delete (all_contexts
);
14711 all_contexts
= NULL
;
14713 BITMAP_FREE (make_addressable_vars
);
14714 BITMAP_FREE (global_nonaddressable_vars
);
14716 /* If current function is a method, remove artificial dummy VAR_DECL created
14717 for non-static data member privatization, they aren't needed for
14718 debuginfo nor anything else, have been already replaced everywhere in the
14719 IL and cause problems with LTO. */
14720 if (DECL_ARGUMENTS (current_function_decl
)
14721 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
14722 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
14724 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
14726 for (auto task_stmt
: task_cpyfns
)
14727 finalize_task_copyfn (task_stmt
);
14728 task_cpyfns
.release ();
14734 const pass_data pass_data_lower_omp
=
14736 GIMPLE_PASS
, /* type */
14737 "omplower", /* name */
14738 OPTGROUP_OMP
, /* optinfo_flags */
14739 TV_NONE
, /* tv_id */
14740 PROP_gimple_any
, /* properties_required */
14741 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
14742 0, /* properties_destroyed */
14743 0, /* todo_flags_start */
14744 0, /* todo_flags_finish */
14747 class pass_lower_omp
: public gimple_opt_pass
14750 pass_lower_omp (gcc::context
*ctxt
)
14751 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
14754 /* opt_pass methods: */
14755 unsigned int execute (function
*) final override
14757 return execute_lower_omp ();
14760 }; // class pass_lower_omp
14762 } // anon namespace
14765 make_pass_lower_omp (gcc::context
*ctxt
)
14767 return new pass_lower_omp (ctxt
);
14770 /* The following is a utility to diagnose structured block violations.
14771 It is not part of the "omplower" pass, as that's invoked too late. It
14772 should be invoked by the respective front ends after gimplification. */
14774 static splay_tree all_labels
;
14776 /* Check for mismatched contexts and generate an error if needed. Return
14777 true if an error is detected. */
14780 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
14781 gimple
*branch_ctx
, gimple
*label_ctx
)
14783 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
14784 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
14786 if (label_ctx
== branch_ctx
)
14789 const char* kind
= NULL
;
14793 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
14794 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
14796 gcc_checking_assert (kind
== NULL
);
14802 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
14806 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14807 so we could traverse it and issue a correct "exit" or "enter" error
14808 message upon a structured block violation.
14810 We built the context by building a list with tree_cons'ing, but there is
14811 no easy counterpart in gimple tuples. It seems like far too much work
14812 for issuing exit/enter error messages. If someone really misses the
14813 distinct error message... patches welcome. */
14816 /* Try to avoid confusing the user by producing and error message
14817 with correct "exit" or "enter" verbiage. We prefer "exit"
14818 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14819 if (branch_ctx
== NULL
)
14825 if (TREE_VALUE (label_ctx
) == branch_ctx
)
14830 label_ctx
= TREE_CHAIN (label_ctx
);
14835 error ("invalid exit from %s structured block", kind
);
14837 error ("invalid entry to %s structured block", kind
);
14840 /* If it's obvious we have an invalid entry, be specific about the error. */
14841 if (branch_ctx
== NULL
)
14842 error ("invalid entry to %s structured block", kind
);
14845 /* Otherwise, be vague and lazy, but efficient. */
14846 error ("invalid branch to/from %s structured block", kind
);
14849 gsi_replace (gsi_p
, gimple_build_nop (), false);
14853 /* Pass 1: Create a minimal tree of structured blocks, and record
14854 where each label is found. */
14857 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
14858 struct walk_stmt_info
*wi
)
14860 gimple
*context
= (gimple
*) wi
->info
;
14861 gimple
*inner_context
;
14862 gimple
*stmt
= gsi_stmt (*gsi_p
);
14864 *handled_ops_p
= true;
14866 switch (gimple_code (stmt
))
14870 case GIMPLE_OMP_PARALLEL
:
14871 case GIMPLE_OMP_TASK
:
14872 case GIMPLE_OMP_SCOPE
:
14873 case GIMPLE_OMP_SECTIONS
:
14874 case GIMPLE_OMP_SINGLE
:
14875 case GIMPLE_OMP_SECTION
:
14876 case GIMPLE_OMP_MASTER
:
14877 case GIMPLE_OMP_MASKED
:
14878 case GIMPLE_OMP_ORDERED
:
14879 case GIMPLE_OMP_SCAN
:
14880 case GIMPLE_OMP_CRITICAL
:
14881 case GIMPLE_OMP_TARGET
:
14882 case GIMPLE_OMP_TEAMS
:
14883 case GIMPLE_OMP_TASKGROUP
:
14884 /* The minimal context here is just the current OMP construct. */
14885 inner_context
= stmt
;
14886 wi
->info
= inner_context
;
14887 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
14888 wi
->info
= context
;
14891 case GIMPLE_OMP_FOR
:
14892 inner_context
= stmt
;
14893 wi
->info
= inner_context
;
14894 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14896 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
14897 diagnose_sb_1
, NULL
, wi
);
14898 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
14899 wi
->info
= context
;
14903 splay_tree_insert (all_labels
,
14904 (splay_tree_key
) gimple_label_label (
14905 as_a
<glabel
*> (stmt
)),
14906 (splay_tree_value
) context
);
14916 /* Pass 2: Check each branch and see if its context differs from that of
14917 the destination label's context. */
14920 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
14921 struct walk_stmt_info
*wi
)
14923 gimple
*context
= (gimple
*) wi
->info
;
14925 gimple
*stmt
= gsi_stmt (*gsi_p
);
14927 *handled_ops_p
= true;
14929 switch (gimple_code (stmt
))
14933 case GIMPLE_OMP_PARALLEL
:
14934 case GIMPLE_OMP_TASK
:
14935 case GIMPLE_OMP_SCOPE
:
14936 case GIMPLE_OMP_SECTIONS
:
14937 case GIMPLE_OMP_SINGLE
:
14938 case GIMPLE_OMP_SECTION
:
14939 case GIMPLE_OMP_MASTER
:
14940 case GIMPLE_OMP_MASKED
:
14941 case GIMPLE_OMP_ORDERED
:
14942 case GIMPLE_OMP_SCAN
:
14943 case GIMPLE_OMP_CRITICAL
:
14944 case GIMPLE_OMP_TARGET
:
14945 case GIMPLE_OMP_TEAMS
:
14946 case GIMPLE_OMP_TASKGROUP
:
14948 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
14949 wi
->info
= context
;
14952 case GIMPLE_OMP_FOR
:
14954 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14956 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
14957 diagnose_sb_2
, NULL
, wi
);
14958 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
14959 wi
->info
= context
;
14964 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
14965 tree lab
= gimple_cond_true_label (cond_stmt
);
14968 n
= splay_tree_lookup (all_labels
,
14969 (splay_tree_key
) lab
);
14970 diagnose_sb_0 (gsi_p
, context
,
14971 n
? (gimple
*) n
->value
: NULL
);
14973 lab
= gimple_cond_false_label (cond_stmt
);
14976 n
= splay_tree_lookup (all_labels
,
14977 (splay_tree_key
) lab
);
14978 diagnose_sb_0 (gsi_p
, context
,
14979 n
? (gimple
*) n
->value
: NULL
);
14986 tree lab
= gimple_goto_dest (stmt
);
14987 if (TREE_CODE (lab
) != LABEL_DECL
)
14990 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
14991 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
14995 case GIMPLE_SWITCH
:
14997 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
14999 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
15001 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
15002 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
15003 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
15009 case GIMPLE_RETURN
:
15010 diagnose_sb_0 (gsi_p
, context
, NULL
);
15020 static unsigned int
15021 diagnose_omp_structured_block_errors (void)
15023 struct walk_stmt_info wi
;
15024 gimple_seq body
= gimple_body (current_function_decl
);
15026 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
15028 memset (&wi
, 0, sizeof (wi
));
15029 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
15031 memset (&wi
, 0, sizeof (wi
));
15032 wi
.want_locations
= true;
15033 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
15035 gimple_set_body (current_function_decl
, body
);
15037 splay_tree_delete (all_labels
);
15045 const pass_data pass_data_diagnose_omp_blocks
=
15047 GIMPLE_PASS
, /* type */
15048 "*diagnose_omp_blocks", /* name */
15049 OPTGROUP_OMP
, /* optinfo_flags */
15050 TV_NONE
, /* tv_id */
15051 PROP_gimple_any
, /* properties_required */
15052 0, /* properties_provided */
15053 0, /* properties_destroyed */
15054 0, /* todo_flags_start */
15055 0, /* todo_flags_finish */
15058 class pass_diagnose_omp_blocks
: public gimple_opt_pass
15061 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
15062 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
15065 /* opt_pass methods: */
15066 bool gate (function
*) final override
15068 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
15070 unsigned int execute (function
*) final override
15072 return diagnose_omp_structured_block_errors ();
15075 }; // class pass_diagnose_omp_blocks
15077 } // anon namespace
15080 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
15082 return new pass_diagnose_omp_blocks (ctxt
);
15086 #include "gt-omp-low.h"