1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2023 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.cc (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec
<tree
> task_reductions
;
122 /* A hash map from the reduction clauses to the registered array
124 hash_map
<tree
, unsigned> *task_reduction_map
;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
130 /* And a hash map from the allocate variables to their corresponding
132 hash_map
<tree
, tree
> *allocate_map
;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses
;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses
;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
151 /* True if this parallel directive is nested within another. */
154 /* True if this construct can be cancelled. */
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
159 bool combined_into_simd_safelen1
;
161 /* True if there is nested scan context with inclusive clause. */
164 /* True if there is nested scan context with exclusive clause. */
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase
;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent
;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p
;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec
<tree
> oacc_privatization_candidates
;
188 static splay_tree all_contexts
;
189 static int taskreg_nesting_level
;
190 static int target_nesting_level
;
191 static bitmap make_addressable_vars
;
192 static bitmap global_nonaddressable_vars
;
193 static vec
<omp_context
*> taskreg_contexts
;
194 static vec
<gomp_task
*> task_cpyfns
;
196 static void scan_omp (gimple_seq
*, omp_context
*);
197 static tree
scan_omp_1_op (tree
*, int *, void *);
198 static bool omp_maybe_offloaded_ctx (omp_context
*ctx
);
200 #define WALK_SUBSTMTS \
204 case GIMPLE_EH_FILTER: \
205 case GIMPLE_ASSUME: \
206 case GIMPLE_TRANSACTION: \
207 /* The sub-statements for these should be walked. */ \
208 *handled_ops_p = false; \
211 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
212 (This doesn't include OpenACC 'kernels' decomposed parts.) */
215 is_oacc_parallel_or_serial (omp_context
*ctx
)
217 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
218 return ((outer_type
== GIMPLE_OMP_TARGET
)
219 && ((gimple_omp_target_kind (ctx
->stmt
)
220 == GF_OMP_TARGET_KIND_OACC_PARALLEL
)
221 || (gimple_omp_target_kind (ctx
->stmt
)
222 == GF_OMP_TARGET_KIND_OACC_SERIAL
)));
225 /* Return whether CTX represents an OpenACC 'kernels' construct.
226 (This doesn't include OpenACC 'kernels' decomposed parts.) */
229 is_oacc_kernels (omp_context
*ctx
)
231 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
232 return ((outer_type
== GIMPLE_OMP_TARGET
)
233 && (gimple_omp_target_kind (ctx
->stmt
)
234 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
237 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
240 is_oacc_kernels_decomposed_part (omp_context
*ctx
)
242 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
243 return ((outer_type
== GIMPLE_OMP_TARGET
)
244 && ((gimple_omp_target_kind (ctx
->stmt
)
245 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
)
246 || (gimple_omp_target_kind (ctx
->stmt
)
247 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
)
248 || (gimple_omp_target_kind (ctx
->stmt
)
249 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
)));
252 /* Return true if STMT corresponds to an OpenMP target region. */
254 is_omp_target (gimple
*stmt
)
256 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
)
258 int kind
= gimple_omp_target_kind (stmt
);
259 return (kind
== GF_OMP_TARGET_KIND_REGION
260 || kind
== GF_OMP_TARGET_KIND_DATA
261 || kind
== GF_OMP_TARGET_KIND_ENTER_DATA
262 || kind
== GF_OMP_TARGET_KIND_EXIT_DATA
);
267 /* If DECL is the artificial dummy VAR_DECL created for non-static
268 data member privatization, return the underlying "this" parameter,
269 otherwise return NULL. */
272 omp_member_access_dummy_var (tree decl
)
275 || !DECL_ARTIFICIAL (decl
)
276 || !DECL_IGNORED_P (decl
)
277 || !DECL_HAS_VALUE_EXPR_P (decl
)
278 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
281 tree v
= DECL_VALUE_EXPR (decl
);
282 if (TREE_CODE (v
) != COMPONENT_REF
)
286 switch (TREE_CODE (v
))
292 case POINTER_PLUS_EXPR
:
293 v
= TREE_OPERAND (v
, 0);
296 if (DECL_CONTEXT (v
) == current_function_decl
297 && DECL_ARTIFICIAL (v
)
298 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
306 /* Helper for unshare_and_remap, called through walk_tree. */
309 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
311 tree
*pair
= (tree
*) data
;
314 *tp
= unshare_expr (pair
[1]);
317 else if (IS_TYPE_OR_DECL_P (*tp
))
322 /* Return unshare_expr (X) with all occurrences of FROM
326 unshare_and_remap (tree x
, tree from
, tree to
)
328 tree pair
[2] = { from
, to
};
329 x
= unshare_expr (x
);
330 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
334 /* Convenience function for calling scan_omp_1_op on tree operands. */
337 scan_omp_op (tree
*tp
, omp_context
*ctx
)
339 struct walk_stmt_info wi
;
341 memset (&wi
, 0, sizeof (wi
));
343 wi
.want_locations
= true;
345 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
348 static void lower_omp (gimple_seq
*, omp_context
*);
349 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
350 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
352 /* Return true if CTX is for an omp parallel. */
355 is_parallel_ctx (omp_context
*ctx
)
357 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
361 /* Return true if CTX is for an omp task. */
364 is_task_ctx (omp_context
*ctx
)
366 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
370 /* Return true if CTX is for an omp taskloop. */
373 is_taskloop_ctx (omp_context
*ctx
)
375 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
376 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
380 /* Return true if CTX is for a host omp teams. */
383 is_host_teams_ctx (omp_context
*ctx
)
385 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
386 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
389 /* Return true if CTX is for an omp parallel or omp task or host omp teams
390 (the last one is strictly not a task region in OpenMP speak, but we
391 need to treat it similarly). */
394 is_taskreg_ctx (omp_context
*ctx
)
396 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
399 /* Return true if EXPR is variable sized. */
402 is_variable_sized (const_tree expr
)
404 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
407 /* Lookup variables. The "maybe" form
408 allows for the variable form to not have been entered, otherwise we
409 assert that the variable must have been entered. */
412 lookup_decl (tree var
, omp_context
*ctx
)
414 tree
*n
= ctx
->cb
.decl_map
->get (var
);
419 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
421 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
422 return n
? *n
: NULL_TREE
;
426 lookup_field (tree var
, omp_context
*ctx
)
429 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
430 return (tree
) n
->value
;
434 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
437 n
= splay_tree_lookup (ctx
->sfield_map
438 ? ctx
->sfield_map
: ctx
->field_map
, key
);
439 return (tree
) n
->value
;
443 lookup_sfield (tree var
, omp_context
*ctx
)
445 return lookup_sfield ((splay_tree_key
) var
, ctx
);
449 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
452 n
= splay_tree_lookup (ctx
->field_map
, key
);
453 return n
? (tree
) n
->value
: NULL_TREE
;
457 maybe_lookup_field (tree var
, omp_context
*ctx
)
459 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
462 /* Return true if DECL should be copied by pointer. SHARED_CTX is
463 the parallel context if DECL is to be shared. */
466 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
468 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
469 || TYPE_ATOMIC (TREE_TYPE (decl
)))
472 /* We can only use copy-in/copy-out semantics for shared variables
473 when we know the value is not accessible from an outer scope. */
476 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
478 /* ??? Trivially accessible from anywhere. But why would we even
479 be passing an address in this case? Should we simply assert
480 this to be false, or should we have a cleanup pass that removes
481 these from the list of mappings? */
482 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
485 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
486 without analyzing the expression whether or not its location
487 is accessible to anyone else. In the case of nested parallel
488 regions it certainly may be. */
489 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
492 /* Do not use copy-in/copy-out for variables that have their
494 if (is_global_var (decl
))
496 /* For file scope vars, track whether we've seen them as
497 non-addressable initially and in that case, keep the same
498 answer for the duration of the pass, even when they are made
499 addressable later on e.g. through reduction expansion. Global
500 variables which weren't addressable before the pass will not
501 have their privatized copies address taken. See PR91216. */
502 if (!TREE_ADDRESSABLE (decl
))
504 if (!global_nonaddressable_vars
)
505 global_nonaddressable_vars
= BITMAP_ALLOC (NULL
);
506 bitmap_set_bit (global_nonaddressable_vars
, DECL_UID (decl
));
508 else if (!global_nonaddressable_vars
509 || !bitmap_bit_p (global_nonaddressable_vars
,
513 else if (TREE_ADDRESSABLE (decl
))
516 /* lower_send_shared_vars only uses copy-in, but not copy-out
518 if (TREE_READONLY (decl
)
519 || ((TREE_CODE (decl
) == RESULT_DECL
520 || TREE_CODE (decl
) == PARM_DECL
)
521 && DECL_BY_REFERENCE (decl
)))
524 /* Disallow copy-in/out in nested parallel if
525 decl is shared in outer parallel, otherwise
526 each thread could store the shared variable
527 in its own copy-in location, making the
528 variable no longer really shared. */
529 if (shared_ctx
->is_nested
)
533 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
534 if ((is_taskreg_ctx (up
)
535 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
536 && is_gimple_omp_offloaded (up
->stmt
)))
537 && maybe_lookup_decl (decl
, up
))
544 if (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
)
546 for (c
= gimple_omp_target_clauses (up
->stmt
);
547 c
; c
= OMP_CLAUSE_CHAIN (c
))
548 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
549 && OMP_CLAUSE_DECL (c
) == decl
)
553 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
554 c
; c
= OMP_CLAUSE_CHAIN (c
))
555 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
556 && OMP_CLAUSE_DECL (c
) == decl
)
560 goto maybe_mark_addressable_and_ret
;
564 /* For tasks avoid using copy-in/out. As tasks can be
565 deferred or executed in different thread, when GOMP_task
566 returns, the task hasn't necessarily terminated. */
567 if (is_task_ctx (shared_ctx
))
570 maybe_mark_addressable_and_ret
:
571 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
572 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
574 /* Taking address of OUTER in lower_send_shared_vars
575 might need regimplification of everything that uses the
577 if (!make_addressable_vars
)
578 make_addressable_vars
= BITMAP_ALLOC (NULL
);
579 bitmap_set_bit (make_addressable_vars
, DECL_UID (outer
));
580 TREE_ADDRESSABLE (outer
) = 1;
589 /* Construct a new automatic decl similar to VAR. */
592 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
594 tree copy
= copy_var_decl (var
, name
, type
);
596 DECL_CONTEXT (copy
) = current_function_decl
;
600 DECL_CHAIN (copy
) = ctx
->block_vars
;
601 ctx
->block_vars
= copy
;
606 /* If VAR is listed in make_addressable_vars, it wasn't
607 originally addressable, but was only later made so.
608 We don't need to take address of privatizations
610 if (TREE_ADDRESSABLE (var
)
611 && ((make_addressable_vars
612 && bitmap_bit_p (make_addressable_vars
, DECL_UID (var
)))
613 || (global_nonaddressable_vars
614 && bitmap_bit_p (global_nonaddressable_vars
, DECL_UID (var
)))))
615 TREE_ADDRESSABLE (copy
) = 0;
621 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
623 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
626 /* Build tree nodes to access the field for VAR on the receiver side. */
629 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
631 tree x
, field
= lookup_field (var
, ctx
);
633 /* If the receiver record type was remapped in the child function,
634 remap the field into the new record type. */
635 x
= maybe_lookup_field (field
, ctx
);
639 x
= build_simple_mem_ref (ctx
->receiver_decl
);
640 TREE_THIS_NOTRAP (x
) = 1;
641 x
= omp_build_component_ref (x
, field
);
644 x
= build_simple_mem_ref (x
);
645 TREE_THIS_NOTRAP (x
) = 1;
651 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
652 of a parallel, this is a component reference; for workshare constructs
653 this is some variable. */
656 build_outer_var_ref (tree var
, omp_context
*ctx
,
657 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
660 omp_context
*outer
= ctx
->outer
;
661 for (; outer
; outer
= outer
->outer
)
663 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
665 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCOPE
666 && !maybe_lookup_decl (var
, outer
))
671 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
673 else if (is_variable_sized (var
))
675 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
676 x
= build_outer_var_ref (x
, ctx
, code
);
677 x
= build_simple_mem_ref (x
);
679 else if (is_taskreg_ctx (ctx
))
681 bool by_ref
= use_pointer_for_field (var
, NULL
);
682 x
= build_receiver_ref (var
, by_ref
, ctx
);
684 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
685 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
687 || code
== OMP_CLAUSE_ALLOCATE
688 || (code
== OMP_CLAUSE_PRIVATE
689 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
690 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
691 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
693 /* #pragma omp simd isn't a worksharing construct, and can reference
694 even private vars in its linear etc. clauses.
695 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
696 to private vars in all worksharing constructs. */
698 if (outer
&& is_taskreg_ctx (outer
))
699 x
= lookup_decl (var
, outer
);
701 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
705 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
709 = splay_tree_lookup (outer
->field_map
,
710 (splay_tree_key
) &DECL_UID (var
));
713 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
716 x
= lookup_decl (var
, outer
);
720 tree field
= (tree
) n
->value
;
721 /* If the receiver record type was remapped in the child function,
722 remap the field into the new record type. */
723 x
= maybe_lookup_field (field
, outer
);
727 x
= build_simple_mem_ref (outer
->receiver_decl
);
728 x
= omp_build_component_ref (x
, field
);
729 if (use_pointer_for_field (var
, outer
))
730 x
= build_simple_mem_ref (x
);
734 x
= lookup_decl (var
, outer
);
735 else if (omp_privatize_by_reference (var
))
736 /* This can happen with orphaned constructs. If var is reference, it is
737 possible it is shared and as such valid. */
739 else if (omp_member_access_dummy_var (var
))
746 tree t
= omp_member_access_dummy_var (var
);
749 x
= DECL_VALUE_EXPR (var
);
750 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
752 x
= unshare_and_remap (x
, t
, o
);
754 x
= unshare_expr (x
);
758 if (omp_privatize_by_reference (var
))
759 x
= build_simple_mem_ref (x
);
764 /* Build tree nodes to access the field for VAR on the sender side. */
767 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
769 tree field
= lookup_sfield (key
, ctx
);
770 return omp_build_component_ref (ctx
->sender_decl
, field
);
774 build_sender_ref (tree var
, omp_context
*ctx
)
776 return build_sender_ref ((splay_tree_key
) var
, ctx
);
779 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
780 BASE_POINTERS_RESTRICT, declare the field with restrict. */
783 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
785 tree field
, type
, sfield
= NULL_TREE
;
786 splay_tree_key key
= (splay_tree_key
) var
;
788 if ((mask
& 16) != 0)
790 key
= (splay_tree_key
) &DECL_NAME (var
);
791 gcc_checking_assert (key
!= (splay_tree_key
) var
);
795 key
= (splay_tree_key
) &DECL_UID (var
);
796 gcc_checking_assert (key
!= (splay_tree_key
) var
);
798 gcc_assert ((mask
& 1) == 0
799 || !splay_tree_lookup (ctx
->field_map
, key
));
800 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
801 || !splay_tree_lookup (ctx
->sfield_map
, key
));
802 gcc_assert ((mask
& 3) == 3
803 || !is_gimple_omp_oacc (ctx
->stmt
));
805 type
= TREE_TYPE (var
);
806 if ((mask
& 16) != 0)
807 type
= lang_hooks
.decls
.omp_array_data (var
, true);
809 /* Prevent redeclaring the var in the split-off function with a restrict
810 pointer type. Note that we only clear type itself, restrict qualifiers in
811 the pointed-to type will be ignored by points-to analysis. */
812 if (POINTER_TYPE_P (type
)
813 && TYPE_RESTRICT (type
))
814 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
818 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
819 type
= build_pointer_type (build_pointer_type (type
));
822 type
= build_pointer_type (type
);
823 else if ((mask
& (32 | 3)) == 1
824 && omp_privatize_by_reference (var
))
825 type
= TREE_TYPE (type
);
827 field
= build_decl (DECL_SOURCE_LOCATION (var
),
828 FIELD_DECL
, DECL_NAME (var
), type
);
830 /* Remember what variable this field was created for. This does have a
831 side effect of making dwarf2out ignore this member, so for helpful
832 debugging we clear it later in delete_omp_context. */
833 DECL_ABSTRACT_ORIGIN (field
) = var
;
834 if ((mask
& 16) == 0 && type
== TREE_TYPE (var
))
836 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
837 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
838 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
841 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
845 insert_field_into_struct (ctx
->record_type
, field
);
846 if (ctx
->srecord_type
)
848 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
849 FIELD_DECL
, DECL_NAME (var
), type
);
850 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
851 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
852 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
853 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
854 insert_field_into_struct (ctx
->srecord_type
, sfield
);
859 if (ctx
->srecord_type
== NULL_TREE
)
863 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
864 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
865 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
867 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
868 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
869 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
870 insert_field_into_struct (ctx
->srecord_type
, sfield
);
871 splay_tree_insert (ctx
->sfield_map
,
872 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
873 (splay_tree_value
) sfield
);
877 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
878 : ctx
->srecord_type
, field
);
882 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
883 if ((mask
& 2) && ctx
->sfield_map
)
884 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
888 install_var_local (tree var
, omp_context
*ctx
)
890 tree new_var
= omp_copy_decl_1 (var
, ctx
);
891 insert_decl_map (&ctx
->cb
, var
, new_var
);
895 /* Adjust the replacement for DECL in CTX for the new context. This means
896 copying the DECL_VALUE_EXPR, and fixing up the type. */
899 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
903 new_decl
= lookup_decl (decl
, ctx
);
905 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
907 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
908 && DECL_HAS_VALUE_EXPR_P (decl
))
910 tree ve
= DECL_VALUE_EXPR (decl
);
911 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
912 SET_DECL_VALUE_EXPR (new_decl
, ve
);
913 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
916 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
918 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
919 if (size
== error_mark_node
)
920 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
921 DECL_SIZE (new_decl
) = size
;
923 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
924 if (size
== error_mark_node
)
925 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
926 DECL_SIZE_UNIT (new_decl
) = size
;
930 /* The callback for remap_decl. Search all containing contexts for a
931 mapping of the variable; this avoids having to duplicate the splay
932 tree ahead of time. We know a mapping doesn't already exist in the
933 given context. Create new mappings to implement default semantics. */
936 omp_copy_decl (tree var
, copy_body_data
*cb
)
938 omp_context
*ctx
= (omp_context
*) cb
;
941 if (TREE_CODE (var
) == LABEL_DECL
)
943 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
945 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
946 DECL_CONTEXT (new_var
) = current_function_decl
;
947 insert_decl_map (&ctx
->cb
, var
, new_var
);
951 while (!is_taskreg_ctx (ctx
))
956 new_var
= maybe_lookup_decl (var
, ctx
);
961 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
964 return error_mark_node
;
967 /* Create a new context, with OUTER_CTX being the surrounding context. */
970 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
972 omp_context
*ctx
= XCNEW (omp_context
);
974 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
975 (splay_tree_value
) ctx
);
980 ctx
->outer
= outer_ctx
;
981 ctx
->cb
= outer_ctx
->cb
;
982 ctx
->cb
.block
= NULL
;
983 ctx
->depth
= outer_ctx
->depth
+ 1;
987 ctx
->cb
.src_fn
= current_function_decl
;
988 ctx
->cb
.dst_fn
= current_function_decl
;
989 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
990 gcc_checking_assert (ctx
->cb
.src_node
);
991 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
992 ctx
->cb
.src_cfun
= cfun
;
993 ctx
->cb
.copy_decl
= omp_copy_decl
;
994 ctx
->cb
.eh_lp_nr
= 0;
995 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
996 ctx
->cb
.adjust_array_error_bounds
= true;
997 ctx
->cb
.dont_remap_vla_if_no_change
= true;
1001 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
1006 static gimple_seq
maybe_catch_exception (gimple_seq
);
1008 /* Finalize task copyfn. */
1011 finalize_task_copyfn (gomp_task
*task_stmt
)
1013 struct function
*child_cfun
;
1015 gimple_seq seq
= NULL
, new_seq
;
1018 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
1019 if (child_fn
== NULL_TREE
)
1022 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
1023 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
1025 push_cfun (child_cfun
);
1026 bind
= gimplify_body (child_fn
, false);
1027 gimple_seq_add_stmt (&seq
, bind
);
1028 new_seq
= maybe_catch_exception (seq
);
1031 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
1033 gimple_seq_add_stmt (&seq
, bind
);
1035 gimple_set_body (child_fn
, seq
);
1038 /* Inform the callgraph about the new function. */
1039 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
1040 node
->parallelized_function
= 1;
1041 cgraph_node::add_new_function (child_fn
, false);
1044 /* Destroy a omp_context data structures. Called through the splay tree
1045 value delete callback. */
1048 delete_omp_context (splay_tree_value value
)
1050 omp_context
*ctx
= (omp_context
*) value
;
1052 delete ctx
->cb
.decl_map
;
1055 splay_tree_delete (ctx
->field_map
);
1056 if (ctx
->sfield_map
)
1057 splay_tree_delete (ctx
->sfield_map
);
1059 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1060 it produces corrupt debug information. */
1061 if (ctx
->record_type
)
1064 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
1065 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1067 if (ctx
->srecord_type
)
1070 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
1071 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1074 if (ctx
->task_reduction_map
)
1076 ctx
->task_reductions
.release ();
1077 delete ctx
->task_reduction_map
;
1080 delete ctx
->lastprivate_conditional_map
;
1081 delete ctx
->allocate_map
;
1086 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1090 fixup_child_record_type (omp_context
*ctx
)
1092 tree f
, type
= ctx
->record_type
;
1094 if (!ctx
->receiver_decl
)
1096 /* ??? It isn't sufficient to just call remap_type here, because
1097 variably_modified_type_p doesn't work the way we expect for
1098 record types. Testing each field for whether it needs remapping
1099 and creating a new record by hand works, however. */
1100 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
1101 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
1105 tree name
, new_fields
= NULL
;
1107 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1108 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1109 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1110 TYPE_DECL
, name
, type
);
1111 TYPE_NAME (type
) = name
;
1113 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1115 tree new_f
= copy_node (f
);
1116 DECL_CONTEXT (new_f
) = type
;
1117 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1118 DECL_CHAIN (new_f
) = new_fields
;
1119 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1120 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1122 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1126 /* Arrange to be able to look up the receiver field
1127 given the sender field. */
1128 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1129 (splay_tree_value
) new_f
);
1131 TYPE_FIELDS (type
) = nreverse (new_fields
);
1135 /* In a target region we never modify any of the pointers in *.omp_data_i,
1136 so attempt to help the optimizers. */
1137 if (is_gimple_omp_offloaded (ctx
->stmt
))
1138 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1140 TREE_TYPE (ctx
->receiver_decl
)
1141 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1144 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1145 specified by CLAUSES. */
1148 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1151 bool scan_array_reductions
= false;
1153 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1154 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
1155 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
1156 /* omp_default_mem_alloc is 1 */
1157 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
1158 || OMP_CLAUSE_ALLOCATE_ALIGN (c
) != NULL_TREE
))
1160 /* The allocate clauses that appear on a target construct or on
1161 constructs in a target region must specify an allocator expression
1162 unless a requires directive with the dynamic_allocators clause
1163 is present in the same compilation unit. */
1164 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
1165 && ((omp_requires_mask
& OMP_REQUIRES_DYNAMIC_ALLOCATORS
) == 0)
1166 && omp_maybe_offloaded_ctx (ctx
))
1167 error_at (OMP_CLAUSE_LOCATION (c
), "%<allocate%> clause must"
1168 " specify an allocator here");
1169 if (ctx
->allocate_map
== NULL
)
1170 ctx
->allocate_map
= new hash_map
<tree
, tree
>;
1171 tree val
= integer_zero_node
;
1172 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
1173 val
= OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
1174 if (OMP_CLAUSE_ALLOCATE_ALIGN (c
))
1175 val
= build_tree_list (val
, OMP_CLAUSE_ALLOCATE_ALIGN (c
));
1176 ctx
->allocate_map
->put (OMP_CLAUSE_DECL (c
), val
);
1179 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1183 switch (OMP_CLAUSE_CODE (c
))
1185 case OMP_CLAUSE_PRIVATE
:
1186 decl
= OMP_CLAUSE_DECL (c
);
1187 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1189 else if (!is_variable_sized (decl
))
1190 install_var_local (decl
, ctx
);
1193 case OMP_CLAUSE_SHARED
:
1194 decl
= OMP_CLAUSE_DECL (c
);
1195 if (ctx
->allocate_map
&& ctx
->allocate_map
->get (decl
))
1196 ctx
->allocate_map
->remove (decl
);
1197 /* Ignore shared directives in teams construct inside of
1198 target construct. */
1199 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1200 && !is_host_teams_ctx (ctx
))
1202 /* Global variables don't need to be copied,
1203 the receiver side will use them directly. */
1204 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1205 if (is_global_var (odecl
))
1207 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1210 gcc_assert (is_taskreg_ctx (ctx
));
1211 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1212 || !is_variable_sized (decl
));
1213 /* Global variables don't need to be copied,
1214 the receiver side will use them directly. */
1215 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1217 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1219 use_pointer_for_field (decl
, ctx
);
1222 by_ref
= use_pointer_for_field (decl
, NULL
);
1223 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1224 || TREE_ADDRESSABLE (decl
)
1226 || omp_privatize_by_reference (decl
))
1228 by_ref
= use_pointer_for_field (decl
, ctx
);
1229 install_var_field (decl
, by_ref
, 3, ctx
);
1230 install_var_local (decl
, ctx
);
1233 /* We don't need to copy const scalar vars back. */
1234 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1237 case OMP_CLAUSE_REDUCTION
:
1238 /* Collect 'reduction' clauses on OpenACC compute construct. */
1239 if (is_gimple_omp_oacc (ctx
->stmt
)
1240 && is_gimple_omp_offloaded (ctx
->stmt
))
1242 /* No 'reduction' clauses on OpenACC 'kernels'. */
1243 gcc_checking_assert (!is_oacc_kernels (ctx
));
1244 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1245 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
1247 ctx
->local_reduction_clauses
1248 = tree_cons (NULL
, c
, ctx
->local_reduction_clauses
);
1252 case OMP_CLAUSE_IN_REDUCTION
:
1253 decl
= OMP_CLAUSE_DECL (c
);
1254 if (ctx
->allocate_map
1255 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1256 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
1257 || OMP_CLAUSE_REDUCTION_TASK (c
)))
1258 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1259 || is_task_ctx (ctx
)))
1262 if (ctx
->allocate_map
->get (decl
))
1263 ctx
->allocate_map
->remove (decl
);
1265 if (TREE_CODE (decl
) == MEM_REF
)
1267 tree t
= TREE_OPERAND (decl
, 0);
1268 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1269 t
= TREE_OPERAND (t
, 0);
1270 if (INDIRECT_REF_P (t
)
1271 || TREE_CODE (t
) == ADDR_EXPR
)
1272 t
= TREE_OPERAND (t
, 0);
1273 if (is_omp_target (ctx
->stmt
))
1275 if (is_variable_sized (t
))
1277 gcc_assert (DECL_HAS_VALUE_EXPR_P (t
));
1278 t
= DECL_VALUE_EXPR (t
);
1279 gcc_assert (INDIRECT_REF_P (t
));
1280 t
= TREE_OPERAND (t
, 0);
1281 gcc_assert (DECL_P (t
));
1285 scan_omp_op (&at
, ctx
->outer
);
1286 tree nt
= omp_copy_decl_1 (at
, ctx
->outer
);
1287 splay_tree_insert (ctx
->field_map
,
1288 (splay_tree_key
) &DECL_CONTEXT (t
),
1289 (splay_tree_value
) nt
);
1291 splay_tree_insert (ctx
->field_map
,
1292 (splay_tree_key
) &DECL_CONTEXT (at
),
1293 (splay_tree_value
) nt
);
1296 install_var_local (t
, ctx
);
1297 if (is_taskreg_ctx (ctx
)
1298 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1299 || (is_task_ctx (ctx
)
1300 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1301 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1302 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1303 == POINTER_TYPE
)))))
1304 && !is_variable_sized (t
)
1305 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1306 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1307 && !is_task_ctx (ctx
))))
1309 by_ref
= use_pointer_for_field (t
, NULL
);
1310 if (is_task_ctx (ctx
)
1311 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1312 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1314 install_var_field (t
, false, 1, ctx
);
1315 install_var_field (t
, by_ref
, 2, ctx
);
1318 install_var_field (t
, by_ref
, 3, ctx
);
1322 if (is_omp_target (ctx
->stmt
))
1326 scan_omp_op (&at
, ctx
->outer
);
1327 tree nt
= omp_copy_decl_1 (at
, ctx
->outer
);
1328 splay_tree_insert (ctx
->field_map
,
1329 (splay_tree_key
) &DECL_CONTEXT (decl
),
1330 (splay_tree_value
) nt
);
1332 splay_tree_insert (ctx
->field_map
,
1333 (splay_tree_key
) &DECL_CONTEXT (at
),
1334 (splay_tree_value
) nt
);
1337 if (is_task_ctx (ctx
)
1338 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1339 && OMP_CLAUSE_REDUCTION_TASK (c
)
1340 && is_parallel_ctx (ctx
)))
1342 /* Global variables don't need to be copied,
1343 the receiver side will use them directly. */
1344 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1346 by_ref
= use_pointer_for_field (decl
, ctx
);
1347 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1348 install_var_field (decl
, by_ref
, 3, ctx
);
1350 install_var_local (decl
, ctx
);
1353 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1354 && OMP_CLAUSE_REDUCTION_TASK (c
))
1356 install_var_local (decl
, ctx
);
1361 case OMP_CLAUSE_LASTPRIVATE
:
1362 /* Let the corresponding firstprivate clause create
1364 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1368 case OMP_CLAUSE_FIRSTPRIVATE
:
1369 case OMP_CLAUSE_LINEAR
:
1370 decl
= OMP_CLAUSE_DECL (c
);
1372 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1373 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
1374 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1375 && is_gimple_omp_offloaded (ctx
->stmt
))
1377 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1378 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
1379 && lang_hooks
.decls
.omp_array_data (decl
, true)))
1381 by_ref
= !omp_privatize_by_reference (decl
);
1382 install_var_field (decl
, by_ref
, 3, ctx
);
1384 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1386 if (INDIRECT_REF_P (decl
))
1387 decl
= TREE_OPERAND (decl
, 0);
1388 install_var_field (decl
, true, 3, ctx
);
1390 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1391 install_var_field (decl
, true, 3, ctx
);
1393 install_var_field (decl
, false, 3, ctx
);
1395 if (is_variable_sized (decl
))
1397 if (is_task_ctx (ctx
))
1399 if (ctx
->allocate_map
1400 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1403 if (ctx
->allocate_map
->get (decl
))
1404 ctx
->allocate_map
->remove (decl
);
1406 install_var_field (decl
, false, 1, ctx
);
1410 else if (is_taskreg_ctx (ctx
))
1413 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1414 by_ref
= use_pointer_for_field (decl
, NULL
);
1416 if (is_task_ctx (ctx
)
1417 && (global
|| by_ref
|| omp_privatize_by_reference (decl
)))
1419 if (ctx
->allocate_map
1420 && ctx
->allocate_map
->get (decl
))
1421 install_var_field (decl
, by_ref
, 32 | 1, ctx
);
1423 install_var_field (decl
, false, 1, ctx
);
1425 install_var_field (decl
, by_ref
, 2, ctx
);
1428 install_var_field (decl
, by_ref
, 3, ctx
);
1430 install_var_local (decl
, ctx
);
1431 /* For descr arrays on target: firstprivatize data + attach ptr. */
1432 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1433 && is_gimple_omp_offloaded (ctx
->stmt
)
1434 && !is_gimple_omp_oacc (ctx
->stmt
)
1435 && lang_hooks
.decls
.omp_array_data (decl
, true))
1437 install_var_field (decl
, false, 16 | 3, ctx
);
1438 install_var_field (decl
, true, 8 | 3, ctx
);
1442 case OMP_CLAUSE_USE_DEVICE_PTR
:
1443 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1444 decl
= OMP_CLAUSE_DECL (c
);
1446 /* Fortran array descriptors. */
1447 if (lang_hooks
.decls
.omp_array_data (decl
, true))
1448 install_var_field (decl
, false, 19, ctx
);
1449 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
1450 && !omp_privatize_by_reference (decl
)
1451 && !omp_is_allocatable_or_ptr (decl
))
1452 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1453 install_var_field (decl
, true, 11, ctx
);
1455 install_var_field (decl
, false, 11, ctx
);
1456 if (DECL_SIZE (decl
)
1457 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1459 tree decl2
= DECL_VALUE_EXPR (decl
);
1460 gcc_assert (INDIRECT_REF_P (decl2
));
1461 decl2
= TREE_OPERAND (decl2
, 0);
1462 gcc_assert (DECL_P (decl2
));
1463 install_var_local (decl2
, ctx
);
1465 install_var_local (decl
, ctx
);
1468 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
1469 decl
= OMP_CLAUSE_DECL (c
);
1470 while (INDIRECT_REF_P (decl
)
1471 || TREE_CODE (decl
) == ARRAY_REF
)
1472 decl
= TREE_OPERAND (decl
, 0);
1475 case OMP_CLAUSE_IS_DEVICE_PTR
:
1476 decl
= OMP_CLAUSE_DECL (c
);
1479 case OMP_CLAUSE__LOOPTEMP_
:
1480 case OMP_CLAUSE__REDUCTEMP_
:
1481 gcc_assert (is_taskreg_ctx (ctx
));
1482 decl
= OMP_CLAUSE_DECL (c
);
1483 install_var_field (decl
, false, 3, ctx
);
1484 install_var_local (decl
, ctx
);
1487 case OMP_CLAUSE_COPYPRIVATE
:
1488 case OMP_CLAUSE_COPYIN
:
1489 decl
= OMP_CLAUSE_DECL (c
);
1490 by_ref
= use_pointer_for_field (decl
, NULL
);
1491 install_var_field (decl
, by_ref
, 3, ctx
);
1494 case OMP_CLAUSE_FINAL
:
1496 case OMP_CLAUSE_SELF
:
1497 case OMP_CLAUSE_NUM_THREADS
:
1498 case OMP_CLAUSE_NUM_TEAMS
:
1499 case OMP_CLAUSE_THREAD_LIMIT
:
1500 case OMP_CLAUSE_DEVICE
:
1501 case OMP_CLAUSE_SCHEDULE
:
1502 case OMP_CLAUSE_DIST_SCHEDULE
:
1503 case OMP_CLAUSE_DEPEND
:
1504 case OMP_CLAUSE_PRIORITY
:
1505 case OMP_CLAUSE_GRAINSIZE
:
1506 case OMP_CLAUSE_NUM_TASKS
:
1507 case OMP_CLAUSE_NUM_GANGS
:
1508 case OMP_CLAUSE_NUM_WORKERS
:
1509 case OMP_CLAUSE_VECTOR_LENGTH
:
1510 case OMP_CLAUSE_DETACH
:
1511 case OMP_CLAUSE_FILTER
:
1513 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1517 case OMP_CLAUSE_FROM
:
1518 case OMP_CLAUSE_MAP
:
1520 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1521 decl
= OMP_CLAUSE_DECL (c
);
1522 /* If requested, make 'decl' addressable. */
1523 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1524 && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c
))
1526 gcc_checking_assert (DECL_P (decl
));
1528 bool decl_addressable
= TREE_ADDRESSABLE (decl
);
1529 if (!decl_addressable
)
1531 if (!make_addressable_vars
)
1532 make_addressable_vars
= BITMAP_ALLOC (NULL
);
1533 bitmap_set_bit (make_addressable_vars
, DECL_UID (decl
));
1534 TREE_ADDRESSABLE (decl
) = 1;
1537 if (dump_enabled_p ())
1539 location_t loc
= OMP_CLAUSE_LOCATION (c
);
1540 const dump_user_location_t d_u_loc
1541 = dump_user_location_t::from_location_t (loc
);
1542 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
1544 # pragma GCC diagnostic push
1545 # pragma GCC diagnostic ignored "-Wformat"
1547 if (!decl_addressable
)
1548 dump_printf_loc (MSG_NOTE
, d_u_loc
,
1550 " made addressable\n",
1553 dump_printf_loc (MSG_NOTE
, d_u_loc
,
1555 " already made addressable\n",
1558 # pragma GCC diagnostic pop
1563 OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c
) = 0;
1565 /* Global variables with "omp declare target" attribute
1566 don't need to be copied, the receiver side will use them
1567 directly. However, global variables with "omp declare target link"
1568 attribute need to be copied. Or when ALWAYS modifier is used. */
1569 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1571 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1572 && (OMP_CLAUSE_MAP_KIND (c
)
1573 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
1574 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
1575 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
)
1576 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1577 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1578 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1579 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1580 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_PRESENT_TO
1581 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_PRESENT_FROM
1582 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_PRESENT_TOFROM
1583 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
1584 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1585 && varpool_node::get_create (decl
)->offloadable
1586 && !lookup_attribute ("omp declare target link",
1587 DECL_ATTRIBUTES (decl
)))
1589 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1590 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1592 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1593 not offloaded; there is nothing to map for those. */
1594 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1595 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1596 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1599 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1601 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1602 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1603 && is_omp_target (ctx
->stmt
))
1605 /* If this is an offloaded region, an attach operation should
1606 only exist when the pointer variable is mapped in a prior
1608 If we had an error, we may not have attempted to sort clauses
1609 properly, so avoid the test. */
1610 if (is_gimple_omp_offloaded (ctx
->stmt
)
1613 (maybe_lookup_decl (decl
, ctx
)
1614 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1615 && lookup_attribute ("omp declare target",
1616 DECL_ATTRIBUTES (decl
))));
1618 /* By itself, attach/detach is generated as part of pointer
1619 variable mapping and should not create new variables in the
1620 offloaded region, however sender refs for it must be created
1621 for its address to be passed to the runtime. */
1623 = build_decl (OMP_CLAUSE_LOCATION (c
),
1624 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1625 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1626 insert_field_into_struct (ctx
->record_type
, field
);
1627 /* To not clash with a map of the pointer variable itself,
1628 attach/detach maps have their field looked up by the *clause*
1629 tree expression, not the decl. */
1630 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1631 (splay_tree_key
) c
));
1632 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) c
,
1633 (splay_tree_value
) field
);
1636 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1637 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1638 || (OMP_CLAUSE_MAP_KIND (c
)
1639 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1641 if (TREE_CODE (decl
) == COMPONENT_REF
1642 || (INDIRECT_REF_P (decl
)
1643 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1644 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1646 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1647 == POINTER_TYPE
)))))
1649 if (DECL_SIZE (decl
)
1650 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1652 tree decl2
= DECL_VALUE_EXPR (decl
);
1653 gcc_assert (INDIRECT_REF_P (decl2
));
1654 decl2
= TREE_OPERAND (decl2
, 0);
1655 gcc_assert (DECL_P (decl2
));
1656 install_var_local (decl2
, ctx
);
1658 install_var_local (decl
, ctx
);
1663 if (DECL_SIZE (decl
)
1664 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1666 tree decl2
= DECL_VALUE_EXPR (decl
);
1667 gcc_assert (INDIRECT_REF_P (decl2
));
1668 decl2
= TREE_OPERAND (decl2
, 0);
1669 gcc_assert (DECL_P (decl2
));
1670 install_var_field (decl2
, true, 3, ctx
);
1671 install_var_local (decl2
, ctx
);
1672 install_var_local (decl
, ctx
);
1676 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1677 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1678 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1679 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1680 install_var_field (decl
, true, 7, ctx
);
1682 install_var_field (decl
, true, 3, ctx
);
1683 if (is_gimple_omp_offloaded (ctx
->stmt
)
1684 && !(is_gimple_omp_oacc (ctx
->stmt
)
1685 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
1686 install_var_local (decl
, ctx
);
1691 tree base
= get_base_address (decl
);
1692 tree nc
= OMP_CLAUSE_CHAIN (c
);
1695 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1696 && OMP_CLAUSE_DECL (nc
) == base
1697 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1698 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1700 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1701 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1707 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1708 decl
= OMP_CLAUSE_DECL (c
);
1710 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1711 (splay_tree_key
) decl
));
1713 = build_decl (OMP_CLAUSE_LOCATION (c
),
1714 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1715 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1716 insert_field_into_struct (ctx
->record_type
, field
);
1717 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1718 (splay_tree_value
) field
);
1723 case OMP_CLAUSE_ORDER
:
1724 ctx
->order_concurrent
= true;
1727 case OMP_CLAUSE_BIND
:
1731 case OMP_CLAUSE_NOWAIT
:
1732 case OMP_CLAUSE_ORDERED
:
1733 case OMP_CLAUSE_COLLAPSE
:
1734 case OMP_CLAUSE_UNTIED
:
1735 case OMP_CLAUSE_MERGEABLE
:
1736 case OMP_CLAUSE_PROC_BIND
:
1737 case OMP_CLAUSE_SAFELEN
:
1738 case OMP_CLAUSE_SIMDLEN
:
1739 case OMP_CLAUSE_THREADS
:
1740 case OMP_CLAUSE_SIMD
:
1741 case OMP_CLAUSE_NOGROUP
:
1742 case OMP_CLAUSE_DEFAULTMAP
:
1743 case OMP_CLAUSE_ASYNC
:
1744 case OMP_CLAUSE_WAIT
:
1745 case OMP_CLAUSE_GANG
:
1746 case OMP_CLAUSE_WORKER
:
1747 case OMP_CLAUSE_VECTOR
:
1748 case OMP_CLAUSE_INDEPENDENT
:
1749 case OMP_CLAUSE_AUTO
:
1750 case OMP_CLAUSE_SEQ
:
1751 case OMP_CLAUSE_TILE
:
1752 case OMP_CLAUSE__SIMT_
:
1753 case OMP_CLAUSE_DEFAULT
:
1754 case OMP_CLAUSE_NONTEMPORAL
:
1755 case OMP_CLAUSE_IF_PRESENT
:
1756 case OMP_CLAUSE_FINALIZE
:
1757 case OMP_CLAUSE_TASK_REDUCTION
:
1758 case OMP_CLAUSE_ALLOCATE
:
1761 case OMP_CLAUSE_ALIGNED
:
1762 decl
= OMP_CLAUSE_DECL (c
);
1763 if (is_global_var (decl
)
1764 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1765 install_var_local (decl
, ctx
);
1768 case OMP_CLAUSE__CONDTEMP_
:
1769 decl
= OMP_CLAUSE_DECL (c
);
1770 if (is_parallel_ctx (ctx
))
1772 install_var_field (decl
, false, 3, ctx
);
1773 install_var_local (decl
, ctx
);
1775 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1776 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
1777 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1778 install_var_local (decl
, ctx
);
1781 case OMP_CLAUSE__CACHE_
:
1782 case OMP_CLAUSE_NOHOST
:
1788 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1790 switch (OMP_CLAUSE_CODE (c
))
1792 case OMP_CLAUSE_LASTPRIVATE
:
1793 /* Let the corresponding firstprivate clause create
1795 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1796 scan_array_reductions
= true;
1797 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1801 case OMP_CLAUSE_FIRSTPRIVATE
:
1802 case OMP_CLAUSE_PRIVATE
:
1803 case OMP_CLAUSE_LINEAR
:
1804 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
1805 case OMP_CLAUSE_IS_DEVICE_PTR
:
1806 decl
= OMP_CLAUSE_DECL (c
);
1807 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1809 while (INDIRECT_REF_P (decl
)
1810 || TREE_CODE (decl
) == ARRAY_REF
)
1811 decl
= TREE_OPERAND (decl
, 0);
1814 if (is_variable_sized (decl
))
1816 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1817 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
1818 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1819 && is_gimple_omp_offloaded (ctx
->stmt
))
1821 tree decl2
= DECL_VALUE_EXPR (decl
);
1822 gcc_assert (INDIRECT_REF_P (decl2
));
1823 decl2
= TREE_OPERAND (decl2
, 0);
1824 gcc_assert (DECL_P (decl2
));
1825 install_var_local (decl2
, ctx
);
1826 fixup_remapped_decl (decl2
, ctx
, false);
1828 install_var_local (decl
, ctx
);
1830 fixup_remapped_decl (decl
, ctx
,
1831 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1832 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1833 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1834 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1835 scan_array_reductions
= true;
1838 case OMP_CLAUSE_REDUCTION
:
1839 case OMP_CLAUSE_IN_REDUCTION
:
1840 decl
= OMP_CLAUSE_DECL (c
);
1841 if (TREE_CODE (decl
) != MEM_REF
&& !is_omp_target (ctx
->stmt
))
1843 if (is_variable_sized (decl
))
1844 install_var_local (decl
, ctx
);
1845 fixup_remapped_decl (decl
, ctx
, false);
1847 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1848 scan_array_reductions
= true;
1851 case OMP_CLAUSE_TASK_REDUCTION
:
1852 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1853 scan_array_reductions
= true;
1856 case OMP_CLAUSE_SHARED
:
1857 /* Ignore shared directives in teams construct inside of
1858 target construct. */
1859 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1860 && !is_host_teams_ctx (ctx
))
1862 decl
= OMP_CLAUSE_DECL (c
);
1863 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1865 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1867 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1870 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1871 install_var_field (decl
, by_ref
, 11, ctx
);
1874 fixup_remapped_decl (decl
, ctx
, false);
1877 case OMP_CLAUSE_MAP
:
1878 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1880 decl
= OMP_CLAUSE_DECL (c
);
1882 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1883 && (OMP_CLAUSE_MAP_KIND (c
)
1884 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1885 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1886 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1887 && varpool_node::get_create (decl
)->offloadable
)
1889 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1890 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1891 && is_omp_target (ctx
->stmt
)
1892 && !is_gimple_omp_offloaded (ctx
->stmt
))
1896 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1897 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1898 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1899 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1901 tree new_decl
= lookup_decl (decl
, ctx
);
1902 TREE_TYPE (new_decl
)
1903 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1905 else if (DECL_SIZE (decl
)
1906 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1908 tree decl2
= DECL_VALUE_EXPR (decl
);
1909 gcc_assert (INDIRECT_REF_P (decl2
));
1910 decl2
= TREE_OPERAND (decl2
, 0);
1911 gcc_assert (DECL_P (decl2
));
1912 fixup_remapped_decl (decl2
, ctx
, false);
1913 fixup_remapped_decl (decl
, ctx
, true);
1916 fixup_remapped_decl (decl
, ctx
, false);
1920 case OMP_CLAUSE_COPYPRIVATE
:
1921 case OMP_CLAUSE_COPYIN
:
1922 case OMP_CLAUSE_DEFAULT
:
1924 case OMP_CLAUSE_SELF
:
1925 case OMP_CLAUSE_NUM_THREADS
:
1926 case OMP_CLAUSE_NUM_TEAMS
:
1927 case OMP_CLAUSE_THREAD_LIMIT
:
1928 case OMP_CLAUSE_DEVICE
:
1929 case OMP_CLAUSE_SCHEDULE
:
1930 case OMP_CLAUSE_DIST_SCHEDULE
:
1931 case OMP_CLAUSE_NOWAIT
:
1932 case OMP_CLAUSE_ORDERED
:
1933 case OMP_CLAUSE_COLLAPSE
:
1934 case OMP_CLAUSE_UNTIED
:
1935 case OMP_CLAUSE_FINAL
:
1936 case OMP_CLAUSE_MERGEABLE
:
1937 case OMP_CLAUSE_PROC_BIND
:
1938 case OMP_CLAUSE_SAFELEN
:
1939 case OMP_CLAUSE_SIMDLEN
:
1940 case OMP_CLAUSE_ALIGNED
:
1941 case OMP_CLAUSE_DEPEND
:
1942 case OMP_CLAUSE_DETACH
:
1943 case OMP_CLAUSE_ALLOCATE
:
1944 case OMP_CLAUSE__LOOPTEMP_
:
1945 case OMP_CLAUSE__REDUCTEMP_
:
1947 case OMP_CLAUSE_FROM
:
1948 case OMP_CLAUSE_PRIORITY
:
1949 case OMP_CLAUSE_GRAINSIZE
:
1950 case OMP_CLAUSE_NUM_TASKS
:
1951 case OMP_CLAUSE_THREADS
:
1952 case OMP_CLAUSE_SIMD
:
1953 case OMP_CLAUSE_NOGROUP
:
1954 case OMP_CLAUSE_DEFAULTMAP
:
1955 case OMP_CLAUSE_ORDER
:
1956 case OMP_CLAUSE_BIND
:
1957 case OMP_CLAUSE_USE_DEVICE_PTR
:
1958 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1959 case OMP_CLAUSE_NONTEMPORAL
:
1960 case OMP_CLAUSE_ASYNC
:
1961 case OMP_CLAUSE_WAIT
:
1962 case OMP_CLAUSE_NUM_GANGS
:
1963 case OMP_CLAUSE_NUM_WORKERS
:
1964 case OMP_CLAUSE_VECTOR_LENGTH
:
1965 case OMP_CLAUSE_GANG
:
1966 case OMP_CLAUSE_WORKER
:
1967 case OMP_CLAUSE_VECTOR
:
1968 case OMP_CLAUSE_INDEPENDENT
:
1969 case OMP_CLAUSE_AUTO
:
1970 case OMP_CLAUSE_SEQ
:
1971 case OMP_CLAUSE_TILE
:
1972 case OMP_CLAUSE__SIMT_
:
1973 case OMP_CLAUSE_IF_PRESENT
:
1974 case OMP_CLAUSE_FINALIZE
:
1975 case OMP_CLAUSE_FILTER
:
1976 case OMP_CLAUSE__CONDTEMP_
:
1979 case OMP_CLAUSE__CACHE_
:
1980 case OMP_CLAUSE_NOHOST
:
1986 gcc_checking_assert (!scan_array_reductions
1987 || !is_gimple_omp_oacc (ctx
->stmt
));
1988 if (scan_array_reductions
)
1990 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1991 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1992 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1993 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1994 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1996 omp_context
*rctx
= ctx
;
1997 if (is_omp_target (ctx
->stmt
))
1999 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), rctx
);
2000 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), rctx
);
2002 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
2003 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
2004 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
2005 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
2006 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
2007 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
2011 /* Create a new name for omp child function. Returns an identifier. */
2014 create_omp_child_function_name (bool task_copy
)
2016 return clone_function_name_numbered (current_function_decl
,
2017 task_copy
? "_omp_cpyfn" : "_omp_fn");
2020 /* Return true if CTX may belong to offloaded code: either if current function
2021 is offloaded, or any enclosing context corresponds to a target region. */
2024 omp_maybe_offloaded_ctx (omp_context
*ctx
)
2026 if (cgraph_node::get (current_function_decl
)->offloadable
)
2028 for (; ctx
; ctx
= ctx
->outer
)
2029 if (is_gimple_omp_offloaded (ctx
->stmt
))
2034 /* Build a decl for the omp child function. It'll not contain a body
2035 yet, just the bare decl. */
2038 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
2040 tree decl
, type
, name
, t
;
2042 name
= create_omp_child_function_name (task_copy
);
2044 type
= build_function_type_list (void_type_node
, ptr_type_node
,
2045 ptr_type_node
, NULL_TREE
);
2047 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
2049 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
2051 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
2054 ctx
->cb
.dst_fn
= decl
;
2056 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
2058 TREE_STATIC (decl
) = 1;
2059 TREE_USED (decl
) = 1;
2060 DECL_ARTIFICIAL (decl
) = 1;
2061 DECL_IGNORED_P (decl
) = 0;
2062 TREE_PUBLIC (decl
) = 0;
2063 DECL_UNINLINABLE (decl
) = 1;
2064 DECL_EXTERNAL (decl
) = 0;
2065 DECL_CONTEXT (decl
) = NULL_TREE
;
2066 DECL_INITIAL (decl
) = make_node (BLOCK
);
2067 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
2068 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
2069 /* Remove omp declare simd attribute from the new attributes. */
2070 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
2072 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
2075 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
2076 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
2077 *p
= TREE_CHAIN (*p
);
2080 tree chain
= TREE_CHAIN (*p
);
2081 *p
= copy_node (*p
);
2082 p
= &TREE_CHAIN (*p
);
2086 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
2087 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
2088 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
2089 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
2090 DECL_FUNCTION_VERSIONED (decl
)
2091 = DECL_FUNCTION_VERSIONED (current_function_decl
);
2093 if (omp_maybe_offloaded_ctx (ctx
))
2095 cgraph_node::get_create (decl
)->offloadable
= 1;
2096 if (ENABLE_OFFLOADING
)
2097 g
->have_offload
= true;
2100 if (cgraph_node::get_create (decl
)->offloadable
)
2102 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
2103 ? "omp target entrypoint"
2104 : "omp declare target");
2105 if (lookup_attribute ("omp declare target",
2106 DECL_ATTRIBUTES (current_function_decl
)))
2108 if (is_gimple_omp_offloaded (ctx
->stmt
))
2109 DECL_ATTRIBUTES (decl
)
2110 = remove_attribute ("omp declare target",
2111 copy_list (DECL_ATTRIBUTES (decl
)));
2116 && is_gimple_omp_offloaded (ctx
->stmt
)
2117 && lookup_attribute ("noclone", DECL_ATTRIBUTES (decl
)) == NULL_TREE
)
2118 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("noclone"),
2119 NULL_TREE
, DECL_ATTRIBUTES (decl
));
2121 DECL_ATTRIBUTES (decl
)
2122 = tree_cons (get_identifier (target_attr
),
2123 NULL_TREE
, DECL_ATTRIBUTES (decl
));
2126 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
2127 RESULT_DECL
, NULL_TREE
, void_type_node
);
2128 DECL_ARTIFICIAL (t
) = 1;
2129 DECL_IGNORED_P (t
) = 1;
2130 DECL_CONTEXT (t
) = decl
;
2131 DECL_RESULT (decl
) = t
;
2133 tree data_name
= get_identifier (".omp_data_i");
2134 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
2136 DECL_ARTIFICIAL (t
) = 1;
2137 DECL_NAMELESS (t
) = 1;
2138 DECL_ARG_TYPE (t
) = ptr_type_node
;
2139 DECL_CONTEXT (t
) = current_function_decl
;
2141 TREE_READONLY (t
) = 1;
2142 DECL_ARGUMENTS (decl
) = t
;
2144 ctx
->receiver_decl
= t
;
2147 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
2148 PARM_DECL
, get_identifier (".omp_data_o"),
2150 DECL_ARTIFICIAL (t
) = 1;
2151 DECL_NAMELESS (t
) = 1;
2152 DECL_ARG_TYPE (t
) = ptr_type_node
;
2153 DECL_CONTEXT (t
) = current_function_decl
;
2155 TREE_ADDRESSABLE (t
) = 1;
2156 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
2157 DECL_ARGUMENTS (decl
) = t
;
2160 /* Allocate memory for the function structure. The call to
2161 allocate_struct_function clobbers CFUN, so we need to restore
2163 push_struct_function (decl
);
2164 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
2165 init_tree_ssa (cfun
);
2169 /* Callback for walk_gimple_seq. Check if combined parallel
2170 contains gimple_omp_for_combined_into_p OMP_FOR. */
2173 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
2174 bool *handled_ops_p
,
2175 struct walk_stmt_info
*wi
)
2177 gimple
*stmt
= gsi_stmt (*gsi_p
);
2179 *handled_ops_p
= true;
2180 switch (gimple_code (stmt
))
2184 case GIMPLE_OMP_FOR
:
2185 if (gimple_omp_for_combined_into_p (stmt
)
2186 && gimple_omp_for_kind (stmt
)
2187 == *(const enum gf_mask
*) (wi
->info
))
2190 return integer_zero_node
;
/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.
   MSK is the kind of combined inner loop being looked for
   (GF_OMP_FOR_KIND_FOR or GF_OMP_FOR_KIND_TASKLOOP), STMT the
   GIMPLE_OMP_PARALLEL or GIMPLE_OMP_TASK statement, and OUTER_CTX the
   enclosing context whose copy_body callback map records the created
   temporaries.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  /* Locate the combined GIMPLE_OMP_FOR inside STMT's body; on success
     omp_find_combined_for replaces wi.info with the loop statement.  */
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE)
	      || (msk == GF_OMP_FOR_KIND_FOR
		  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				      OMP_CLAUSE_LASTPRIVATE)))
	    {
	      tree temp = create_tmp_var (type);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__LOOPTEMP_);
	      insert_decl_map (&outer_ctx->cb, temp, temp);
	      OMP_CLAUSE_DECL (c) = temp;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      else if (msk == GF_OMP_FOR_KIND_TASKLOOP
	       && fd.last_nonrect == fd.first_nonrect + 1)
	if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      /* Non-rectangular taskloop: three extra temporaries with
		 the type of the first non-rectangular iterator.  */
	      v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
	      tree type2 = TREE_TYPE (v);
	      for (i = 0; i < 3; i++)
		{
		  tree temp = create_tmp_var (type2);
		  tree c = build_omp_clause (UNKNOWN_LOCATION,
					     OMP_CLAUSE__LOOPTEMP_);
		  insert_decl_map (&outer_ctx->cb, temp, temp);
		  OMP_CLAUSE_DECL (c) = temp;
		  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		  gimple_omp_taskreg_set_clauses (stmt, c);
		}
	    }
      /* Prepend COUNT _looptemp_ clauses (istart/iend plus any countN
	 temporaries computed above).  */
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      /* Taskloop with reductions additionally needs a pointer-sized
	 _reductemp_ slot.  */
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
/* Scan an OpenMP parallel directive.  Builds the omp_context for the
   region, creates the .omp_data_s record type used to marshal shared
   data to the child function, and recursively scans the body.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  /* If any reduction clause is marked as a task reduction, prepend a
     _reductemp_ clause; GOMP_parallel_reductions needs it.  */
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* No fields were added -> nothing needs to be communicated to the
     child function, drop the receiver record entirely.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Scan an OpenMP task directive.  Like scan_omp_parallel, but tasks
   can additionally have a sender-side record (srecord_type) when
   firstprivate data must be copied into the task structure.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  /* A taskwait with depend clauses has no body to outline; only the
     clauses need scanning.  */
  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  /* scan_sharing_clauses may have created a sender-side record; give it
     a type name and create the second child function for it.  */
  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      /* Nothing to pass to the task: drop the record and tell the
	 runtime the task data block is empty (size 0, align 1).  */
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_context returns non-NULL for some
   tree, replace it in the expression.  DATA is the omp_context whose
   outer context the decls are looked up in.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  omp_context *ctx = (omp_context *) data;
  tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
  if (t != *tp)
    {
      /* The decl has a replacement in an outer context; substitute its
	 DECL_VALUE_EXPR if it has one, otherwise the decl itself.  */
      if (DECL_HAS_VALUE_EXPR_P (t))
	t = unshare_expr (DECL_VALUE_EXPR (t));
      *tp = t;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    /* Don't walk into types or unremapped decls.  */
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  For tasks this also reorders fields
   that the libgomp runtime (GOMP_task/GOMP_taskloop) expects at fixed
   offsets, and fills in the final arg_size/arg_align.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any make_addressable_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (make_addressable_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    /* Already a pointer to the decl's type: nothing to fix.  */
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    /* Switch the field from by-value to by-reference and reset
	       alignment/volatility accordingly.  */
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      /* GIMPLE_OMP_TASK.  */
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      tree detach_clause
	= omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_DETACH);
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    /* Unlink the variable-sized field and append it to the
	       VLA_FIELDS chain.  */
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  There are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  /* Re-chain f1 (and f2, optionally f3) at the head.  */
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      /* Mirror the reordering on the sender-side record.  */
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      if (detach_clause)
	{
	  tree c, field;

	  /* Look for a firstprivate clause with the detach event handle.  */
	  for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	       c; c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
		continue;
	      if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
		  == OMP_CLAUSE_DECL (detach_clause))
		break;
	    }

	  gcc_assert (c);
	  field = lookup_field (OMP_CLAUSE_DECL (c), ctx);

	  /* Move field corresponding to the detach clause first.
	     This is filled by GOMP_task and needs to be in a
	     specific position.  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == field)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = field;
	  if (ctx->srecord_type)
	    {
	      field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == field)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = field;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* The task data size may reference VLA sizes from the outer
	 context; remap those decls before storing it.  */
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
/* Find the enclosing offload context.  Walks up the chain of contexts
   starting at CTX and returns the innermost one whose statement is a
   GIMPLE_OMP_TARGET, or NULL if there is none.  */

static omp_context *
enclosing_target_ctx (omp_context *ctx)
{
  for (; ctx; ctx = ctx->outer)
    if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
      break;

  return ctx;
}
/* Return whether CTX's parent compute construct is an OpenACC 'kernels'
   construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
ctx_in_oacc_kernels_region (omp_context *ctx)
{
  for (;ctx != NULL; ctx = ctx->outer)
    {
      gimple *stmt = ctx->stmt;
      if (gimple_code (stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
	return true;
    }

  return false;
}
/* Check the parallelism clauses inside a OpenACC 'kernels' region.
   (This doesn't include OpenACC 'kernels' decomposed parts.)
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.

   STMT is the gomp_for being checked (NULL when recursing purely to
   collect the parallelism mask of outer contexts); CTX is its context.
   Returns the union of the GOMP_DIM_MASK bits used by this loop and
   all enclosing loops; diagnostics are only emitted on the outermost
   (checking) invocation.  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
  if (!stmt)
    {
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  /* Accumulate gang/worker/vector bits and note seq/auto clauses.  */
  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
/* Scan a GIMPLE_OMP_FOR.  Creates the loop's omp_context, performs
   OpenACC-specific clause diagnostics and reduction bookkeeping, then
   scans the clauses, pre-body, loop control operands and body.
   Returns the newly created context.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      /* Outside of 'kernels' regions, gang/worker/vector clauses may
	 not carry an argument expression on loops in routines or
	 'parallel' regions.  */
      if (!(tgt && is_oacc_kernels (tgt)))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    tree c_op0;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		c_op0 = OMP_CLAUSE_GANG_EXPR (c);
		break;

	      case OMP_CLAUSE_WORKER:
		c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
		break;

	      case OMP_CLAUSE_VECTOR:
		c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
		break;

	      default:
		continue;
	      }

	    if (c_op0)
	      {
		/* By construction, this is impossible for OpenACC 'kernels'
		   decomposed parts.  */
		gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));

		error_at (OMP_CLAUSE_LOCATION (c),
			  "argument not permitted on %qs clause",
			  omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		if (tgt)
		  inform (gimple_location (tgt->stmt),
			  "enclosing parent compute construct");
		else if (oacc_get_fn_attrib (current_function_decl))
		  inform (DECL_SOURCE_LOCATION (current_function_decl),
			  "enclosing routine");
		else
		  gcc_unreachable ();
	      }
	  }

      if (tgt && is_oacc_kernels (tgt))
	check_oacc_kernel_gwv (stmt, ctx);

      /* Collect all variables named in reductions on this loop.  Ensure
	 that, if this loop has a reduction on some variable v, and there is
	 a reduction on v somewhere in an outer context, then there is a
	 reduction on v on all intervening loops as well.  */
      tree local_reduction_clauses = NULL;
      for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	    local_reduction_clauses
	      = tree_cons (NULL, c, local_reduction_clauses);
	}
      if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
	ctx->outer_reduction_clauses
	  = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
		     ctx->outer->outer_reduction_clauses);
      tree outer_reduction_clauses = ctx->outer_reduction_clauses;
      tree local_iter = local_reduction_clauses;
      for (; local_iter; local_iter = TREE_CHAIN (local_iter))
	{
	  tree local_clause = TREE_VALUE (local_iter);
	  tree local_var = OMP_CLAUSE_DECL (local_clause);
	  tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
	  bool have_outer_reduction = false;
	  tree ctx_iter = outer_reduction_clauses;
	  for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
	    {
	      tree outer_clause = TREE_VALUE (ctx_iter);
	      tree outer_var = OMP_CLAUSE_DECL (outer_clause);
	      tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
	      /* Same variable, different reduction operator: warn.  */
	      if (outer_var == local_var && outer_op != local_op)
		{
		  warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
			      "conflicting reduction operations for %qE",
			      local_var);
		  inform (OMP_CLAUSE_LOCATION (outer_clause),
			  "location of the previous reduction for %qE",
			  outer_var);
		}
	      if (outer_var == local_var)
		{
		  have_outer_reduction = true;
		  break;
		}
	    }
	  if (have_outer_reduction)
	    {
	      /* There is a reduction on outer_var both on this loop and on
		 some enclosing loop.  Walk up the context tree until such a
		 loop with a reduction on outer_var is found, and complain
		 about all intervening loops that do not have such a
		 reduction.  */
	      struct omp_context *curr_loop = ctx->outer;
	      bool found = false;
	      while (curr_loop != NULL)
		{
		  tree curr_iter = curr_loop->local_reduction_clauses;
		  for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
		    {
		      tree curr_clause = TREE_VALUE (curr_iter);
		      tree curr_var = OMP_CLAUSE_DECL (curr_clause);
		      if (curr_var == local_var)
			{
			  found = true;
			  break;
			}
		    }
		  if (!found)
		    warning_at (gimple_location (curr_loop->stmt), 0,
				"nested loop in reduction needs "
				"reduction clause for %qE",
				local_var);
		  else
		    break;
		  curr_loop = curr_loop->outer;
		}
	    }
	}
      ctx->local_reduction_clauses = local_reduction_clauses;
      ctx->outer_reduction_clauses
	= chainon (unshare_expr (ctx->local_reduction_clauses),
		   ctx->outer_reduction_clauses);

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  /* Scan index, initial value, final value and increment of every
     collapsed loop dimension.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.
   Replaces STMT at *GSI by a GIMPLE_BIND of the shape:

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop, with an added _simt_ clause>; goto lab3;
     lab2: <the original loop>;
     lab3:

   so that the target can later select either version.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  /* The SIMT copy gets an extra _simt_ clause so later passes can tell
     the two versions apart.  */
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  /* Scan both copies; link the SIMT copy to the SIMD context.  */
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
2973 static tree
omp_find_scan (gimple_stmt_iterator
*, bool *,
2974 struct walk_stmt_info
*);
2975 static omp_context
*maybe_lookup_ctx (gimple
*);
/* Duplicate #pragma omp simd, one for the scan input phase loop and one
   for scan phase loop.  STMT at *GSI is replaced by a GIMPLE_OMP_SCAN
   holding the input-phase copy, followed by a second GIMPLE_OMP_SCAN
   (with an inclusive clause) holding the scan-phase copy.  */

static void
scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
		    omp_context *outer_ctx)
{
  /* The only change between inclusive and exclusive scan will be
     within the first simd loop, so just use inclusive in the
     worksharing loop.  */
  outer_ctx->scan_inclusive = true;
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
  OMP_CLAUSE_DECL (c) = integer_zero_node;

  gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
  gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
  gsi_replace (gsi, input_stmt, false);
  gimple_seq input_body = NULL;
  gimple_seq_add_stmt (&input_body, stmt);
  gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);

  /* Find the inner scan separator inside the original loop body.  */
  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gsi_next (&input1_gsi);
  gimple *scan_stmt1 = gsi_stmt (input1_gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
  /* For exclusive scan the phases appear in the opposite order.  */
  c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt1, scan_stmt1);

  /* Temporarily detach the input-phase body so the copy below does not
     duplicate it.  */
  gimple_seq input_body1 = gimple_omp_body (input_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);

  gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (scan_body);

  gimple_omp_set_body (input_stmt1, input_body1);
  gimple_omp_set_body (scan_stmt1, NULL);

  /* Locate the scan separator again inside the copied loop.  */
  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
		       NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi_next (&input2_gsi);
  gimple *scan_stmt2 = gsi_stmt (input2_gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt2, scan_stmt2);

  gimple_omp_set_body (input_stmt2, NULL);

  gimple_omp_set_body (input_stmt, input_body);
  gimple_omp_set_body (scan_stmt, scan_body);

  omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (input_stmt), ctx);

  ctx = new_omp_context (scan_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);

  maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
}
/* Scan an OpenMP sections directive.  Creates a context for the
   construct, scans its data-sharing clauses and then its body.  */

static void
scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;

  ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
}
/* Scan an OpenMP single directive.  Builds the .omp_copy_s record used
   to implement copyprivate; the record is discarded if no fields were
   added.  */

static void
scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}
/* Scan a GIMPLE_OMP_TARGET.  Builds the .omp_data_t record describing
   the mapped data and, for offloaded regions, the child function.  Also
   diagnoses a 'target' whose body mixes a nested 'teams' with other
   directives.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* Fields were prepended during scanning; restore clause order.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  /* All mapped slots are expected to share one alignment.  */
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }

  if (ctx->teams_nested_p && ctx->nonteams_nested_p)
    {
      error_at (gimple_location (stmt),
		"%<target%> construct with nested %<teams%> construct "
		"contains directives outside of the %<teams%> construct");
      /* Discard the erroneous body so later passes see a valid region.  */
      gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
    }
}
/* Scan an OpenMP teams directive.  Non-host teams only need their
   clauses and body scanned; host teams are outlined like parallel
   regions, with a .omp_data_s record and a child function.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  if (!gimple_omp_teams_host (stmt))
    {
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  taskreg_contexts.safe_push (ctx);
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
3181 /* Check nesting restrictions. */
3183 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
3187 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3188 inside an OpenACC CTX. */
3189 if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3190 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
)
3191 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3193 else if (!(is_gimple_omp (stmt
)
3194 && is_gimple_omp_oacc (stmt
)))
3196 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3198 error_at (gimple_location (stmt
),
3199 "non-OpenACC construct inside of OpenACC routine");
3203 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
3204 if (is_gimple_omp (octx
->stmt
)
3205 && is_gimple_omp_oacc (octx
->stmt
))
3207 error_at (gimple_location (stmt
),
3208 "non-OpenACC construct inside of OpenACC region");
3215 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
3216 && gimple_omp_target_kind (ctx
->stmt
) == GF_OMP_TARGET_KIND_REGION
)
3218 c
= omp_find_clause (gimple_omp_target_clauses (ctx
->stmt
),
3220 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
3222 error_at (gimple_location (stmt
),
3223 "OpenMP constructs are not allowed in target region "
3224 "with %<ancestor%>");
3228 if (gimple_code (stmt
) == GIMPLE_OMP_TEAMS
&& !ctx
->teams_nested_p
)
3229 ctx
->teams_nested_p
= true;
3231 ctx
->nonteams_nested_p
= true;
3233 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
3235 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
3237 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3238 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
3242 if (ctx
->order_concurrent
3243 && (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
3244 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3245 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
3247 error_at (gimple_location (stmt
),
3248 "OpenMP constructs other than %<parallel%>, %<loop%>"
3249 " or %<simd%> may not be nested inside a region with"
3250 " the %<order(concurrent)%> clause");
3253 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
3255 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3256 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3258 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
3259 && (ctx
->outer
== NULL
3260 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
3261 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
3262 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
3263 != GF_OMP_FOR_KIND_FOR
)
3264 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
3266 error_at (gimple_location (stmt
),
3267 "%<ordered simd threads%> must be closely "
3268 "nested inside of %<%s simd%> region",
3269 lang_GNU_Fortran () ? "do" : "for");
3275 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3276 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
3277 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
3279 else if (gimple_code (stmt
) == GIMPLE_OMP_FOR
3280 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
3282 error_at (gimple_location (stmt
),
3283 "OpenMP constructs other than "
3284 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3285 "not be nested inside %<simd%> region");
3288 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
3290 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
3291 || (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
3292 && omp_find_clause (gimple_omp_for_clauses (stmt
),
3293 OMP_CLAUSE_BIND
) == NULL_TREE
))
3294 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
3296 error_at (gimple_location (stmt
),
3297 "only %<distribute%>, %<parallel%> or %<loop%> "
3298 "regions are allowed to be strictly nested inside "
3299 "%<teams%> region");
3303 else if (ctx
->order_concurrent
3304 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
3305 && (gimple_code (stmt
) != GIMPLE_OMP_FOR
3306 || gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_SIMD
)
3307 && gimple_code (stmt
) != GIMPLE_OMP_SCAN
)
3310 error_at (gimple_location (stmt
),
3311 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3312 "%<simd%> may not be nested inside a %<loop%> region");
3314 error_at (gimple_location (stmt
),
3315 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3316 "%<simd%> may not be nested inside a region with "
3317 "the %<order(concurrent)%> clause");
3321 switch (gimple_code (stmt
))
3323 case GIMPLE_OMP_FOR
:
3324 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
)
3326 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
3328 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
3330 error_at (gimple_location (stmt
),
3331 "%<distribute%> region must be strictly nested "
3332 "inside %<teams%> construct");
3337 /* We split taskloop into task and nested taskloop in it. */
3338 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3340 /* For now, hope this will change and loop bind(parallel) will not
3341 be allowed in lots of contexts. */
3342 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
3343 && omp_find_clause (gimple_omp_for_clauses (stmt
), OMP_CLAUSE_BIND
))
3345 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
3350 switch (gimple_code (ctx
->stmt
))
3352 case GIMPLE_OMP_FOR
:
3353 ok
= (gimple_omp_for_kind (ctx
->stmt
)
3354 == GF_OMP_FOR_KIND_OACC_LOOP
);
3357 case GIMPLE_OMP_TARGET
:
3358 switch (gimple_omp_target_kind (ctx
->stmt
))
3360 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3361 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3362 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3363 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3364 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3375 else if (oacc_get_fn_attrib (current_function_decl
))
3379 error_at (gimple_location (stmt
),
3380 "OpenACC loop directive must be associated with"
3381 " an OpenACC compute region");
3387 if (is_gimple_call (stmt
)
3388 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3389 == BUILT_IN_GOMP_CANCEL
3390 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3391 == BUILT_IN_GOMP_CANCELLATION_POINT
))
3393 const char *bad
= NULL
;
3394 const char *kind
= NULL
;
3395 const char *construct
3396 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3397 == BUILT_IN_GOMP_CANCEL
)
3399 : "cancellation point";
3402 error_at (gimple_location (stmt
), "orphaned %qs construct",
3406 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
3407 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
3411 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
3413 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3414 == BUILT_IN_GOMP_CANCEL
3415 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3416 ctx
->cancellable
= true;
3420 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3421 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
3423 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3424 == BUILT_IN_GOMP_CANCEL
3425 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3427 ctx
->cancellable
= true;
3428 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3430 warning_at (gimple_location (stmt
), 0,
3431 "%<cancel for%> inside "
3432 "%<nowait%> for construct");
3433 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3434 OMP_CLAUSE_ORDERED
))
3435 warning_at (gimple_location (stmt
), 0,
3436 "%<cancel for%> inside "
3437 "%<ordered%> for construct");
3442 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
3443 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
3445 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3446 == BUILT_IN_GOMP_CANCEL
3447 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3449 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
3451 ctx
->cancellable
= true;
3452 if (omp_find_clause (gimple_omp_sections_clauses
3455 warning_at (gimple_location (stmt
), 0,
3456 "%<cancel sections%> inside "
3457 "%<nowait%> sections construct");
3461 gcc_assert (ctx
->outer
3462 && gimple_code (ctx
->outer
->stmt
)
3463 == GIMPLE_OMP_SECTIONS
);
3464 ctx
->outer
->cancellable
= true;
3465 if (omp_find_clause (gimple_omp_sections_clauses
3468 warning_at (gimple_location (stmt
), 0,
3469 "%<cancel sections%> inside "
3470 "%<nowait%> sections construct");
3476 if (!is_task_ctx (ctx
)
3477 && (!is_taskloop_ctx (ctx
)
3478 || ctx
->outer
== NULL
3479 || !is_task_ctx (ctx
->outer
)))
3483 for (omp_context
*octx
= ctx
->outer
;
3484 octx
; octx
= octx
->outer
)
3486 switch (gimple_code (octx
->stmt
))
3488 case GIMPLE_OMP_TASKGROUP
:
3490 case GIMPLE_OMP_TARGET
:
3491 if (gimple_omp_target_kind (octx
->stmt
)
3492 != GF_OMP_TARGET_KIND_REGION
)
3495 case GIMPLE_OMP_PARALLEL
:
3496 case GIMPLE_OMP_TEAMS
:
3497 error_at (gimple_location (stmt
),
3498 "%<%s taskgroup%> construct not closely "
3499 "nested inside of %<taskgroup%> region",
3502 case GIMPLE_OMP_TASK
:
3503 if (gimple_omp_task_taskloop_p (octx
->stmt
)
3505 && is_taskloop_ctx (octx
->outer
))
3508 = gimple_omp_for_clauses (octx
->outer
->stmt
);
3509 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
3518 ctx
->cancellable
= true;
3523 error_at (gimple_location (stmt
), "invalid arguments");
3528 error_at (gimple_location (stmt
),
3529 "%<%s %s%> construct not closely nested inside of %qs",
3530 construct
, kind
, bad
);
3535 case GIMPLE_OMP_SECTIONS
:
3536 case GIMPLE_OMP_SINGLE
:
3537 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3538 switch (gimple_code (ctx
->stmt
))
3540 case GIMPLE_OMP_FOR
:
3541 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3542 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3545 case GIMPLE_OMP_SECTIONS
:
3546 case GIMPLE_OMP_SINGLE
:
3547 case GIMPLE_OMP_ORDERED
:
3548 case GIMPLE_OMP_MASTER
:
3549 case GIMPLE_OMP_MASKED
:
3550 case GIMPLE_OMP_TASK
:
3551 case GIMPLE_OMP_CRITICAL
:
3552 if (is_gimple_call (stmt
))
3554 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3555 != BUILT_IN_GOMP_BARRIER
)
3557 error_at (gimple_location (stmt
),
3558 "barrier region may not be closely nested inside "
3559 "of work-sharing, %<loop%>, %<critical%>, "
3560 "%<ordered%>, %<master%>, %<masked%>, explicit "
3561 "%<task%> or %<taskloop%> region");
3564 error_at (gimple_location (stmt
),
3565 "work-sharing region may not be closely nested inside "
3566 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3567 "%<master%>, %<masked%>, explicit %<task%> or "
3568 "%<taskloop%> region");
3570 case GIMPLE_OMP_PARALLEL
:
3571 case GIMPLE_OMP_TEAMS
:
3573 case GIMPLE_OMP_TARGET
:
3574 if (gimple_omp_target_kind (ctx
->stmt
)
3575 == GF_OMP_TARGET_KIND_REGION
)
3582 case GIMPLE_OMP_MASTER
:
3583 case GIMPLE_OMP_MASKED
:
3584 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3585 switch (gimple_code (ctx
->stmt
))
3587 case GIMPLE_OMP_FOR
:
3588 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3589 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3592 case GIMPLE_OMP_SECTIONS
:
3593 case GIMPLE_OMP_SINGLE
:
3594 case GIMPLE_OMP_TASK
:
3595 error_at (gimple_location (stmt
),
3596 "%qs region may not be closely nested inside "
3597 "of work-sharing, %<loop%>, explicit %<task%> or "
3598 "%<taskloop%> region",
3599 gimple_code (stmt
) == GIMPLE_OMP_MASTER
3600 ? "master" : "masked");
3602 case GIMPLE_OMP_PARALLEL
:
3603 case GIMPLE_OMP_TEAMS
:
3605 case GIMPLE_OMP_TARGET
:
3606 if (gimple_omp_target_kind (ctx
->stmt
)
3607 == GF_OMP_TARGET_KIND_REGION
)
3614 case GIMPLE_OMP_SCOPE
:
3615 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3616 switch (gimple_code (ctx
->stmt
))
3618 case GIMPLE_OMP_FOR
:
3619 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3620 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3623 case GIMPLE_OMP_SECTIONS
:
3624 case GIMPLE_OMP_SINGLE
:
3625 case GIMPLE_OMP_TASK
:
3626 case GIMPLE_OMP_CRITICAL
:
3627 case GIMPLE_OMP_ORDERED
:
3628 case GIMPLE_OMP_MASTER
:
3629 case GIMPLE_OMP_MASKED
:
3630 error_at (gimple_location (stmt
),
3631 "%<scope%> region may not be closely nested inside "
3632 "of work-sharing, %<loop%>, explicit %<task%>, "
3633 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3634 "or %<masked%> region");
3636 case GIMPLE_OMP_PARALLEL
:
3637 case GIMPLE_OMP_TEAMS
:
3639 case GIMPLE_OMP_TARGET
:
3640 if (gimple_omp_target_kind (ctx
->stmt
)
3641 == GF_OMP_TARGET_KIND_REGION
)
3648 case GIMPLE_OMP_TASK
:
3649 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3650 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
)
3652 enum omp_clause_doacross_kind kind
= OMP_CLAUSE_DOACROSS_KIND (c
);
3653 error_at (OMP_CLAUSE_LOCATION (c
),
3654 "%<%s(%s)%> is only allowed in %<omp ordered%>",
3655 OMP_CLAUSE_DOACROSS_DEPEND (c
) ? "depend" : "doacross",
3656 kind
== OMP_CLAUSE_DOACROSS_SOURCE
? "source" : "sink");
3660 case GIMPLE_OMP_ORDERED
:
3661 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3662 c
; c
= OMP_CLAUSE_CHAIN (c
))
3664 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DOACROSS
)
3666 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
3668 error_at (OMP_CLAUSE_LOCATION (c
),
3669 "invalid depend kind in omp %<ordered%> %<depend%>");
3672 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
3673 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
3678 /* Look for containing ordered(N) loop. */
3680 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3682 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3683 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
3685 error_at (OMP_CLAUSE_LOCATION (c
),
3686 "%<ordered%> construct with %<depend%> clause "
3687 "must be closely nested inside an %<ordered%> loop");
3691 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3692 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3694 /* ordered simd must be closely nested inside of simd region,
3695 and simd region must not encounter constructs other than
3696 ordered simd, therefore ordered simd may be either orphaned,
3697 or ctx->stmt must be simd. The latter case is handled already
3701 error_at (gimple_location (stmt
),
3702 "%<ordered%> %<simd%> must be closely nested inside "
3707 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3708 switch (gimple_code (ctx
->stmt
))
3710 case GIMPLE_OMP_CRITICAL
:
3711 case GIMPLE_OMP_TASK
:
3712 case GIMPLE_OMP_ORDERED
:
3713 ordered_in_taskloop
:
3714 error_at (gimple_location (stmt
),
3715 "%<ordered%> region may not be closely nested inside "
3716 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3717 "%<taskloop%> region");
3719 case GIMPLE_OMP_FOR
:
3720 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3721 goto ordered_in_taskloop
;
3723 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3724 OMP_CLAUSE_ORDERED
);
3727 error_at (gimple_location (stmt
),
3728 "%<ordered%> region must be closely nested inside "
3729 "a loop region with an %<ordered%> clause");
3732 if (!gimple_omp_ordered_standalone_p (stmt
))
3734 if (OMP_CLAUSE_ORDERED_DOACROSS (o
))
3736 error_at (gimple_location (stmt
),
3737 "%<ordered%> construct without %<doacross%> or "
3738 "%<depend%> clauses must not have the same "
3739 "binding region as %<ordered%> construct with "
3743 else if (OMP_CLAUSE_ORDERED_EXPR (o
))
3746 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3747 OMP_CLAUSE_COLLAPSE
);
3749 o_n
= tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (o
));
3750 HOST_WIDE_INT c_n
= 1;
3752 c_n
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co
));
3755 error_at (gimple_location (stmt
),
3756 "%<ordered%> construct without %<doacross%> "
3757 "or %<depend%> clauses binds to loop where "
3758 "%<collapse%> argument %wd is different from "
3759 "%<ordered%> argument %wd", c_n
, o_n
);
3765 case GIMPLE_OMP_TARGET
:
3766 if (gimple_omp_target_kind (ctx
->stmt
)
3767 != GF_OMP_TARGET_KIND_REGION
)
3770 case GIMPLE_OMP_PARALLEL
:
3771 case GIMPLE_OMP_TEAMS
:
3772 error_at (gimple_location (stmt
),
3773 "%<ordered%> region must be closely nested inside "
3774 "a loop region with an %<ordered%> clause");
3780 case GIMPLE_OMP_CRITICAL
:
3783 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3784 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3785 if (gomp_critical
*other_crit
3786 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3787 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3789 error_at (gimple_location (stmt
),
3790 "%<critical%> region may not be nested inside "
3791 "a %<critical%> region with the same name");
3796 case GIMPLE_OMP_TEAMS
:
3799 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3800 || (gimple_omp_target_kind (ctx
->stmt
)
3801 != GF_OMP_TARGET_KIND_REGION
))
3803 /* Teams construct can appear either strictly nested inside of
3804 target construct with no intervening stmts, or can be encountered
3805 only by initial task (so must not appear inside any OpenMP
3807 error_at (gimple_location (stmt
),
3808 "%<teams%> construct must be closely nested inside of "
3809 "%<target%> construct or not nested in any OpenMP "
3814 case GIMPLE_OMP_TARGET
:
3815 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3816 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
)
3818 enum omp_clause_doacross_kind kind
= OMP_CLAUSE_DOACROSS_KIND (c
);
3819 error_at (OMP_CLAUSE_LOCATION (c
),
3820 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3821 kind
== OMP_CLAUSE_DOACROSS_SOURCE
? "source" : "sink");
3824 if (is_gimple_omp_offloaded (stmt
)
3825 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3827 error_at (gimple_location (stmt
),
3828 "OpenACC region inside of OpenACC routine, nested "
3829 "parallelism not supported yet");
3832 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3834 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3836 if (is_gimple_omp (stmt
)
3837 && is_gimple_omp_oacc (stmt
)
3838 && is_gimple_omp (ctx
->stmt
))
3840 error_at (gimple_location (stmt
),
3841 "OpenACC construct inside of non-OpenACC region");
3847 const char *stmt_name
, *ctx_stmt_name
;
3848 switch (gimple_omp_target_kind (stmt
))
3850 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3851 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3852 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3853 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3854 stmt_name
= "target enter data"; break;
3855 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3856 stmt_name
= "target exit data"; break;
3857 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3858 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3859 case GF_OMP_TARGET_KIND_OACC_SERIAL
: stmt_name
= "serial"; break;
3860 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3861 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3862 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
3863 stmt_name
= "enter data"; break;
3864 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
3865 stmt_name
= "exit data"; break;
3866 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3867 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3869 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3870 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3871 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
3872 /* OpenACC 'kernels' decomposed parts. */
3873 stmt_name
= "kernels"; break;
3874 default: gcc_unreachable ();
3876 switch (gimple_omp_target_kind (ctx
->stmt
))
3878 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3879 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3880 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3881 ctx_stmt_name
= "parallel"; break;
3882 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3883 ctx_stmt_name
= "kernels"; break;
3884 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3885 ctx_stmt_name
= "serial"; break;
3886 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3887 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3888 ctx_stmt_name
= "host_data"; break;
3889 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3890 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3891 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
3892 /* OpenACC 'kernels' decomposed parts. */
3893 ctx_stmt_name
= "kernels"; break;
3894 default: gcc_unreachable ();
3897 /* OpenACC/OpenMP mismatch? */
3898 if (is_gimple_omp_oacc (stmt
)
3899 != is_gimple_omp_oacc (ctx
->stmt
))
3901 error_at (gimple_location (stmt
),
3902 "%s %qs construct inside of %s %qs region",
3903 (is_gimple_omp_oacc (stmt
)
3904 ? "OpenACC" : "OpenMP"), stmt_name
,
3905 (is_gimple_omp_oacc (ctx
->stmt
)
3906 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3909 if (is_gimple_omp_offloaded (ctx
->stmt
))
3911 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3912 if (is_gimple_omp_oacc (ctx
->stmt
))
3914 error_at (gimple_location (stmt
),
3915 "%qs construct inside of %qs region",
3916 stmt_name
, ctx_stmt_name
);
3921 if ((gimple_omp_target_kind (ctx
->stmt
)
3922 == GF_OMP_TARGET_KIND_REGION
)
3923 && (gimple_omp_target_kind (stmt
)
3924 == GF_OMP_TARGET_KIND_REGION
))
3926 c
= omp_find_clause (gimple_omp_target_clauses (stmt
),
3928 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
3931 warning_at (gimple_location (stmt
), 0,
3932 "%qs construct inside of %qs region",
3933 stmt_name
, ctx_stmt_name
);
3945 /* Helper function scan_omp.
3947 Callback for walk_tree or operators in walk_gimple_stmt used to
3948 scan for OMP directives in TP. */
/* NOTE(review): this span is a garbled extraction — statements are split
   across lines and some original lines are absent.  Comments added only;
   the surviving text is left byte-identical.  */
/* Remaps DECLs and types referenced from *TP into CTX's copy body
   (ctx->cb) via remap_decl / remap_type; walk_subtrees/data follow the
   walk_tree callback convention.  */
3951 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3953 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
/* The scanning context was stashed in wi->info by scan_omp (below).  */
3954 omp_context
*ctx
= (omp_context
*) wi
->info
;
3958 switch (TREE_CODE (t
))
/* Decl case: a VAR_DECL carrying the "omp allocate var" attribute is
   replaced by the artificial variable recorded in the attribute value;
   the remapped reference is then taken by address.  */
3967 if (TREE_CODE (t
) == VAR_DECL
3968 && (tmp
= lookup_attribute ("omp allocate var",
3969 DECL_ATTRIBUTES (t
))) != NULL_TREE
)
3970 t
= TREE_VALUE (TREE_VALUE (tmp
));
3971 tree repl
= remap_decl (t
, &ctx
->cb
);
3972 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3973 if (tmp
!= NULL_TREE
&& t
!= repl
)
3974 *tp
= build_fold_addr_expr (repl
);
3975 else if (tmp
== NULL_TREE
)
/* Dereference case: an INDIRECT_REF/MEM_REF of an "omp allocate var"
   variable remaps the recorded replacement decl instead.  */
3983 && TREE_CODE (TREE_OPERAND (t
, 0)) == VAR_DECL
3984 && ((tmp
= lookup_attribute ("omp allocate var",
3985 DECL_ATTRIBUTES (TREE_OPERAND (t
, 0))))
3988 tmp
= TREE_VALUE (TREE_VALUE (tmp
));
3989 tree repl
= remap_decl (tmp
, &ctx
->cb
);
3990 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
/* Default case: remap bare types; for other non-decl trees remap the
   node's type, rebuilding INTEGER_CSTs so the constant carries the
   remapped type.  */
3998 if (ctx
&& TYPE_P (t
))
3999 *tp
= remap_type (t
, &ctx
->cb
);
4000 else if (!DECL_P (t
))
4005 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
4006 if (tem
!= TREE_TYPE (t
))
4008 if (TREE_CODE (t
) == INTEGER_CST
)
4009 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
4011 TREE_TYPE (t
) = tem
;
4021 /* Return true if FNDECL is a setjmp or a longjmp. */
4024 setjmp_or_longjmp_p (const_tree fndecl
)
4026 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
, BUILT_IN_LONGJMP
))
4029 tree declname
= DECL_NAME (fndecl
);
4031 || (DECL_CONTEXT (fndecl
) != NULL_TREE
4032 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
4033 || !TREE_PUBLIC (fndecl
))
4036 const char *name
= IDENTIFIER_POINTER (declname
);
4037 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
4040 /* Helper function for scan_omp.
4042 Callback for walk_gimple_stmt used to scan for OMP directives in
4043 the current statement in GSI. */
/* NOTE(review): this span is a garbled extraction — statements are split
   across lines and some original lines are absent.  Comments added only;
   the surviving text is left byte-identical.  */
4046 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
4047 struct walk_stmt_info
*wi
)
4049 gimple
*stmt
= gsi_stmt (*gsi
);
4050 omp_context
*ctx
= (omp_context
*) wi
->info
;
/* Point diagnostics at the statement being scanned.  */
4052 if (gimple_has_location (stmt
))
4053 input_location
= gimple_location (stmt
);
4055 /* Check the nesting restrictions. */
4056 bool remove
= false;
4057 if (is_gimple_omp (stmt
))
4058 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
4059 else if (is_gimple_call (stmt
))
4061 tree fndecl
= gimple_call_fndecl (stmt
);
/* setjmp/longjmp are rejected inside a simd region.  */
4065 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4066 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
4067 && setjmp_or_longjmp_p (fndecl
)
4071 error_at (gimple_location (stmt
),
4072 "setjmp/longjmp inside %<simd%> construct");
/* The libgomp synchronization/cancellation builtins get the same
   nesting checks as explicit OMP statements.  */
4074 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
4075 switch (DECL_FUNCTION_CODE (fndecl
))
4077 case BUILT_IN_GOMP_BARRIER
:
4078 case BUILT_IN_GOMP_CANCEL
:
4079 case BUILT_IN_GOMP_CANCELLATION_POINT
:
4080 case BUILT_IN_GOMP_TASKYIELD
:
4081 case BUILT_IN_GOMP_TASKWAIT
:
4082 case BUILT_IN_GOMP_TASKGROUP_START
:
4083 case BUILT_IN_GOMP_TASKGROUP_END
:
4084 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
/* For other calls, diagnose OpenMP runtime API calls made in
   contexts where the spec forbids them.  A scan construct defers
   to its enclosing context for the order(concurrent) check.  */
4091 omp_context
*octx
= ctx
;
4092 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
&& ctx
->outer
)
4094 if (octx
->order_concurrent
&& omp_runtime_api_call (fndecl
))
4097 error_at (gimple_location (stmt
),
4098 "OpenMP runtime API call %qD in a region with "
4099 "%<order(concurrent)%> clause", fndecl
);
/* Inside teams, only omp_get_num_teams/omp_get_team_num are OK.  */
4101 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4102 && omp_runtime_api_call (fndecl
)
4103 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl
))
4104 != strlen ("omp_get_num_teams"))
4105 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl
)),
4106 "omp_get_num_teams") != 0)
4107 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl
))
4108 != strlen ("omp_get_team_num"))
4109 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl
)),
4110 "omp_get_team_num") != 0))
4113 error_at (gimple_location (stmt
),
4114 "OpenMP runtime API call %qD strictly nested in a "
4115 "%<teams%> region", fndecl
);
/* Runtime API calls are also invalid in a target region with
   device(ancestor).  */
4117 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
4118 && (gimple_omp_target_kind (ctx
->stmt
)
4119 == GF_OMP_TARGET_KIND_REGION
)
4120 && omp_runtime_api_call (fndecl
))
4122 tree tgt_clauses
= gimple_omp_target_clauses (ctx
->stmt
);
4123 tree c
= omp_find_clause (tgt_clauses
, OMP_CLAUSE_DEVICE
);
4124 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
4125 error_at (gimple_location (stmt
),
4126 "OpenMP runtime API call %qD in a region with "
4127 "%<device(ancestor)%> clause", fndecl
);
/* Invalidly nested statements are replaced with a no-op so the
   rest of the pass never sees them.  */
4134 stmt
= gimple_build_nop ();
4135 gsi_replace (gsi
, stmt
, false);
4138 *handled_ops_p
= true;
/* Dispatch the per-construct scanners.  taskreg_nesting_level brackets
   constructs that create a new task region.  */
4140 switch (gimple_code (stmt
))
4142 case GIMPLE_OMP_PARALLEL
:
4143 taskreg_nesting_level
++;
4144 scan_omp_parallel (gsi
, ctx
);
4145 taskreg_nesting_level
--;
4148 case GIMPLE_OMP_TASK
:
4149 taskreg_nesting_level
++;
4150 scan_omp_task (gsi
, ctx
);
4151 taskreg_nesting_level
--;
4154 case GIMPLE_OMP_FOR
:
/* A combined simd with an inscan reduction is scanned by the
   dedicated simd-scan helper.  */
4155 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
4156 == GF_OMP_FOR_KIND_SIMD
)
4157 && gimple_omp_for_combined_into_p (stmt
)
4158 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SCAN
)
4160 tree clauses
= gimple_omp_for_clauses (as_a
<gomp_for
*> (stmt
));
4161 tree c
= omp_find_clause (clauses
, OMP_CLAUSE_REDUCTION
);
4162 if (c
&& OMP_CLAUSE_REDUCTION_INSCAN (c
) && !seen_error ())
4164 scan_omp_simd_scan (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
/* A simd loop that may be offloaded to a SIMT device uses the
   SIMT-aware scanner.  */
4168 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
4169 == GF_OMP_FOR_KIND_SIMD
)
4170 && omp_maybe_offloaded_ctx (ctx
)
4171 && omp_max_simt_vf ()
4172 && gimple_omp_for_collapse (stmt
) == 1)
4173 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
4175 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
4178 case GIMPLE_OMP_SCOPE
:
4179 ctx
= new_omp_context (stmt
, ctx
);
4180 scan_sharing_clauses (gimple_omp_scope_clauses (stmt
), ctx
);
4181 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4184 case GIMPLE_OMP_SECTIONS
:
4185 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
4188 case GIMPLE_OMP_SINGLE
:
4189 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
/* A scan records whether it is inclusive or exclusive on the
   enclosing context before the common body scan.  */
4192 case GIMPLE_OMP_SCAN
:
4193 if (tree clauses
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)))
4195 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_INCLUSIVE
)
4196 ctx
->scan_inclusive
= true;
4197 else if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_EXCLUSIVE
)
4198 ctx
->scan_exclusive
= true;
4201 case GIMPLE_OMP_SECTION
:
4202 case GIMPLE_OMP_STRUCTURED_BLOCK
:
4203 case GIMPLE_OMP_MASTER
:
4204 case GIMPLE_OMP_ORDERED
:
4205 case GIMPLE_OMP_CRITICAL
:
4206 ctx
= new_omp_context (stmt
, ctx
);
4207 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4210 case GIMPLE_OMP_MASKED
:
4211 ctx
= new_omp_context (stmt
, ctx
);
4212 scan_sharing_clauses (gimple_omp_masked_clauses (stmt
), ctx
);
4213 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4216 case GIMPLE_OMP_TASKGROUP
:
4217 ctx
= new_omp_context (stmt
, ctx
);
4218 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
4219 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* Offloaded targets (and host teams) count as task regions.  */
4222 case GIMPLE_OMP_TARGET
:
4223 if (is_gimple_omp_offloaded (stmt
))
4225 taskreg_nesting_level
++;
4226 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
4227 taskreg_nesting_level
--;
4230 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
4233 case GIMPLE_OMP_TEAMS
:
4234 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
4236 taskreg_nesting_level
++;
4237 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
4238 taskreg_nesting_level
--;
4241 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
/* GIMPLE_BIND: record an identity mapping for each bound variable so
   remapping leaves them alone; let the walker descend (handled_ops_p
   false).  */
4248 *handled_ops_p
= false;
4250 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
4252 var
= DECL_CHAIN (var
))
4253 insert_decl_map (&ctx
->cb
, var
, var
);
/* Default: not an OMP construct — let the walker proceed normally.  */
4257 *handled_ops_p
= false;
4265 /* Scan all the statements starting at the current statement. CTX
4266 contains context information about the OMP directives and
4267 clauses found during the scan. */
4270 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
4272 location_t saved_location
;
4273 struct walk_stmt_info wi
;
4275 memset (&wi
, 0, sizeof (wi
));
4277 wi
.want_locations
= true;
4279 saved_location
= input_location
;
4280 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
4281 input_location
= saved_location
;
4284 /* Re-gimplification and code generation routines. */
4286 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4287 of BIND if in a method. */
4290 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
4292 if (DECL_ARGUMENTS (current_function_decl
)
4293 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
4294 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
4297 tree vars
= gimple_bind_vars (bind
);
4298 for (tree
*pvar
= &vars
; *pvar
; )
4299 if (omp_member_access_dummy_var (*pvar
))
4300 *pvar
= DECL_CHAIN (*pvar
);
4302 pvar
= &DECL_CHAIN (*pvar
);
4303 gimple_bind_set_vars (bind
, vars
);
4307 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4308 block and its subblocks. */
4311 remove_member_access_dummy_vars (tree block
)
4313 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
4314 if (omp_member_access_dummy_var (*pvar
))
4315 *pvar
= DECL_CHAIN (*pvar
);
4317 pvar
= &DECL_CHAIN (*pvar
);
4319 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
4320 remove_member_access_dummy_vars (block
);
4323 /* If a context was created for STMT when it was scanned, return it. */
4325 static omp_context
*
4326 maybe_lookup_ctx (gimple
*stmt
)
4329 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
4330 return n
? (omp_context
*) n
->value
: NULL
;
4334 /* Find the mapping for DECL in CTX or the immediately enclosing
4335 context that has a mapping for DECL.
4337 If CTX is a nested parallel directive, we may have to use the decl
4338 mappings created in CTX's parent context. Suppose that we have the
4339 following parallel nesting (variable UIDs showed for clarity):
4342 #omp parallel shared(iD.1562) -> outer parallel
4343 iD.1562 = iD.1562 + 1;
4345 #omp parallel shared (iD.1562) -> inner parallel
4346 iD.1562 = iD.1562 - 1;
4348 Each parallel structure will create a distinct .omp_data_s structure
4349 for copying iD.1562 in/out of the directive:
4351 outer parallel .omp_data_s.1.i -> iD.1562
4352 inner parallel .omp_data_s.2.i -> iD.1562
4354 A shared variable mapping will produce a copy-out operation before
4355 the parallel directive and a copy-in operation after it. So, in
4356 this case we would have:
4359 .omp_data_o.1.i = iD.1562;
4360 #omp parallel shared(iD.1562) -> outer parallel
4361 .omp_data_i.1 = &.omp_data_o.1
4362 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4364 .omp_data_o.2.i = iD.1562; -> **
4365 #omp parallel shared(iD.1562) -> inner parallel
4366 .omp_data_i.2 = &.omp_data_o.2
4367 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4370 ** This is a problem. The symbol iD.1562 cannot be referenced
4371 inside the body of the outer parallel region. But since we are
4372 emitting this copy operation while expanding the inner parallel
4373 directive, we need to access the CTX structure of the outer
4374 parallel directive to get the correct mapping:
4376 .omp_data_o.2.i = .omp_data_i.1->i
4378 Since there may be other workshare or parallel directives enclosing
4379 the parallel directive, it may be necessary to walk up the context
4380 parent chain. This is not a problem in general because nested
4381 parallelism happens only rarely. */
4384 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4389 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4390 t
= maybe_lookup_decl (decl
, up
);
4392 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
4394 return t
? t
: decl
;
4398 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4399 in outer contexts. */
4402 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4407 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4408 t
= maybe_lookup_decl (decl
, up
);
4410 return t
? t
: decl
;
4414 /* Construct the initialization value for reduction operation OP. */
4417 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
4426 case TRUTH_ORIF_EXPR
:
4427 case TRUTH_XOR_EXPR
:
4429 return build_zero_cst (type
);
4432 case TRUTH_AND_EXPR
:
4433 case TRUTH_ANDIF_EXPR
:
4435 return fold_convert_loc (loc
, type
, integer_one_node
);
4438 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
4441 if (SCALAR_FLOAT_TYPE_P (type
))
4443 REAL_VALUE_TYPE min
;
4444 if (HONOR_INFINITIES (type
))
4445 real_arithmetic (&min
, NEGATE_EXPR
, &dconstinf
, NULL
);
4447 real_maxval (&min
, 1, TYPE_MODE (type
));
4448 return build_real (type
, min
);
4450 else if (POINTER_TYPE_P (type
))
4453 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4454 return wide_int_to_tree (type
, min
);
4458 gcc_assert (INTEGRAL_TYPE_P (type
));
4459 return TYPE_MIN_VALUE (type
);
4463 if (SCALAR_FLOAT_TYPE_P (type
))
4465 REAL_VALUE_TYPE max
;
4466 if (HONOR_INFINITIES (type
))
4469 real_maxval (&max
, 0, TYPE_MODE (type
));
4470 return build_real (type
, max
);
4472 else if (POINTER_TYPE_P (type
))
4475 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4476 return wide_int_to_tree (type
, max
);
4480 gcc_assert (INTEGRAL_TYPE_P (type
));
4481 return TYPE_MAX_VALUE (type
);
4489 /* Construct the initialization value for reduction CLAUSE. */
4492 omp_reduction_init (tree clause
, tree type
)
4494 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
4495 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
4498 /* Return alignment to be assumed for var in CLAUSE, which should be
4499 OMP_CLAUSE_ALIGNED. */
4502 omp_clause_aligned_alignment (tree clause
)
4504 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
4505 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
4507 /* Otherwise return implementation defined alignment. */
4508 unsigned int al
= 1;
4509 opt_scalar_mode mode_iter
;
4510 auto_vector_modes modes
;
4511 targetm
.vectorize
.autovectorize_vector_modes (&modes
, true);
4512 static enum mode_class classes
[]
4513 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
4514 for (int i
= 0; i
< 4; i
+= 2)
4515 /* The for loop above dictates that we only walk through scalar classes. */
4516 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
4518 scalar_mode mode
= mode_iter
.require ();
4519 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
4520 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
4522 machine_mode alt_vmode
;
4523 for (unsigned int j
= 0; j
< modes
.length (); ++j
)
4524 if (related_vector_mode (modes
[j
], mode
).exists (&alt_vmode
)
4525 && known_ge (GET_MODE_SIZE (alt_vmode
), GET_MODE_SIZE (vmode
)))
4528 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
4529 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
4531 type
= build_vector_type_for_mode (type
, vmode
);
4532 if (TYPE_MODE (type
) != vmode
)
4534 if (TYPE_ALIGN_UNIT (type
) > al
)
4535 al
= TYPE_ALIGN_UNIT (type
);
4537 return build_int_cst (integer_type_node
, al
);
4541 /* This structure is part of the interface between lower_rec_simd_input_clauses
4542 and lower_rec_input_clauses. */
4544 class omplow_simd_context
{
4546 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4550 vec
<tree
, va_heap
> simt_eargs
;
4551 gimple_seq simt_dlist
;
4556 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4560 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
4561 omplow_simd_context
*sctx
, tree
&ivar
,
4562 tree
&lvar
, tree
*rvar
= NULL
,
4565 if (known_eq (sctx
->max_vf
, 0U))
4567 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
4568 if (maybe_gt (sctx
->max_vf
, 1U))
4570 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4571 OMP_CLAUSE_SAFELEN
);
4574 poly_uint64 safe_len
;
4575 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4576 || maybe_lt (safe_len
, 1U))
4579 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
4582 if (sctx
->is_simt
&& !known_eq (sctx
->max_vf
, 1U))
4584 for (tree c
= gimple_omp_for_clauses (ctx
->stmt
); c
;
4585 c
= OMP_CLAUSE_CHAIN (c
))
4587 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4590 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4592 /* UDR reductions are not supported yet for SIMT, disable
4598 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c
))
4599 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var
)))
4601 /* Doing boolean operations on non-integral types is
4602 for conformance only, it's not worth supporting this
4609 if (maybe_gt (sctx
->max_vf
, 1U))
4611 sctx
->idx
= create_tmp_var (unsigned_type_node
);
4612 sctx
->lane
= create_tmp_var (unsigned_type_node
);
4615 if (known_eq (sctx
->max_vf
, 1U))
4620 if (is_gimple_reg (new_var
))
4622 ivar
= lvar
= new_var
;
4625 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
4626 ivar
= lvar
= create_tmp_var (type
);
4627 TREE_ADDRESSABLE (ivar
) = 1;
4628 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
4629 NULL
, DECL_ATTRIBUTES (ivar
));
4630 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
4631 tree clobber
= build_clobber (type
);
4632 gimple
*g
= gimple_build_assign (ivar
, clobber
);
4633 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
4637 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
4638 tree avar
= create_tmp_var_raw (atype
);
4639 if (TREE_ADDRESSABLE (new_var
))
4640 TREE_ADDRESSABLE (avar
) = 1;
4641 DECL_ATTRIBUTES (avar
)
4642 = tree_cons (get_identifier ("omp simd array"), NULL
,
4643 DECL_ATTRIBUTES (avar
));
4644 gimple_add_tmp_var (avar
);
4646 if (rvar
&& !ctx
->for_simd_scan_phase
)
4648 /* For inscan reductions, create another array temporary,
4649 which will hold the reduced value. */
4650 iavar
= create_tmp_var_raw (atype
);
4651 if (TREE_ADDRESSABLE (new_var
))
4652 TREE_ADDRESSABLE (iavar
) = 1;
4653 DECL_ATTRIBUTES (iavar
)
4654 = tree_cons (get_identifier ("omp simd array"), NULL
,
4655 tree_cons (get_identifier ("omp simd inscan"), NULL
,
4656 DECL_ATTRIBUTES (iavar
)));
4657 gimple_add_tmp_var (iavar
);
4658 ctx
->cb
.decl_map
->put (avar
, iavar
);
4659 if (sctx
->lastlane
== NULL_TREE
)
4660 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
4661 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
4662 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
4663 TREE_THIS_NOTRAP (*rvar
) = 1;
4665 if (ctx
->scan_exclusive
)
4667 /* And for exclusive scan yet another one, which will
4668 hold the value during the scan phase. */
4669 tree savar
= create_tmp_var_raw (atype
);
4670 if (TREE_ADDRESSABLE (new_var
))
4671 TREE_ADDRESSABLE (savar
) = 1;
4672 DECL_ATTRIBUTES (savar
)
4673 = tree_cons (get_identifier ("omp simd array"), NULL
,
4674 tree_cons (get_identifier ("omp simd inscan "
4676 DECL_ATTRIBUTES (savar
)));
4677 gimple_add_tmp_var (savar
);
4678 ctx
->cb
.decl_map
->put (iavar
, savar
);
4679 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
4680 sctx
->idx
, NULL_TREE
, NULL_TREE
);
4681 TREE_THIS_NOTRAP (*rvar2
) = 1;
4684 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
4685 NULL_TREE
, NULL_TREE
);
4686 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
4687 NULL_TREE
, NULL_TREE
);
4688 TREE_THIS_NOTRAP (ivar
) = 1;
4689 TREE_THIS_NOTRAP (lvar
) = 1;
4691 if (DECL_P (new_var
))
4693 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4694 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4699 /* Helper function of lower_rec_input_clauses. For a reference
4700 in simd reduction, add an underlying variable it will reference. */
4703 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
4705 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
4706 if (TREE_CONSTANT (z
))
4708 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
4709 get_name (new_vard
));
4710 gimple_add_tmp_var (z
);
4711 TREE_ADDRESSABLE (z
) = 1;
4712 z
= build_fold_addr_expr_loc (loc
, z
);
4713 gimplify_assign (new_vard
, z
, ilist
);
4717 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4718 code to emit (type) (tskred_temp[idx]). */
4721 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
4724 unsigned HOST_WIDE_INT sz
4725 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
4726 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
4727 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
4729 tree v
= create_tmp_var (pointer_sized_int_node
);
4730 gimple
*g
= gimple_build_assign (v
, r
);
4731 gimple_seq_add_stmt (ilist
, g
);
4732 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
4734 v
= create_tmp_var (type
);
4735 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
4736 gimple_seq_add_stmt (ilist
, g
);
4741 /* Lower early initialization of privatized variable NEW_VAR
4742 if it needs an allocator (has allocate clause). */
4745 lower_private_allocate (tree var
, tree new_var
, tree
&allocator
,
4746 tree
&allocate_ptr
, gimple_seq
*ilist
,
4747 omp_context
*ctx
, bool is_ref
, tree size
)
4751 gcc_assert (allocate_ptr
== NULL_TREE
);
4752 if (ctx
->allocate_map
4753 && (DECL_P (new_var
) || (TYPE_P (new_var
) && size
)))
4754 if (tree
*allocatorp
= ctx
->allocate_map
->get (var
))
4755 allocator
= *allocatorp
;
4756 if (allocator
== NULL_TREE
)
4758 if (!is_ref
&& omp_privatize_by_reference (var
))
4760 allocator
= NULL_TREE
;
4764 unsigned HOST_WIDE_INT ialign
= 0;
4765 if (TREE_CODE (allocator
) == TREE_LIST
)
4767 ialign
= tree_to_uhwi (TREE_VALUE (allocator
));
4768 allocator
= TREE_PURPOSE (allocator
);
4770 if (TREE_CODE (allocator
) != INTEGER_CST
)
4771 allocator
= build_outer_var_ref (allocator
, ctx
, OMP_CLAUSE_ALLOCATE
);
4772 allocator
= fold_convert (pointer_sized_int_node
, allocator
);
4773 if (TREE_CODE (allocator
) != INTEGER_CST
)
4775 tree var
= create_tmp_var (TREE_TYPE (allocator
));
4776 gimplify_assign (var
, allocator
, ilist
);
4780 tree ptr_type
, align
, sz
= size
;
4781 if (TYPE_P (new_var
))
4783 ptr_type
= build_pointer_type (new_var
);
4784 ialign
= MAX (ialign
, TYPE_ALIGN_UNIT (new_var
));
4788 ptr_type
= build_pointer_type (TREE_TYPE (TREE_TYPE (new_var
)));
4789 ialign
= MAX (ialign
, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type
)));
4793 ptr_type
= build_pointer_type (TREE_TYPE (new_var
));
4794 ialign
= MAX (ialign
, DECL_ALIGN_UNIT (new_var
));
4795 if (sz
== NULL_TREE
)
4796 sz
= fold_convert (size_type_node
, DECL_SIZE_UNIT (new_var
));
4798 align
= build_int_cst (size_type_node
, ialign
);
4799 if (TREE_CODE (sz
) != INTEGER_CST
)
4801 tree szvar
= create_tmp_var (size_type_node
);
4802 gimplify_assign (szvar
, sz
, ilist
);
4805 allocate_ptr
= create_tmp_var (ptr_type
);
4806 tree a
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
4807 gimple
*g
= gimple_build_call (a
, 3, align
, sz
, allocator
);
4808 gimple_call_set_lhs (g
, allocate_ptr
);
4809 gimple_seq_add_stmt (ilist
, g
);
4812 tree x
= build_simple_mem_ref (allocate_ptr
);
4813 TREE_THIS_NOTRAP (x
) = 1;
4814 SET_DECL_VALUE_EXPR (new_var
, x
);
4815 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4820 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4821 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4822 private variables. Initialization statements go in ILIST, while calls
4823 to destructors go in DLIST. */
4826 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
4827 omp_context
*ctx
, struct omp_for_data
*fd
)
4829 tree c
, copyin_seq
, x
, ptr
;
4830 bool copyin_by_ref
= false;
4831 bool lastprivate_firstprivate
= false;
4832 bool reduction_omp_orig_ref
= false;
4834 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4835 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
4836 omplow_simd_context sctx
= omplow_simd_context ();
4837 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
4838 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
4839 gimple_seq llist
[4] = { };
4840 tree nonconst_simd_if
= NULL_TREE
;
4843 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
4845 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4846 with data sharing clauses referencing variable sized vars. That
4847 is unnecessarily hard to support and very unlikely to result in
4848 vectorized code anyway. */
4850 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4851 switch (OMP_CLAUSE_CODE (c
))
4853 case OMP_CLAUSE_LINEAR
:
4854 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4857 case OMP_CLAUSE_PRIVATE
:
4858 case OMP_CLAUSE_FIRSTPRIVATE
:
4859 case OMP_CLAUSE_LASTPRIVATE
:
4860 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
4862 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c
)))
4864 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4865 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4869 case OMP_CLAUSE_REDUCTION
:
4870 case OMP_CLAUSE_IN_REDUCTION
:
4871 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
4872 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
4874 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c
)))
4876 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4877 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4882 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
4884 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
4885 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
4887 case OMP_CLAUSE_SIMDLEN
:
4888 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
4891 case OMP_CLAUSE__CONDTEMP_
:
4892 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4900 /* Add a placeholder for simduid. */
4901 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
4902 sctx
.simt_eargs
.safe_push (NULL_TREE
);
4904 unsigned task_reduction_cnt
= 0;
4905 unsigned task_reduction_cntorig
= 0;
4906 unsigned task_reduction_cnt_full
= 0;
4907 unsigned task_reduction_cntorig_full
= 0;
4908 unsigned task_reduction_other_cnt
= 0;
4909 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
4910 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
4911 /* Do all the fixed sized types in the first pass, and the variable sized
4912 types in the second pass. This makes sure that the scalar arguments to
4913 the variable sized types are processed before we use them in the
4914 variable sized operations. For task reductions we use 4 passes, in the
4915 first two we ignore them, in the third one gather arguments for
4916 GOMP_task_reduction_remap call and in the last pass actually handle
4917 the task reductions. */
4918 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
4921 if (pass
== 2 && task_reduction_cnt
)
4924 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
4925 + task_reduction_cntorig
);
4926 tskred_avar
= create_tmp_var_raw (tskred_atype
);
4927 gimple_add_tmp_var (tskred_avar
);
4928 TREE_ADDRESSABLE (tskred_avar
) = 1;
4929 task_reduction_cnt_full
= task_reduction_cnt
;
4930 task_reduction_cntorig_full
= task_reduction_cntorig
;
4932 else if (pass
== 3 && task_reduction_cnt
)
4934 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
4936 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
4937 size_int (task_reduction_cntorig
),
4938 build_fold_addr_expr (tskred_avar
));
4939 gimple_seq_add_stmt (ilist
, g
);
4941 if (pass
== 3 && task_reduction_other_cnt
)
4943 /* For reduction clauses, build
4944 tskred_base = (void *) tskred_temp[2]
4945 + omp_get_thread_num () * tskred_temp[1]
4946 or if tskred_temp[1] is known to be constant, that constant
4947 directly. This is the start of the private reduction copy block
4948 for the current thread. */
4949 tree v
= create_tmp_var (integer_type_node
);
4950 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
4951 gimple
*g
= gimple_build_call (x
, 0);
4952 gimple_call_set_lhs (g
, v
);
4953 gimple_seq_add_stmt (ilist
, g
);
4954 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
4955 tskred_temp
= OMP_CLAUSE_DECL (c
);
4956 if (is_taskreg_ctx (ctx
))
4957 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
4958 tree v2
= create_tmp_var (sizetype
);
4959 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
4960 gimple_seq_add_stmt (ilist
, g
);
4961 if (ctx
->task_reductions
[0])
4962 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
4964 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
4965 tree v3
= create_tmp_var (sizetype
);
4966 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
4967 gimple_seq_add_stmt (ilist
, g
);
4968 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
4969 tskred_base
= create_tmp_var (ptr_type_node
);
4970 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
4971 gimple_seq_add_stmt (ilist
, g
);
4973 task_reduction_cnt
= 0;
4974 task_reduction_cntorig
= 0;
4975 task_reduction_other_cnt
= 0;
4976 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4978 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
4981 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4982 bool task_reduction_p
= false;
4983 bool task_reduction_needs_orig_p
= false;
4984 tree cond
= NULL_TREE
;
4985 tree allocator
, allocate_ptr
;
4989 case OMP_CLAUSE_PRIVATE
:
4990 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
4993 case OMP_CLAUSE_SHARED
:
4994 /* Ignore shared directives in teams construct inside
4995 of target construct. */
4996 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4997 && !is_host_teams_ctx (ctx
))
4999 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
5001 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
5002 || is_global_var (OMP_CLAUSE_DECL (c
)));
5005 case OMP_CLAUSE_FIRSTPRIVATE
:
5006 case OMP_CLAUSE_COPYIN
:
5008 case OMP_CLAUSE_LINEAR
:
5009 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
5010 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
5011 lastprivate_firstprivate
= true;
5013 case OMP_CLAUSE_REDUCTION
:
5014 case OMP_CLAUSE_IN_REDUCTION
:
5015 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
5016 || is_task_ctx (ctx
)
5017 || OMP_CLAUSE_REDUCTION_TASK (c
))
5019 task_reduction_p
= true;
5020 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
5022 task_reduction_other_cnt
++;
5027 task_reduction_cnt
++;
5028 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5030 var
= OMP_CLAUSE_DECL (c
);
5031 /* If var is a global variable that isn't privatized
5032 in outer contexts, we don't need to look up the
5033 original address, it is always the address of the
5034 global variable itself. */
5036 || omp_privatize_by_reference (var
)
5038 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
5040 task_reduction_needs_orig_p
= true;
5041 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5042 task_reduction_cntorig
++;
5046 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5047 reduction_omp_orig_ref
= true;
5049 case OMP_CLAUSE__REDUCTEMP_
:
5050 if (!is_taskreg_ctx (ctx
))
5053 case OMP_CLAUSE__LOOPTEMP_
:
5054 /* Handle _looptemp_/_reductemp_ clauses only on
5059 case OMP_CLAUSE_LASTPRIVATE
:
5060 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5062 lastprivate_firstprivate
= true;
5063 if (pass
!= 0 || is_taskloop_ctx (ctx
))
5066 /* Even without corresponding firstprivate, if
5067 decl is Fortran allocatable, it needs outer var
5070 && lang_hooks
.decls
.omp_private_outer_ref
5071 (OMP_CLAUSE_DECL (c
)))
5072 lastprivate_firstprivate
= true;
5074 case OMP_CLAUSE_ALIGNED
:
5077 var
= OMP_CLAUSE_DECL (c
);
5078 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
5079 && !is_global_var (var
))
5081 new_var
= maybe_lookup_decl (var
, ctx
);
5082 if (new_var
== NULL_TREE
)
5083 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5084 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
5085 tree alarg
= omp_clause_aligned_alignment (c
);
5086 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
5087 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
5088 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5089 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5090 gimplify_and_add (x
, ilist
);
5092 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
5093 && is_global_var (var
))
5095 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
5096 new_var
= lookup_decl (var
, ctx
);
5097 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5098 t
= build_fold_addr_expr_loc (clause_loc
, t
);
5099 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
5100 tree alarg
= omp_clause_aligned_alignment (c
);
5101 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
5102 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
5103 t
= fold_convert_loc (clause_loc
, ptype
, t
);
5104 x
= create_tmp_var (ptype
);
5105 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
5106 gimplify_and_add (t
, ilist
);
5107 t
= build_simple_mem_ref_loc (clause_loc
, x
);
5108 SET_DECL_VALUE_EXPR (new_var
, t
);
5109 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5112 case OMP_CLAUSE__CONDTEMP_
:
5113 if (is_parallel_ctx (ctx
)
5114 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
5121 if (task_reduction_p
!= (pass
>= 2))
5124 allocator
= NULL_TREE
;
5125 allocate_ptr
= NULL_TREE
;
5126 new_var
= var
= OMP_CLAUSE_DECL (c
);
5127 if ((c_kind
== OMP_CLAUSE_REDUCTION
5128 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5129 && TREE_CODE (var
) == MEM_REF
)
5131 var
= TREE_OPERAND (var
, 0);
5132 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
5133 var
= TREE_OPERAND (var
, 0);
5134 if (TREE_CODE (var
) == INDIRECT_REF
5135 || TREE_CODE (var
) == ADDR_EXPR
)
5136 var
= TREE_OPERAND (var
, 0);
5137 if (is_variable_sized (var
))
5139 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
5140 var
= DECL_VALUE_EXPR (var
);
5141 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
5142 var
= TREE_OPERAND (var
, 0);
5143 gcc_assert (DECL_P (var
));
5147 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
&& is_omp_target (ctx
->stmt
))
5149 splay_tree_key key
= (splay_tree_key
) &DECL_CONTEXT (var
);
5150 new_var
= (tree
) splay_tree_lookup (ctx
->field_map
, key
)->value
;
5152 else if (c_kind
!= OMP_CLAUSE_COPYIN
)
5153 new_var
= lookup_decl (var
, ctx
);
5155 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
5160 /* C/C++ array section reductions. */
5161 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5162 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5163 && var
!= OMP_CLAUSE_DECL (c
))
5168 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
5169 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
5171 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
5173 tree b
= TREE_OPERAND (orig_var
, 1);
5174 if (is_omp_target (ctx
->stmt
))
5177 b
= maybe_lookup_decl (b
, ctx
);
5180 b
= TREE_OPERAND (orig_var
, 1);
5181 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
5183 if (integer_zerop (bias
))
5187 bias
= fold_convert_loc (clause_loc
,
5188 TREE_TYPE (b
), bias
);
5189 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5190 TREE_TYPE (b
), b
, bias
);
5192 orig_var
= TREE_OPERAND (orig_var
, 0);
5196 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5197 if (is_global_var (out
)
5198 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
5199 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
5200 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
5203 else if (is_omp_target (ctx
->stmt
))
5207 bool by_ref
= use_pointer_for_field (var
, NULL
);
5208 x
= build_receiver_ref (var
, by_ref
, ctx
);
5209 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
5210 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
5212 x
= build_fold_addr_expr (x
);
5214 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
5215 x
= build_simple_mem_ref (x
);
5216 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
5218 if (var
== TREE_OPERAND (orig_var
, 0))
5219 x
= build_fold_addr_expr (x
);
5221 bias
= fold_convert (sizetype
, bias
);
5222 x
= fold_convert (ptr_type_node
, x
);
5223 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
5224 TREE_TYPE (x
), x
, bias
);
5225 unsigned cnt
= task_reduction_cnt
- 1;
5226 if (!task_reduction_needs_orig_p
)
5227 cnt
+= (task_reduction_cntorig_full
5228 - task_reduction_cntorig
);
5230 cnt
= task_reduction_cntorig
- 1;
5231 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5232 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5233 gimplify_assign (r
, x
, ilist
);
5237 if (TREE_CODE (orig_var
) == INDIRECT_REF
5238 || TREE_CODE (orig_var
) == ADDR_EXPR
)
5239 orig_var
= TREE_OPERAND (orig_var
, 0);
5240 tree d
= OMP_CLAUSE_DECL (c
);
5241 tree type
= TREE_TYPE (d
);
5242 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
5243 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
5245 const char *name
= get_name (orig_var
);
5246 if (pass
!= 3 && !TREE_CONSTANT (v
))
5249 if (is_omp_target (ctx
->stmt
))
5252 t
= maybe_lookup_decl (v
, ctx
);
5256 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5257 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
5258 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5260 build_int_cst (TREE_TYPE (v
), 1));
5261 sz
= fold_build2_loc (clause_loc
, MULT_EXPR
,
5263 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5267 tree xv
= create_tmp_var (ptr_type_node
);
5268 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5270 unsigned cnt
= task_reduction_cnt
- 1;
5271 if (!task_reduction_needs_orig_p
)
5272 cnt
+= (task_reduction_cntorig_full
5273 - task_reduction_cntorig
);
5275 cnt
= task_reduction_cntorig
- 1;
5276 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5277 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5279 gimple
*g
= gimple_build_assign (xv
, x
);
5280 gimple_seq_add_stmt (ilist
, g
);
5284 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5286 if (ctx
->task_reductions
[1 + idx
])
5287 off
= fold_convert (sizetype
,
5288 ctx
->task_reductions
[1 + idx
]);
5290 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5292 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
5294 gimple_seq_add_stmt (ilist
, g
);
5296 x
= fold_convert (build_pointer_type (boolean_type_node
),
5298 if (TREE_CONSTANT (v
))
5299 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
5300 TYPE_SIZE_UNIT (type
));
5304 if (is_omp_target (ctx
->stmt
))
5307 t
= maybe_lookup_decl (v
, ctx
);
5311 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5312 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
5314 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5316 build_int_cst (TREE_TYPE (v
), 1));
5317 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
5319 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5320 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5322 cond
= create_tmp_var (TREE_TYPE (x
));
5323 gimplify_assign (cond
, x
, ilist
);
5326 else if (lower_private_allocate (var
, type
, allocator
,
5327 allocate_ptr
, ilist
, ctx
,
5330 ? TYPE_SIZE_UNIT (type
)
5333 else if (TREE_CONSTANT (v
))
5335 x
= create_tmp_var_raw (type
, name
);
5336 gimple_add_tmp_var (x
);
5337 TREE_ADDRESSABLE (x
) = 1;
5338 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5343 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5344 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
5345 x
= build_call_expr_loc (clause_loc
, atmp
, 2, sz
, al
);
5348 tree ptype
= build_pointer_type (TREE_TYPE (type
));
5349 x
= fold_convert_loc (clause_loc
, ptype
, x
);
5350 tree y
= create_tmp_var (ptype
, name
);
5351 gimplify_assign (y
, x
, ilist
);
5355 if (!integer_zerop (bias
))
5357 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
5359 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
5361 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
5362 pointer_sized_int_node
, yb
, bias
);
5363 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
5364 yb
= create_tmp_var (ptype
, name
);
5365 gimplify_assign (yb
, x
, ilist
);
5369 d
= TREE_OPERAND (d
, 0);
5370 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
5371 d
= TREE_OPERAND (d
, 0);
5372 if (TREE_CODE (d
) == ADDR_EXPR
)
5374 if (orig_var
!= var
)
5376 gcc_assert (is_variable_sized (orig_var
));
5377 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
5379 gimplify_assign (new_var
, x
, ilist
);
5380 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
5381 tree t
= build_fold_indirect_ref (new_var
);
5382 DECL_IGNORED_P (new_var
) = 0;
5383 TREE_THIS_NOTRAP (t
) = 1;
5384 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
5385 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
5389 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
5390 build_int_cst (ptype
, 0));
5391 SET_DECL_VALUE_EXPR (new_var
, x
);
5392 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5397 gcc_assert (orig_var
== var
);
5398 if (TREE_CODE (d
) == INDIRECT_REF
)
5400 x
= create_tmp_var (ptype
, name
);
5401 TREE_ADDRESSABLE (x
) = 1;
5402 gimplify_assign (x
, yb
, ilist
);
5403 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5405 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5406 gimplify_assign (new_var
, x
, ilist
);
5408 /* GOMP_taskgroup_reduction_register memsets the whole
5409 array to zero. If the initializer is zero, we don't
5410 need to initialize it again, just mark it as ever
5411 used unconditionally, i.e. cond = true. */
5413 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
5414 && initializer_zerop (omp_reduction_init (c
,
5417 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
5419 gimple_seq_add_stmt (ilist
, g
);
5422 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5426 if (!is_parallel_ctx (ctx
))
5428 tree condv
= create_tmp_var (boolean_type_node
);
5429 g
= gimple_build_assign (condv
,
5430 build_simple_mem_ref (cond
));
5431 gimple_seq_add_stmt (ilist
, g
);
5432 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
5433 g
= gimple_build_cond (NE_EXPR
, condv
,
5434 boolean_false_node
, end
, lab1
);
5435 gimple_seq_add_stmt (ilist
, g
);
5436 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
5438 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5440 gimple_seq_add_stmt (ilist
, g
);
5443 tree y1
= create_tmp_var (ptype
);
5444 gimplify_assign (y1
, y
, ilist
);
5445 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
5446 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
5447 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
5448 if (task_reduction_needs_orig_p
)
5450 y3
= create_tmp_var (ptype
);
5452 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5453 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5454 size_int (task_reduction_cnt_full
5455 + task_reduction_cntorig
- 1),
5456 NULL_TREE
, NULL_TREE
);
5459 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5460 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
5463 gimplify_assign (y3
, ref
, ilist
);
5465 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
5469 y2
= create_tmp_var (ptype
);
5470 gimplify_assign (y2
, y
, ilist
);
5472 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5474 tree ref
= build_outer_var_ref (var
, ctx
);
5475 /* For ref build_outer_var_ref already performs this. */
5476 if (TREE_CODE (d
) == INDIRECT_REF
)
5477 gcc_assert (omp_privatize_by_reference (var
));
5478 else if (TREE_CODE (d
) == ADDR_EXPR
)
5479 ref
= build_fold_addr_expr (ref
);
5480 else if (omp_privatize_by_reference (var
))
5481 ref
= build_fold_addr_expr (ref
);
5482 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
5483 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
5484 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5486 y3
= create_tmp_var (ptype
);
5487 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
5491 y4
= create_tmp_var (ptype
);
5492 gimplify_assign (y4
, ref
, dlist
);
5496 tree i
= create_tmp_var (TREE_TYPE (v
));
5497 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
5498 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5499 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
5502 i2
= create_tmp_var (TREE_TYPE (v
));
5503 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
5504 body2
= create_artificial_label (UNKNOWN_LOCATION
);
5505 end2
= create_artificial_label (UNKNOWN_LOCATION
);
5506 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
5508 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5510 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5511 tree decl_placeholder
5512 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
5513 SET_DECL_VALUE_EXPR (decl_placeholder
,
5514 build_simple_mem_ref (y1
));
5515 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
5516 SET_DECL_VALUE_EXPR (placeholder
,
5517 y3
? build_simple_mem_ref (y3
)
5519 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5520 x
= lang_hooks
.decls
.omp_clause_default_ctor
5521 (c
, build_simple_mem_ref (y1
),
5522 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
5524 gimplify_and_add (x
, ilist
);
5525 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5527 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5528 lower_omp (&tseq
, ctx
);
5529 gimple_seq_add_seq (ilist
, tseq
);
5531 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5534 SET_DECL_VALUE_EXPR (decl_placeholder
,
5535 build_simple_mem_ref (y2
));
5536 SET_DECL_VALUE_EXPR (placeholder
,
5537 build_simple_mem_ref (y4
));
5538 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5539 lower_omp (&tseq
, ctx
);
5540 gimple_seq_add_seq (dlist
, tseq
);
5541 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5543 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5544 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
5547 x
= lang_hooks
.decls
.omp_clause_dtor
5548 (c
, build_simple_mem_ref (y2
));
5550 gimplify_and_add (x
, dlist
);
5555 x
= omp_reduction_init (c
, TREE_TYPE (type
));
5556 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5558 /* reduction(-:var) sums up the partial results, so it
5559 acts identically to reduction(+:var). */
5560 if (code
== MINUS_EXPR
)
5563 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
5566 x
= build2 (code
, TREE_TYPE (type
),
5567 build_simple_mem_ref (y4
),
5568 build_simple_mem_ref (y2
));
5569 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
5573 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
5574 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5575 gimple_seq_add_stmt (ilist
, g
);
5578 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
5579 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5580 gimple_seq_add_stmt (ilist
, g
);
5582 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
5583 build_int_cst (TREE_TYPE (i
), 1));
5584 gimple_seq_add_stmt (ilist
, g
);
5585 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
5586 gimple_seq_add_stmt (ilist
, g
);
5587 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
5590 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
5591 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5592 gimple_seq_add_stmt (dlist
, g
);
5595 g
= gimple_build_assign
5596 (y4
, POINTER_PLUS_EXPR
, y4
,
5597 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5598 gimple_seq_add_stmt (dlist
, g
);
5600 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
5601 build_int_cst (TREE_TYPE (i2
), 1));
5602 gimple_seq_add_stmt (dlist
, g
);
5603 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
5604 gimple_seq_add_stmt (dlist
, g
);
5605 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
5609 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
5610 g
= gimple_build_call (f
, 2, allocate_ptr
, allocator
);
5611 gimple_seq_add_stmt (dlist
, g
);
5617 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5618 if (is_global_var (out
))
5620 else if (is_omp_target (ctx
->stmt
))
5624 bool by_ref
= use_pointer_for_field (var
, ctx
);
5625 x
= build_receiver_ref (var
, by_ref
, ctx
);
5627 if (!omp_privatize_by_reference (var
))
5628 x
= build_fold_addr_expr (x
);
5629 x
= fold_convert (ptr_type_node
, x
);
5630 unsigned cnt
= task_reduction_cnt
- 1;
5631 if (!task_reduction_needs_orig_p
)
5632 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
5634 cnt
= task_reduction_cntorig
- 1;
5635 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5636 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5637 gimplify_assign (r
, x
, ilist
);
5642 tree type
= TREE_TYPE (new_var
);
5643 if (!omp_privatize_by_reference (var
))
5644 type
= build_pointer_type (type
);
5645 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5647 unsigned cnt
= task_reduction_cnt
- 1;
5648 if (!task_reduction_needs_orig_p
)
5649 cnt
+= (task_reduction_cntorig_full
5650 - task_reduction_cntorig
);
5652 cnt
= task_reduction_cntorig
- 1;
5653 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5654 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5658 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5660 if (ctx
->task_reductions
[1 + idx
])
5661 off
= fold_convert (sizetype
,
5662 ctx
->task_reductions
[1 + idx
]);
5664 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5666 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
5669 x
= fold_convert (type
, x
);
5671 if (omp_privatize_by_reference (var
))
5673 gimplify_assign (new_var
, x
, ilist
);
5675 new_var
= build_simple_mem_ref (new_var
);
5679 t
= create_tmp_var (type
);
5680 gimplify_assign (t
, x
, ilist
);
5681 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
5682 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5684 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
5685 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
5686 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5687 cond
= create_tmp_var (TREE_TYPE (t
));
5688 gimplify_assign (cond
, t
, ilist
);
5690 else if (is_variable_sized (var
))
5692 /* For variable sized types, we need to allocate the
5693 actual storage here. Call alloca and store the
5694 result in the pointer decl that we created elsewhere. */
5698 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
5702 ptr
= DECL_VALUE_EXPR (new_var
);
5703 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
5704 ptr
= TREE_OPERAND (ptr
, 0);
5705 gcc_assert (DECL_P (ptr
));
5706 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
5708 if (lower_private_allocate (var
, new_var
, allocator
,
5709 allocate_ptr
, ilist
, ctx
,
5714 /* void *tmp = __builtin_alloca */
5716 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5718 = gimple_build_call (atmp
, 2, x
,
5719 size_int (DECL_ALIGN (var
)));
5720 cfun
->calls_alloca
= 1;
5721 tmp
= create_tmp_var_raw (ptr_type_node
);
5722 gimple_add_tmp_var (tmp
);
5723 gimple_call_set_lhs (stmt
, tmp
);
5725 gimple_seq_add_stmt (ilist
, stmt
);
5728 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
5729 gimplify_assign (ptr
, x
, ilist
);
5732 else if (omp_privatize_by_reference (var
)
5733 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
5734 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
5736 /* For references that are being privatized for Fortran,
5737 allocate new backing storage for the new pointer
5738 variable. This allows us to avoid changing all the
5739 code that expects a pointer to something that expects
5740 a direct variable. */
5744 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
5745 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
5747 x
= build_receiver_ref (var
, false, ctx
);
5748 if (ctx
->allocate_map
)
5749 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
5751 allocator
= *allocatep
;
5752 if (TREE_CODE (allocator
) == TREE_LIST
)
5753 allocator
= TREE_PURPOSE (allocator
);
5754 if (TREE_CODE (allocator
) != INTEGER_CST
)
5755 allocator
= build_outer_var_ref (allocator
, ctx
);
5756 allocator
= fold_convert (pointer_sized_int_node
,
5758 allocate_ptr
= unshare_expr (x
);
5760 if (allocator
== NULL_TREE
)
5761 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5763 else if (lower_private_allocate (var
, new_var
, allocator
,
5765 ilist
, ctx
, true, x
))
5767 else if (TREE_CONSTANT (x
))
5769 /* For reduction in SIMD loop, defer adding the
5770 initialization of the reference, because if we decide
5771 to use SIMD array for it, the initilization could cause
5772 expansion ICE. Ditto for other privatization clauses. */
5777 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
5779 gimple_add_tmp_var (x
);
5780 TREE_ADDRESSABLE (x
) = 1;
5781 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5787 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5788 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
5789 tree al
= size_int (TYPE_ALIGN (rtype
));
5790 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
5795 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5796 gimplify_assign (new_var
, x
, ilist
);
5799 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5801 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5802 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5803 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5811 switch (OMP_CLAUSE_CODE (c
))
5813 case OMP_CLAUSE_SHARED
:
5814 /* Ignore shared directives in teams construct inside
5815 target construct. */
5816 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5817 && !is_host_teams_ctx (ctx
))
5819 /* Shared global vars are just accessed directly. */
5820 if (is_global_var (new_var
))
5822 /* For taskloop firstprivate/lastprivate, represented
5823 as firstprivate and shared clause on the task, new_var
5824 is the firstprivate var. */
5825 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5827 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5828 needs to be delayed until after fixup_child_record_type so
5829 that we get the correct type during the dereference. */
5830 by_ref
= use_pointer_for_field (var
, ctx
);
5831 x
= build_receiver_ref (var
, by_ref
, ctx
);
5832 SET_DECL_VALUE_EXPR (new_var
, x
);
5833 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5835 /* ??? If VAR is not passed by reference, and the variable
5836 hasn't been initialized yet, then we'll get a warning for
5837 the store into the omp_data_s structure. Ideally, we'd be
5838 able to notice this and not store anything at all, but
5839 we're generating code too early. Suppress the warning. */
5841 suppress_warning (var
, OPT_Wuninitialized
);
5844 case OMP_CLAUSE__CONDTEMP_
:
5845 if (is_parallel_ctx (ctx
))
5847 x
= build_receiver_ref (var
, false, ctx
);
5848 SET_DECL_VALUE_EXPR (new_var
, x
);
5849 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5851 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
5853 x
= build_zero_cst (TREE_TYPE (var
));
5858 case OMP_CLAUSE_LASTPRIVATE
:
5859 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5863 case OMP_CLAUSE_PRIVATE
:
5864 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
5865 x
= build_outer_var_ref (var
, ctx
);
5866 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5868 if (is_task_ctx (ctx
))
5869 x
= build_receiver_ref (var
, false, ctx
);
5871 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
5879 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
5880 ilist
, ctx
, false, NULL_TREE
);
5881 nx
= unshare_expr (new_var
);
5883 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5884 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
5887 nx
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, nx
, x
);
5889 nx
= lang_hooks
.decls
.omp_clause_default_ctor (c
, nx
, x
);
5892 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5893 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
5894 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5895 && (gimple_omp_for_collapse (ctx
->stmt
) != 1
5896 || (gimple_omp_for_index (ctx
->stmt
, 0)
5898 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
5899 || omp_privatize_by_reference (var
))
5900 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5903 if (omp_privatize_by_reference (var
))
5905 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5906 tree new_vard
= TREE_OPERAND (new_var
, 0);
5907 gcc_assert (DECL_P (new_vard
));
5908 SET_DECL_VALUE_EXPR (new_vard
,
5909 build_fold_addr_expr (lvar
));
5910 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5915 tree iv
= unshare_expr (ivar
);
5917 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
,
5920 x
= lang_hooks
.decls
.omp_clause_default_ctor (c
,
5924 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
5926 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
5927 unshare_expr (ivar
), x
);
5931 gimplify_and_add (x
, &llist
[0]);
5932 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5933 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5938 gcc_assert (TREE_CODE (v
) == MEM_REF
);
5939 v
= TREE_OPERAND (v
, 0);
5940 gcc_assert (DECL_P (v
));
5942 v
= *ctx
->lastprivate_conditional_map
->get (v
);
5943 tree t
= create_tmp_var (TREE_TYPE (v
));
5944 tree z
= build_zero_cst (TREE_TYPE (v
));
5946 = build_outer_var_ref (var
, ctx
,
5947 OMP_CLAUSE_LASTPRIVATE
);
5948 gimple_seq_add_stmt (dlist
,
5949 gimple_build_assign (t
, z
));
5950 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
5951 tree civar
= DECL_VALUE_EXPR (v
);
5952 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
5953 civar
= unshare_expr (civar
);
5954 TREE_OPERAND (civar
, 1) = sctx
.idx
;
5955 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
5956 unshare_expr (civar
));
5957 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
5958 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
5959 orig_v
, unshare_expr (ivar
)));
5960 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
5962 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
5964 gimple_seq tseq
= NULL
;
5965 gimplify_and_add (x
, &tseq
);
5967 lower_omp (&tseq
, ctx
->outer
);
5968 gimple_seq_add_seq (&llist
[1], tseq
);
5970 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5971 && ctx
->for_simd_scan_phase
)
5973 x
= unshare_expr (ivar
);
5975 = build_outer_var_ref (var
, ctx
,
5976 OMP_CLAUSE_LASTPRIVATE
);
5977 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5979 gimplify_and_add (x
, &llist
[0]);
5983 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5985 gimplify_and_add (y
, &llist
[1]);
5989 if (omp_privatize_by_reference (var
))
5991 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5992 tree new_vard
= TREE_OPERAND (new_var
, 0);
5993 gcc_assert (DECL_P (new_vard
));
5994 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5995 x
= TYPE_SIZE_UNIT (type
);
5996 if (TREE_CONSTANT (x
))
5998 x
= create_tmp_var_raw (type
, get_name (var
));
5999 gimple_add_tmp_var (x
);
6000 TREE_ADDRESSABLE (x
) = 1;
6001 x
= build_fold_addr_expr_loc (clause_loc
, x
);
6002 x
= fold_convert_loc (clause_loc
,
6003 TREE_TYPE (new_vard
), x
);
6004 gimplify_assign (new_vard
, x
, ilist
);
6009 gimplify_and_add (nx
, ilist
);
6010 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6012 && ctx
->for_simd_scan_phase
)
6014 tree orig_v
= build_outer_var_ref (var
, ctx
,
6015 OMP_CLAUSE_LASTPRIVATE
);
6016 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
6018 gimplify_and_add (x
, ilist
);
6023 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
6025 gimplify_and_add (x
, dlist
);
6028 if (!is_gimple_val (allocator
))
6030 tree avar
= create_tmp_var (TREE_TYPE (allocator
));
6031 gimplify_assign (avar
, allocator
, dlist
);
6034 if (!is_gimple_val (allocate_ptr
))
6036 tree apvar
= create_tmp_var (TREE_TYPE (allocate_ptr
));
6037 gimplify_assign (apvar
, allocate_ptr
, dlist
);
6038 allocate_ptr
= apvar
;
6040 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
6042 = gimple_build_call (f
, 2, allocate_ptr
, allocator
);
6043 gimple_seq_add_stmt (dlist
, g
);
6047 case OMP_CLAUSE_LINEAR
:
6048 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
6049 goto do_firstprivate
;
6050 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
6053 x
= build_outer_var_ref (var
, ctx
);
6056 case OMP_CLAUSE_FIRSTPRIVATE
:
6057 if (is_task_ctx (ctx
))
6059 if ((omp_privatize_by_reference (var
)
6060 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
6061 || is_variable_sized (var
))
6063 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
6065 || use_pointer_for_field (var
, NULL
))
6067 x
= build_receiver_ref (var
, false, ctx
);
6068 if (ctx
->allocate_map
)
6069 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
6071 allocator
= *allocatep
;
6072 if (TREE_CODE (allocator
) == TREE_LIST
)
6073 allocator
= TREE_PURPOSE (allocator
);
6074 if (TREE_CODE (allocator
) != INTEGER_CST
)
6075 allocator
= build_outer_var_ref (allocator
, ctx
);
6076 allocator
= fold_convert (pointer_sized_int_node
,
6078 allocate_ptr
= unshare_expr (x
);
6079 x
= build_simple_mem_ref (x
);
6080 TREE_THIS_NOTRAP (x
) = 1;
6082 SET_DECL_VALUE_EXPR (new_var
, x
);
6083 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
6087 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
6088 && omp_privatize_by_reference (var
))
6090 x
= build_outer_var_ref (var
, ctx
);
6091 gcc_assert (TREE_CODE (x
) == MEM_REF
6092 && integer_zerop (TREE_OPERAND (x
, 1)));
6093 x
= TREE_OPERAND (x
, 0);
6094 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6095 (c
, unshare_expr (new_var
), x
);
6096 gimplify_and_add (x
, ilist
);
6100 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
6101 ilist
, ctx
, false, NULL_TREE
);
6102 x
= build_outer_var_ref (var
, ctx
);
6105 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6106 && gimple_omp_for_combined_into_p (ctx
->stmt
))
6108 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6110 t
= build_outer_var_ref (t
, ctx
);
6111 tree stept
= TREE_TYPE (t
);
6112 tree ct
= omp_find_clause (clauses
,
6113 OMP_CLAUSE__LOOPTEMP_
);
6115 tree l
= OMP_CLAUSE_DECL (ct
);
6116 tree n1
= fd
->loop
.n1
;
6117 tree step
= fd
->loop
.step
;
6118 tree itype
= TREE_TYPE (l
);
6119 if (POINTER_TYPE_P (itype
))
6120 itype
= signed_type_for (itype
);
6121 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
6122 if (TYPE_UNSIGNED (itype
)
6123 && fd
->loop
.cond_code
== GT_EXPR
)
6124 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
6125 fold_build1 (NEGATE_EXPR
, itype
, l
),
6126 fold_build1 (NEGATE_EXPR
,
6129 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
6130 t
= fold_build2 (MULT_EXPR
, stept
,
6131 fold_convert (stept
, l
), t
);
6133 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
6135 if (omp_privatize_by_reference (var
))
6137 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6138 tree new_vard
= TREE_OPERAND (new_var
, 0);
6139 gcc_assert (DECL_P (new_vard
));
6140 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6141 nx
= TYPE_SIZE_UNIT (type
);
6142 if (TREE_CONSTANT (nx
))
6144 nx
= create_tmp_var_raw (type
,
6146 gimple_add_tmp_var (nx
);
6147 TREE_ADDRESSABLE (nx
) = 1;
6148 nx
= build_fold_addr_expr_loc (clause_loc
,
6150 nx
= fold_convert_loc (clause_loc
,
6151 TREE_TYPE (new_vard
),
6153 gimplify_assign (new_vard
, nx
, ilist
);
6157 x
= lang_hooks
.decls
.omp_clause_linear_ctor
6159 gimplify_and_add (x
, ilist
);
6163 if (POINTER_TYPE_P (TREE_TYPE (x
)))
6164 x
= fold_build_pointer_plus (x
, t
);
6166 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
,
6167 fold_convert (TREE_TYPE (x
), t
));
6170 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
6171 || TREE_ADDRESSABLE (new_var
)
6172 || omp_privatize_by_reference (var
))
6173 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6176 if (omp_privatize_by_reference (var
))
6178 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6179 tree new_vard
= TREE_OPERAND (new_var
, 0);
6180 gcc_assert (DECL_P (new_vard
));
6181 SET_DECL_VALUE_EXPR (new_vard
,
6182 build_fold_addr_expr (lvar
));
6183 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6185 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
6187 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
6188 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
6189 gimplify_and_add (x
, ilist
);
6190 gimple_stmt_iterator gsi
6191 = gsi_start (*gimple_omp_body_ptr (ctx
->stmt
));
6193 = gimple_build_assign (unshare_expr (lvar
), iv
);
6194 gsi_insert_before_without_update (&gsi
, g
,
6196 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6197 enum tree_code code
= PLUS_EXPR
;
6198 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
6199 code
= POINTER_PLUS_EXPR
;
6200 g
= gimple_build_assign (iv
, code
, iv
, t
);
6201 gsi_insert_before_without_update (&gsi
, g
,
6205 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6206 (c
, unshare_expr (ivar
), x
);
6207 gimplify_and_add (x
, &llist
[0]);
6208 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6210 gimplify_and_add (x
, &llist
[1]);
6213 if (omp_privatize_by_reference (var
))
6215 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6216 tree new_vard
= TREE_OPERAND (new_var
, 0);
6217 gcc_assert (DECL_P (new_vard
));
6218 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6219 nx
= TYPE_SIZE_UNIT (type
);
6220 if (TREE_CONSTANT (nx
))
6222 nx
= create_tmp_var_raw (type
, get_name (var
));
6223 gimple_add_tmp_var (nx
);
6224 TREE_ADDRESSABLE (nx
) = 1;
6225 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
6226 nx
= fold_convert_loc (clause_loc
,
6227 TREE_TYPE (new_vard
), nx
);
6228 gimplify_assign (new_vard
, nx
, ilist
);
6232 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6233 (c
, unshare_expr (new_var
), x
);
6234 gimplify_and_add (x
, ilist
);
6237 case OMP_CLAUSE__LOOPTEMP_
:
6238 case OMP_CLAUSE__REDUCTEMP_
:
6239 gcc_assert (is_taskreg_ctx (ctx
));
6240 x
= build_outer_var_ref (var
, ctx
);
6241 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
6242 gimplify_and_add (x
, ilist
);
6245 case OMP_CLAUSE_COPYIN
:
6246 by_ref
= use_pointer_for_field (var
, NULL
);
6247 x
= build_receiver_ref (var
, by_ref
, ctx
);
6248 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
6249 append_to_statement_list (x
, ©in_seq
);
6250 copyin_by_ref
|= by_ref
;
6253 case OMP_CLAUSE_REDUCTION
:
6254 case OMP_CLAUSE_IN_REDUCTION
:
6255 /* OpenACC reductions are initialized using the
6256 GOACC_REDUCTION internal function. */
6257 if (is_gimple_omp_oacc (ctx
->stmt
))
6259 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6261 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6263 tree ptype
= TREE_TYPE (placeholder
);
6266 x
= error_mark_node
;
6267 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
6268 && !task_reduction_needs_orig_p
)
6270 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
6272 tree pptype
= build_pointer_type (ptype
);
6273 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
6274 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
6275 size_int (task_reduction_cnt_full
6276 + task_reduction_cntorig
- 1),
6277 NULL_TREE
, NULL_TREE
);
6281 = *ctx
->task_reduction_map
->get (c
);
6282 x
= task_reduction_read (ilist
, tskred_temp
,
6283 pptype
, 7 + 3 * idx
);
6285 x
= fold_convert (pptype
, x
);
6286 x
= build_simple_mem_ref (x
);
6291 lower_private_allocate (var
, new_var
, allocator
,
6292 allocate_ptr
, ilist
, ctx
, false,
6294 x
= build_outer_var_ref (var
, ctx
);
6296 if (omp_privatize_by_reference (var
)
6297 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
6298 x
= build_fold_addr_expr_loc (clause_loc
, x
);
6300 SET_DECL_VALUE_EXPR (placeholder
, x
);
6301 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6302 tree new_vard
= new_var
;
6303 if (omp_privatize_by_reference (var
))
6305 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6306 new_vard
= TREE_OPERAND (new_var
, 0);
6307 gcc_assert (DECL_P (new_vard
));
6309 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6311 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6312 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6315 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6319 if (new_vard
== new_var
)
6321 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
6322 SET_DECL_VALUE_EXPR (new_var
, ivar
);
6326 SET_DECL_VALUE_EXPR (new_vard
,
6327 build_fold_addr_expr (ivar
));
6328 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6330 x
= lang_hooks
.decls
.omp_clause_default_ctor
6331 (c
, unshare_expr (ivar
),
6332 build_outer_var_ref (var
, ctx
));
6333 if (rvarp
&& ctx
->for_simd_scan_phase
)
6336 gimplify_and_add (x
, &llist
[0]);
6337 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6339 gimplify_and_add (x
, &llist
[1]);
6346 gimplify_and_add (x
, &llist
[0]);
6348 tree ivar2
= unshare_expr (lvar
);
6349 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6350 x
= lang_hooks
.decls
.omp_clause_default_ctor
6351 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
6352 gimplify_and_add (x
, &llist
[0]);
6356 x
= lang_hooks
.decls
.omp_clause_default_ctor
6357 (c
, unshare_expr (rvar2
),
6358 build_outer_var_ref (var
, ctx
));
6359 gimplify_and_add (x
, &llist
[0]);
6362 /* For types that need construction, add another
6363 private var which will be default constructed
6364 and optionally initialized with
6365 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6366 loop we want to assign this value instead of
6367 constructing and destructing it in each
6369 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
6370 gimple_add_tmp_var (nv
);
6371 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
6375 x
= lang_hooks
.decls
.omp_clause_default_ctor
6376 (c
, nv
, build_outer_var_ref (var
, ctx
));
6377 gimplify_and_add (x
, ilist
);
6379 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6381 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6382 x
= DECL_VALUE_EXPR (new_vard
);
6384 if (new_vard
!= new_var
)
6385 vexpr
= build_fold_addr_expr (nv
);
6386 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6387 lower_omp (&tseq
, ctx
);
6388 SET_DECL_VALUE_EXPR (new_vard
, x
);
6389 gimple_seq_add_seq (ilist
, tseq
);
6390 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6393 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6395 gimplify_and_add (x
, dlist
);
6398 tree ref
= build_outer_var_ref (var
, ctx
);
6399 x
= unshare_expr (ivar
);
6400 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
6402 gimplify_and_add (x
, &llist
[0]);
6404 ref
= build_outer_var_ref (var
, ctx
);
6405 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
6407 gimplify_and_add (x
, &llist
[3]);
6409 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6410 if (new_vard
== new_var
)
6411 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6413 SET_DECL_VALUE_EXPR (new_vard
,
6414 build_fold_addr_expr (lvar
));
6416 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6418 gimplify_and_add (x
, &llist
[1]);
6420 tree ivar2
= unshare_expr (lvar
);
6421 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6422 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
6424 gimplify_and_add (x
, &llist
[1]);
6428 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
6430 gimplify_and_add (x
, &llist
[1]);
6435 gimplify_and_add (x
, &llist
[0]);
6436 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6438 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6439 lower_omp (&tseq
, ctx
);
6440 gimple_seq_add_seq (&llist
[0], tseq
);
6442 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6443 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6444 lower_omp (&tseq
, ctx
);
6445 gimple_seq_add_seq (&llist
[1], tseq
);
6446 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6447 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6448 if (new_vard
== new_var
)
6449 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6451 SET_DECL_VALUE_EXPR (new_vard
,
6452 build_fold_addr_expr (lvar
));
6453 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6455 gimplify_and_add (x
, &llist
[1]);
6458 /* If this is a reference to constant size reduction var
6459 with placeholder, we haven't emitted the initializer
6460 for it because it is undesirable if SIMD arrays are used.
6461 But if they aren't used, we need to emit the deferred
6462 initialization now. */
6463 else if (omp_privatize_by_reference (var
) && is_simd
)
6464 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6466 tree lab2
= NULL_TREE
;
6470 if (!is_parallel_ctx (ctx
))
6472 tree condv
= create_tmp_var (boolean_type_node
);
6473 tree m
= build_simple_mem_ref (cond
);
6474 g
= gimple_build_assign (condv
, m
);
6475 gimple_seq_add_stmt (ilist
, g
);
6477 = create_artificial_label (UNKNOWN_LOCATION
);
6478 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6479 g
= gimple_build_cond (NE_EXPR
, condv
,
6482 gimple_seq_add_stmt (ilist
, g
);
6483 gimple_seq_add_stmt (ilist
,
6484 gimple_build_label (lab1
));
6486 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6488 gimple_seq_add_stmt (ilist
, g
);
6490 x
= lang_hooks
.decls
.omp_clause_default_ctor
6491 (c
, unshare_expr (new_var
),
6493 : build_outer_var_ref (var
, ctx
));
6495 gimplify_and_add (x
, ilist
);
6497 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6498 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6500 if (ctx
->for_simd_scan_phase
)
6503 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
6505 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
6506 gimple_add_tmp_var (nv
);
6507 ctx
->cb
.decl_map
->put (new_vard
, nv
);
6508 x
= lang_hooks
.decls
.omp_clause_default_ctor
6509 (c
, nv
, build_outer_var_ref (var
, ctx
));
6511 gimplify_and_add (x
, ilist
);
6512 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6514 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6516 if (new_vard
!= new_var
)
6517 vexpr
= build_fold_addr_expr (nv
);
6518 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6519 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6520 lower_omp (&tseq
, ctx
);
6521 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
6522 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
6523 gimple_seq_add_seq (ilist
, tseq
);
6525 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6526 if (is_simd
&& ctx
->scan_exclusive
)
6529 = create_tmp_var_raw (TREE_TYPE (new_var
));
6530 gimple_add_tmp_var (nv2
);
6531 ctx
->cb
.decl_map
->put (nv
, nv2
);
6532 x
= lang_hooks
.decls
.omp_clause_default_ctor
6533 (c
, nv2
, build_outer_var_ref (var
, ctx
));
6534 gimplify_and_add (x
, ilist
);
6535 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6537 gimplify_and_add (x
, dlist
);
6539 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6541 gimplify_and_add (x
, dlist
);
6544 && ctx
->scan_exclusive
6545 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
6547 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
6548 gimple_add_tmp_var (nv2
);
6549 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
6550 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6552 gimplify_and_add (x
, dlist
);
6554 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6558 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6560 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6561 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
6562 && is_omp_target (ctx
->stmt
))
6564 tree d
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
6565 tree oldv
= NULL_TREE
;
6567 if (DECL_HAS_VALUE_EXPR_P (d
))
6568 oldv
= DECL_VALUE_EXPR (d
);
6569 SET_DECL_VALUE_EXPR (d
, new_vard
);
6570 DECL_HAS_VALUE_EXPR_P (d
) = 1;
6571 lower_omp (&tseq
, ctx
);
6573 SET_DECL_VALUE_EXPR (d
, oldv
);
6576 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
6577 DECL_HAS_VALUE_EXPR_P (d
) = 0;
6581 lower_omp (&tseq
, ctx
);
6582 gimple_seq_add_seq (ilist
, tseq
);
6584 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6587 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6588 lower_omp (&tseq
, ctx
);
6589 gimple_seq_add_seq (dlist
, tseq
);
6590 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6592 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6596 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6603 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
6604 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
6605 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6610 tree lab2
= NULL_TREE
;
6611 /* GOMP_taskgroup_reduction_register memsets the whole
6612 array to zero. If the initializer is zero, we don't
6613 need to initialize it again, just mark it as ever
6614 used unconditionally, i.e. cond = true. */
6615 if (initializer_zerop (x
))
6617 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6619 gimple_seq_add_stmt (ilist
, g
);
6624 if (!cond) { cond = true; new_var = x; } */
6625 if (!is_parallel_ctx (ctx
))
6627 tree condv
= create_tmp_var (boolean_type_node
);
6628 tree m
= build_simple_mem_ref (cond
);
6629 g
= gimple_build_assign (condv
, m
);
6630 gimple_seq_add_stmt (ilist
, g
);
6632 = create_artificial_label (UNKNOWN_LOCATION
);
6633 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6634 g
= gimple_build_cond (NE_EXPR
, condv
,
6637 gimple_seq_add_stmt (ilist
, g
);
6638 gimple_seq_add_stmt (ilist
,
6639 gimple_build_label (lab1
));
6641 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6643 gimple_seq_add_stmt (ilist
, g
);
6644 gimplify_assign (new_var
, x
, ilist
);
6646 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6650 /* reduction(-:var) sums up the partial results, so it
6651 acts identically to reduction(+:var). */
6652 if (code
== MINUS_EXPR
)
6656 = (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
);
6657 tree new_vard
= new_var
;
6658 if (is_simd
&& omp_privatize_by_reference (var
))
6660 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6661 new_vard
= TREE_OPERAND (new_var
, 0);
6662 gcc_assert (DECL_P (new_vard
));
6664 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6666 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6667 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6670 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6674 if (new_vard
!= new_var
)
6676 SET_DECL_VALUE_EXPR (new_vard
,
6677 build_fold_addr_expr (lvar
));
6678 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6681 tree ref
= build_outer_var_ref (var
, ctx
);
6685 if (ctx
->for_simd_scan_phase
)
6687 gimplify_assign (ivar
, ref
, &llist
[0]);
6688 ref
= build_outer_var_ref (var
, ctx
);
6689 gimplify_assign (ref
, rvar
, &llist
[3]);
6693 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
6698 simt_lane
= create_tmp_var (unsigned_type_node
);
6699 x
= build_call_expr_internal_loc
6700 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
6701 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
6702 /* Make sure x is evaluated unconditionally. */
6703 tree bfly_var
= create_tmp_var (TREE_TYPE (ivar
));
6704 gimplify_assign (bfly_var
, x
, &llist
[2]);
6705 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, bfly_var
);
6706 gimplify_assign (ivar
, x
, &llist
[2]);
6712 tree zero
= build_zero_cst (TREE_TYPE (ivar
));
6713 ivar2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6714 boolean_type_node
, ivar
,
6716 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6717 boolean_type_node
, ref
,
6720 x
= build2 (code
, TREE_TYPE (ref
), ref2
, ivar2
);
6722 x
= fold_convert (TREE_TYPE (ref
), x
);
6723 ref
= build_outer_var_ref (var
, ctx
);
6724 gimplify_assign (ref
, x
, &llist
[1]);
6729 lower_private_allocate (var
, new_var
, allocator
,
6730 allocate_ptr
, ilist
, ctx
,
6732 if (omp_privatize_by_reference (var
) && is_simd
)
6733 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6734 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6735 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6737 gimplify_assign (new_var
, x
, ilist
);
6740 tree ref
= build_outer_var_ref (var
, ctx
);
6741 tree new_var2
= new_var
;
6745 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
6747 = fold_build2_loc (clause_loc
, NE_EXPR
,
6748 boolean_type_node
, new_var
,
6750 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6751 boolean_type_node
, ref
,
6754 x
= build2 (code
, TREE_TYPE (ref2
), ref2
, new_var2
);
6756 x
= fold_convert (TREE_TYPE (new_var
), x
);
6757 ref
= build_outer_var_ref (var
, ctx
);
6758 gimplify_assign (ref
, x
, dlist
);
6773 tree clobber
= build_clobber (TREE_TYPE (tskred_avar
));
6774 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
6777 if (known_eq (sctx
.max_vf
, 1U))
6779 sctx
.is_simt
= false;
6780 if (ctx
->lastprivate_conditional_map
)
6782 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
6784 /* Signal to lower_omp_1 that it should use parent context. */
6785 ctx
->combined_into_simd_safelen1
= true;
6786 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6787 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6788 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6790 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6791 omp_context
*outer
= ctx
->outer
;
6792 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
6793 outer
= outer
->outer
;
6794 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
6795 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
6796 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
6802 /* When not vectorized, treat lastprivate(conditional:) like
6803 normal lastprivate, as there will be just one simd lane
6804 writing the privatized variable. */
6805 delete ctx
->lastprivate_conditional_map
;
6806 ctx
->lastprivate_conditional_map
= NULL
;
6811 if (nonconst_simd_if
)
6813 if (sctx
.lane
== NULL_TREE
)
6815 sctx
.idx
= create_tmp_var (unsigned_type_node
);
6816 sctx
.lane
= create_tmp_var (unsigned_type_node
);
6818 /* FIXME: For now. */
6819 sctx
.is_simt
= false;
6822 if (sctx
.lane
|| sctx
.is_simt
)
6824 uid
= create_tmp_var (ptr_type_node
, "simduid");
6825 /* Don't want uninit warnings on simduid, it is always uninitialized,
6826 but we use it not for the value, but for the DECL_UID only. */
6827 suppress_warning (uid
, OPT_Wuninitialized
);
6828 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
6829 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
6830 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6831 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6833 /* Emit calls denoting privatized variables and initializing a pointer to
6834 structure that holds private variables as fields after ompdevlow pass. */
6837 sctx
.simt_eargs
[0] = uid
;
6839 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
6840 gimple_call_set_lhs (g
, uid
);
6841 gimple_seq_add_stmt (ilist
, g
);
6842 sctx
.simt_eargs
.release ();
6844 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
6845 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
6846 gimple_call_set_lhs (g
, simtrec
);
6847 gimple_seq_add_stmt (ilist
, g
);
6851 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
6852 2 + (nonconst_simd_if
!= NULL
),
6853 uid
, integer_zero_node
,
6855 gimple_call_set_lhs (g
, sctx
.lane
);
6856 gimple_stmt_iterator gsi
= gsi_start (*gimple_omp_body_ptr (ctx
->stmt
));
6857 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
6858 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
6859 build_int_cst (unsigned_type_node
, 0));
6860 gimple_seq_add_stmt (ilist
, g
);
6863 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6865 gimple_call_set_lhs (g
, sctx
.lastlane
);
6866 gimple_seq_add_stmt (dlist
, g
);
6867 gimple_seq_add_seq (dlist
, llist
[3]);
6869 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6872 tree simt_vf
= create_tmp_var (unsigned_type_node
);
6873 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
6874 gimple_call_set_lhs (g
, simt_vf
);
6875 gimple_seq_add_stmt (dlist
, g
);
6877 tree t
= build_int_cst (unsigned_type_node
, 1);
6878 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
6879 gimple_seq_add_stmt (dlist
, g
);
6881 t
= build_int_cst (unsigned_type_node
, 0);
6882 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6883 gimple_seq_add_stmt (dlist
, g
);
6885 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6886 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6887 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6888 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
6889 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
6891 gimple_seq_add_seq (dlist
, llist
[2]);
6893 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
6894 gimple_seq_add_stmt (dlist
, g
);
6896 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
6897 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
6898 gimple_seq_add_stmt (dlist
, g
);
6900 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
6902 for (int i
= 0; i
< 2; i
++)
6905 tree vf
= create_tmp_var (unsigned_type_node
);
6906 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
6907 gimple_call_set_lhs (g
, vf
);
6908 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
6909 gimple_seq_add_stmt (seq
, g
);
6910 tree t
= build_int_cst (unsigned_type_node
, 0);
6911 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6912 gimple_seq_add_stmt (seq
, g
);
6913 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6914 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6915 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6916 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
6917 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
6918 gimple_seq_add_seq (seq
, llist
[i
]);
6919 t
= build_int_cst (unsigned_type_node
, 1);
6920 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
6921 gimple_seq_add_stmt (seq
, g
);
6922 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
6923 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
6924 gimple_seq_add_stmt (seq
, g
);
6925 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
6930 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
6932 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
6933 gimple_seq_add_stmt (dlist
, g
);
6936 /* The copyin sequence is not to be executed by the main thread, since
6937 that would result in self-copies. Perhaps not visible to scalars,
6938 but it certainly is to C++ operator=. */
6941 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
6943 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
6944 build_int_cst (TREE_TYPE (x
), 0));
6945 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
6946 gimplify_and_add (x
, ilist
);
6949 /* If any copyin variable is passed by reference, we must ensure the
6950 master thread doesn't modify it before it is copied over in all
6951 threads. Similarly for variables in both firstprivate and
6952 lastprivate clauses we need to ensure the lastprivate copying
6953 happens after firstprivate copying in all threads. And similarly
6954 for UDRs if initializer expression refers to omp_orig. */
6955 if (copyin_by_ref
|| lastprivate_firstprivate
6956 || (reduction_omp_orig_ref
6957 && !ctx
->scan_inclusive
6958 && !ctx
->scan_exclusive
))
6960 /* Don't add any barrier for #pragma omp simd or
6961 #pragma omp distribute. */
6962 if (!is_task_ctx (ctx
)
6963 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
6964 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
6965 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
6968 /* If max_vf is non-zero, then we can use only a vectorization factor
6969 up to the max_vf we chose. So stick it into the safelen clause. */
6970 if (maybe_ne (sctx
.max_vf
, 0U))
6972 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
6973 OMP_CLAUSE_SAFELEN
);
6974 poly_uint64 safe_len
;
6976 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
6977 && maybe_gt (safe_len
, sctx
.max_vf
)))
6979 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
6980 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
6982 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6983 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6988 /* Create temporary variables for lastprivate(conditional:) implementation
6989 in context CTX with CLAUSES. */
6992 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
6994 tree iter_type
= NULL_TREE
;
6995 tree cond_ptr
= NULL_TREE
;
6996 tree iter_var
= NULL_TREE
;
6997 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6998 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
6999 tree next
= *clauses
;
7000 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7001 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7002 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
7006 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
7008 if (iter_type
== NULL_TREE
)
7010 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
7011 iter_var
= create_tmp_var_raw (iter_type
);
7012 DECL_CONTEXT (iter_var
) = current_function_decl
;
7013 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
7014 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
7015 ctx
->block_vars
= iter_var
;
7017 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
7018 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
7019 OMP_CLAUSE_DECL (c3
) = iter_var
;
7020 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
7022 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
7024 next
= OMP_CLAUSE_CHAIN (cc
);
7025 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7026 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
7027 ctx
->lastprivate_conditional_map
->put (o
, v
);
7030 if (iter_type
== NULL
)
7032 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
7034 struct omp_for_data fd
;
7035 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
7037 iter_type
= unsigned_type_for (fd
.iter_type
);
7039 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
7040 iter_type
= unsigned_type_node
;
7041 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
7045 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
7046 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
7050 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
7051 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
7052 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
7053 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
7054 ctx
->block_vars
= cond_ptr
;
7055 c2
= build_omp_clause (UNKNOWN_LOCATION
,
7056 OMP_CLAUSE__CONDTEMP_
);
7057 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
7058 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
7061 iter_var
= create_tmp_var_raw (iter_type
);
7062 DECL_CONTEXT (iter_var
) = current_function_decl
;
7063 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
7064 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
7065 ctx
->block_vars
= iter_var
;
7067 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
7068 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
7069 OMP_CLAUSE_DECL (c3
) = iter_var
;
7070 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
7071 OMP_CLAUSE_CHAIN (c2
) = c3
;
7072 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
7074 tree v
= create_tmp_var_raw (iter_type
);
7075 DECL_CONTEXT (v
) = current_function_decl
;
7076 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
7077 DECL_CHAIN (v
) = ctx
->block_vars
;
7078 ctx
->block_vars
= v
;
7079 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7080 ctx
->lastprivate_conditional_map
->put (o
, v
);
7085 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7086 both parallel and workshare constructs. PREDICATE may be NULL if it's
7087 always true. BODY_P is the sequence to insert early initialization
7088 if needed, STMT_LIST is where the non-conditional lastprivate handling
7089 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7093 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
7094 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
7097 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
7098 bool par_clauses
= false;
7099 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
7100 unsigned HOST_WIDE_INT conditional_off
= 0;
7101 gimple_seq post_stmt_list
= NULL
;
7103 /* Early exit if there are no lastprivate or linear clauses. */
7104 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
7105 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
7106 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
7107 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
7109 if (clauses
== NULL
)
7111 /* If this was a workshare clause, see if it had been combined
7112 with its parallel. In that case, look for the clauses on the
7113 parallel statement itself. */
7114 if (is_parallel_ctx (ctx
))
7118 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
7121 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7122 OMP_CLAUSE_LASTPRIVATE
);
7123 if (clauses
== NULL
)
7128 bool maybe_simt
= false;
7129 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7130 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7132 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
7133 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
7135 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
7141 tree label_true
, arm1
, arm2
;
7142 enum tree_code pred_code
= TREE_CODE (predicate
);
7144 label
= create_artificial_label (UNKNOWN_LOCATION
);
7145 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
7146 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
7148 arm1
= TREE_OPERAND (predicate
, 0);
7149 arm2
= TREE_OPERAND (predicate
, 1);
7150 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7151 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7156 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7157 arm2
= boolean_false_node
;
7158 pred_code
= NE_EXPR
;
7162 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
7163 c
= fold_convert (integer_type_node
, c
);
7164 simtcond
= create_tmp_var (integer_type_node
);
7165 gimplify_assign (simtcond
, c
, stmt_list
);
7166 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
7168 c
= create_tmp_var (integer_type_node
);
7169 gimple_call_set_lhs (g
, c
);
7170 gimple_seq_add_stmt (stmt_list
, g
);
7171 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
7175 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
7176 gimple_seq_add_stmt (stmt_list
, stmt
);
7177 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
7180 tree cond_ptr
= NULL_TREE
;
7181 for (c
= clauses
; c
;)
7184 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7185 gimple_seq
*this_stmt_list
= stmt_list
;
7186 tree lab2
= NULL_TREE
;
7188 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7189 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
7190 && ctx
->lastprivate_conditional_map
7191 && !ctx
->combined_into_simd_safelen1
)
7193 gcc_assert (body_p
);
7196 if (cond_ptr
== NULL_TREE
)
7198 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
7199 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
7201 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
7202 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7203 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
7204 gimplify_assign (v
, build_zero_cst (type
), body_p
);
7205 this_stmt_list
= cstmt_list
;
7207 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
7209 mem
= build2 (MEM_REF
, type
, cond_ptr
,
7210 build_int_cst (TREE_TYPE (cond_ptr
),
7212 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
7215 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
7216 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
7217 tree mem2
= copy_node (mem
);
7218 gimple_seq seq
= NULL
;
7219 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
7220 gimple_seq_add_seq (this_stmt_list
, seq
);
7221 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
7222 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
7223 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
7224 gimple_seq_add_stmt (this_stmt_list
, g
);
7225 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
7226 gimplify_assign (mem2
, v
, this_stmt_list
);
7229 && ctx
->combined_into_simd_safelen1
7230 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7231 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
7232 && ctx
->lastprivate_conditional_map
)
7233 this_stmt_list
= &post_stmt_list
;
7235 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7236 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7237 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
7239 var
= OMP_CLAUSE_DECL (c
);
7240 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7241 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
7242 && is_taskloop_ctx (ctx
))
7244 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
7245 new_var
= lookup_decl (var
, ctx
->outer
);
7249 new_var
= lookup_decl (var
, ctx
);
7250 /* Avoid uninitialized warnings for lastprivate and
7251 for linear iterators. */
7253 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7254 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
7255 suppress_warning (new_var
, OPT_Wuninitialized
);
7258 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
7260 tree val
= DECL_VALUE_EXPR (new_var
);
7261 if (TREE_CODE (val
) == ARRAY_REF
7262 && VAR_P (TREE_OPERAND (val
, 0))
7263 && lookup_attribute ("omp simd array",
7264 DECL_ATTRIBUTES (TREE_OPERAND (val
,
7267 if (lastlane
== NULL
)
7269 lastlane
= create_tmp_var (unsigned_type_node
);
7271 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
7273 TREE_OPERAND (val
, 1));
7274 gimple_call_set_lhs (g
, lastlane
);
7275 gimple_seq_add_stmt (this_stmt_list
, g
);
7277 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
7278 TREE_OPERAND (val
, 0), lastlane
,
7279 NULL_TREE
, NULL_TREE
);
7280 TREE_THIS_NOTRAP (new_var
) = 1;
7283 else if (maybe_simt
)
7285 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
7286 ? DECL_VALUE_EXPR (new_var
)
7288 if (simtlast
== NULL
)
7290 simtlast
= create_tmp_var (unsigned_type_node
);
7291 gcall
*g
= gimple_build_call_internal
7292 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
7293 gimple_call_set_lhs (g
, simtlast
);
7294 gimple_seq_add_stmt (this_stmt_list
, g
);
7296 x
= build_call_expr_internal_loc
7297 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
7298 TREE_TYPE (val
), 2, val
, simtlast
);
7299 new_var
= unshare_expr (new_var
);
7300 gimplify_assign (new_var
, x
, this_stmt_list
);
7301 new_var
= unshare_expr (new_var
);
7304 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7305 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
7307 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
7308 gimple_seq_add_seq (this_stmt_list
,
7309 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
7310 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
7312 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7313 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
7315 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
7316 gimple_seq_add_seq (this_stmt_list
,
7317 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
7318 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
7322 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7323 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)
7324 && is_taskloop_ctx (ctx
))
7326 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
7328 if (is_global_var (ovar
))
7332 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
7333 if (omp_privatize_by_reference (var
))
7334 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7335 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
7336 gimplify_and_add (x
, this_stmt_list
);
7339 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
7343 c
= OMP_CLAUSE_CHAIN (c
);
7344 if (c
== NULL
&& !par_clauses
)
7346 /* If this was a workshare clause, see if it had been combined
7347 with its parallel. In that case, continue looking for the
7348 clauses also on the parallel statement itself. */
7349 if (is_parallel_ctx (ctx
))
7353 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
7356 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7357 OMP_CLAUSE_LASTPRIVATE
);
7363 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
7364 gimple_seq_add_seq (stmt_list
, post_stmt_list
);
7367 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7368 (which might be a placeholder). INNER is true if this is an inner
7369 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7370 join markers. Generate the before-loop forking sequence in
7371 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7372 general form of these sequences is
7374 GOACC_REDUCTION_SETUP
7376 GOACC_REDUCTION_INIT
7378 GOACC_REDUCTION_FINI
7380 GOACC_REDUCTION_TEARDOWN. */
7383 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
7384 gcall
*fork
, gcall
*private_marker
, gcall
*join
,
7385 gimple_seq
*fork_seq
, gimple_seq
*join_seq
,
7388 gimple_seq before_fork
= NULL
;
7389 gimple_seq after_fork
= NULL
;
7390 gimple_seq before_join
= NULL
;
7391 gimple_seq after_join
= NULL
;
7392 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
7393 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
7394 unsigned offset
= 0;
7396 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7397 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
7399 /* No 'reduction' clauses on OpenACC 'kernels'. */
7400 gcc_checking_assert (!is_oacc_kernels (ctx
));
7401 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7402 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
7404 tree orig
= OMP_CLAUSE_DECL (c
);
7405 tree var
= maybe_lookup_decl (orig
, ctx
);
7406 tree ref_to_res
= NULL_TREE
;
7407 tree incoming
, outgoing
, v1
, v2
, v3
;
7408 bool is_private
= false;
7410 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
7411 if (rcode
== MINUS_EXPR
)
7413 else if (rcode
== TRUTH_ANDIF_EXPR
)
7414 rcode
= BIT_AND_EXPR
;
7415 else if (rcode
== TRUTH_ORIF_EXPR
)
7416 rcode
= BIT_IOR_EXPR
;
7417 tree op
= build_int_cst (unsigned_type_node
, rcode
);
7422 incoming
= outgoing
= var
;
7426 /* See if an outer construct also reduces this variable. */
7427 omp_context
*outer
= ctx
;
7429 while (omp_context
*probe
= outer
->outer
)
7431 enum gimple_code type
= gimple_code (probe
->stmt
);
7436 case GIMPLE_OMP_FOR
:
7437 cls
= gimple_omp_for_clauses (probe
->stmt
);
7440 case GIMPLE_OMP_TARGET
:
7441 /* No 'reduction' clauses inside OpenACC 'kernels'
7443 gcc_checking_assert (!is_oacc_kernels (probe
));
7445 if (!is_gimple_omp_offloaded (probe
->stmt
))
7448 cls
= gimple_omp_target_clauses (probe
->stmt
);
7456 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
7457 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
7458 && orig
== OMP_CLAUSE_DECL (cls
))
7460 incoming
= outgoing
= lookup_decl (orig
, probe
);
7461 goto has_outer_reduction
;
7463 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
7464 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
7465 && orig
== OMP_CLAUSE_DECL (cls
))
7473 /* This is the outermost construct with this reduction,
7474 see if there's a mapping for it. */
7475 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
7476 && maybe_lookup_field (orig
, outer
) && !is_private
)
7478 ref_to_res
= build_receiver_ref (orig
, false, outer
);
7479 if (omp_privatize_by_reference (orig
))
7480 ref_to_res
= build_simple_mem_ref (ref_to_res
);
7482 tree type
= TREE_TYPE (var
);
7483 if (POINTER_TYPE_P (type
))
7484 type
= TREE_TYPE (type
);
7487 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
7491 /* Try to look at enclosing contexts for reduction var,
7492 use original if no mapping found. */
7494 omp_context
*c
= ctx
->outer
;
7497 t
= maybe_lookup_decl (orig
, c
);
7500 incoming
= outgoing
= (t
? t
: orig
);
7503 has_outer_reduction
:;
7507 ref_to_res
= integer_zero_node
;
7509 if (omp_privatize_by_reference (orig
))
7511 tree type
= TREE_TYPE (var
);
7512 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
7516 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
7517 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
7520 v1
= create_tmp_var (type
, id
);
7521 v2
= create_tmp_var (type
, id
);
7522 v3
= create_tmp_var (type
, id
);
7524 gimplify_assign (v1
, var
, fork_seq
);
7525 gimplify_assign (v2
, var
, fork_seq
);
7526 gimplify_assign (v3
, var
, fork_seq
);
7528 var
= build_simple_mem_ref (var
);
7529 v1
= build_simple_mem_ref (v1
);
7530 v2
= build_simple_mem_ref (v2
);
7531 v3
= build_simple_mem_ref (v3
);
7532 outgoing
= build_simple_mem_ref (outgoing
);
7534 if (!TREE_CONSTANT (incoming
))
7535 incoming
= build_simple_mem_ref (incoming
);
7538 /* Note that 'var' might be a mem ref. */
7541 /* Determine position in reduction buffer, which may be used
7542 by target. The parser has ensured that this is not a
7543 variable-sized type. */
7544 fixed_size_mode mode
7545 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
7546 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
7547 offset
= (offset
+ align
- 1) & ~(align
- 1);
7548 tree off
= build_int_cst (sizetype
, offset
);
7549 offset
+= GET_MODE_SIZE (mode
);
7553 init_code
= build_int_cst (integer_type_node
,
7554 IFN_GOACC_REDUCTION_INIT
);
7555 fini_code
= build_int_cst (integer_type_node
,
7556 IFN_GOACC_REDUCTION_FINI
);
7557 setup_code
= build_int_cst (integer_type_node
,
7558 IFN_GOACC_REDUCTION_SETUP
);
7559 teardown_code
= build_int_cst (integer_type_node
,
7560 IFN_GOACC_REDUCTION_TEARDOWN
);
7564 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7565 TREE_TYPE (var
), 6, setup_code
,
7566 unshare_expr (ref_to_res
),
7567 unshare_expr (incoming
),
7570 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7571 TREE_TYPE (var
), 6, init_code
,
7572 unshare_expr (ref_to_res
),
7573 unshare_expr (v1
), level
, op
, off
);
7575 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7576 TREE_TYPE (var
), 6, fini_code
,
7577 unshare_expr (ref_to_res
),
7578 unshare_expr (v2
), level
, op
, off
);
7580 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7581 TREE_TYPE (var
), 6, teardown_code
,
7582 ref_to_res
, unshare_expr (v3
),
7585 gimplify_assign (unshare_expr (v1
), setup_call
, &before_fork
);
7586 gimplify_assign (unshare_expr (v2
), init_call
, &after_fork
);
7587 gimplify_assign (unshare_expr (v3
), fini_call
, &before_join
);
7588 gimplify_assign (unshare_expr (outgoing
), teardown_call
, &after_join
);
7591 /* Now stitch things together. */
7592 gimple_seq_add_seq (fork_seq
, before_fork
);
7594 gimple_seq_add_stmt (fork_seq
, private_marker
);
7596 gimple_seq_add_stmt (fork_seq
, fork
);
7597 gimple_seq_add_seq (fork_seq
, after_fork
);
7599 gimple_seq_add_seq (join_seq
, before_join
);
7601 gimple_seq_add_stmt (join_seq
, join
);
7602 gimple_seq_add_seq (join_seq
, after_join
);
7605 /* Generate code to implement the REDUCTION clauses, append it
7606 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7607 that should be emitted also inside of the critical section,
7608 in that case clear *CLIST afterwards, otherwise leave it as is
7609 and let the caller emit it itself. */
7612 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
7613 gimple_seq
*clist
, omp_context
*ctx
)
7615 gimple_seq sub_seq
= NULL
;
7620 /* OpenACC loop reductions are handled elsewhere. */
7621 if (is_gimple_omp_oacc (ctx
->stmt
))
7624 /* SIMD reductions are handled in lower_rec_input_clauses. */
7625 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7626 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7629 /* inscan reductions are handled elsewhere. */
7630 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
7633 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7634 update in that case, otherwise use a lock. */
7635 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
7636 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7637 && !OMP_CLAUSE_REDUCTION_TASK (c
))
7639 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
7640 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7642 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7652 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7654 tree var
, ref
, new_var
, orig_var
;
7655 enum tree_code code
;
7656 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7658 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7659 || OMP_CLAUSE_REDUCTION_TASK (c
))
7662 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
7663 orig_var
= var
= OMP_CLAUSE_DECL (c
);
7664 if (TREE_CODE (var
) == MEM_REF
)
7666 var
= TREE_OPERAND (var
, 0);
7667 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
7668 var
= TREE_OPERAND (var
, 0);
7669 if (TREE_CODE (var
) == ADDR_EXPR
)
7670 var
= TREE_OPERAND (var
, 0);
7673 /* If this is a pointer or referenced based array
7674 section, the var could be private in the outer
7675 context e.g. on orphaned loop construct. Pretend this
7676 is private variable's outer reference. */
7677 ccode
= OMP_CLAUSE_PRIVATE
;
7678 if (INDIRECT_REF_P (var
))
7679 var
= TREE_OPERAND (var
, 0);
7682 if (is_variable_sized (var
))
7684 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
7685 var
= DECL_VALUE_EXPR (var
);
7686 gcc_assert (INDIRECT_REF_P (var
));
7687 var
= TREE_OPERAND (var
, 0);
7688 gcc_assert (DECL_P (var
));
7691 new_var
= lookup_decl (var
, ctx
);
7692 if (var
== OMP_CLAUSE_DECL (c
)
7693 && omp_privatize_by_reference (var
))
7694 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7695 ref
= build_outer_var_ref (var
, ctx
, ccode
);
7696 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
7698 /* reduction(-:var) sums up the partial results, so it acts
7699 identically to reduction(+:var). */
7700 if (code
== MINUS_EXPR
)
7703 bool is_truth_op
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
);
7706 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
7708 addr
= save_expr (addr
);
7709 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
7710 tree new_var2
= new_var
;
7714 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7715 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7716 boolean_type_node
, new_var
, zero
);
7717 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, boolean_type_node
,
7720 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (new_var2
), ref2
,
7723 x
= fold_convert (TREE_TYPE (new_var
), x
);
7724 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
7725 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
7726 gimplify_and_add (x
, stmt_seqp
);
7729 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7731 tree d
= OMP_CLAUSE_DECL (c
);
7732 tree type
= TREE_TYPE (d
);
7733 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7734 tree i
= create_tmp_var (TREE_TYPE (v
));
7735 tree ptype
= build_pointer_type (TREE_TYPE (type
));
7736 tree bias
= TREE_OPERAND (d
, 1);
7737 d
= TREE_OPERAND (d
, 0);
7738 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
7740 tree b
= TREE_OPERAND (d
, 1);
7741 b
= maybe_lookup_decl (b
, ctx
);
7744 b
= TREE_OPERAND (d
, 1);
7745 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
7747 if (integer_zerop (bias
))
7751 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
7752 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
7753 TREE_TYPE (b
), b
, bias
);
7755 d
= TREE_OPERAND (d
, 0);
7757 /* For ref build_outer_var_ref already performs this, so
7758 only new_var needs a dereference. */
7759 if (INDIRECT_REF_P (d
))
7761 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7762 gcc_assert (omp_privatize_by_reference (var
)
7763 && var
== orig_var
);
7765 else if (TREE_CODE (d
) == ADDR_EXPR
)
7767 if (orig_var
== var
)
7769 new_var
= build_fold_addr_expr (new_var
);
7770 ref
= build_fold_addr_expr (ref
);
7775 gcc_assert (orig_var
== var
);
7776 if (omp_privatize_by_reference (var
))
7777 ref
= build_fold_addr_expr (ref
);
7781 tree t
= maybe_lookup_decl (v
, ctx
);
7785 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
7786 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
7788 if (!integer_zerop (bias
))
7790 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
7791 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7792 TREE_TYPE (new_var
), new_var
,
7793 unshare_expr (bias
));
7794 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7795 TREE_TYPE (ref
), ref
, bias
);
7797 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
7798 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
7799 tree m
= create_tmp_var (ptype
);
7800 gimplify_assign (m
, new_var
, stmt_seqp
);
7802 m
= create_tmp_var (ptype
);
7803 gimplify_assign (m
, ref
, stmt_seqp
);
7805 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
7806 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7807 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7808 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
7809 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7810 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
7811 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7813 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7814 tree decl_placeholder
7815 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
7816 SET_DECL_VALUE_EXPR (placeholder
, out
);
7817 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7818 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
7819 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
7820 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7821 gimple_seq_add_seq (&sub_seq
,
7822 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7823 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7824 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7825 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
7833 tree zero
= build_zero_cst (TREE_TYPE (out
));
7834 out2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7835 boolean_type_node
, out
, zero
);
7836 priv2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7837 boolean_type_node
, priv
, zero
);
7839 x
= build2 (code
, TREE_TYPE (out2
), out2
, priv2
);
7841 x
= fold_convert (TREE_TYPE (out
), x
);
7842 out
= unshare_expr (out
);
7843 gimplify_assign (out
, x
, &sub_seq
);
7845 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
7846 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7847 gimple_seq_add_stmt (&sub_seq
, g
);
7848 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
7849 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7850 gimple_seq_add_stmt (&sub_seq
, g
);
7851 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
7852 build_int_cst (TREE_TYPE (i
), 1));
7853 gimple_seq_add_stmt (&sub_seq
, g
);
7854 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
7855 gimple_seq_add_stmt (&sub_seq
, g
);
7856 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
7858 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7860 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7862 if (omp_privatize_by_reference (var
)
7863 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7865 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
7866 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7867 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7868 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7869 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7870 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7871 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7875 tree new_var2
= new_var
;
7879 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7880 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7881 boolean_type_node
, new_var
, zero
);
7882 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, boolean_type_node
,
7885 x
= build2 (code
, TREE_TYPE (ref
), ref2
, new_var2
);
7887 x
= fold_convert (TREE_TYPE (new_var
), x
);
7888 ref
= build_outer_var_ref (var
, ctx
);
7889 gimplify_assign (ref
, x
, &sub_seq
);
7893 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
7895 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7897 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
7901 gimple_seq_add_seq (stmt_seqp
, *clist
);
7905 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
7907 gimple_seq_add_stmt (stmt_seqp
, stmt
);
/* Generate code to implement the COPYPRIVATE clauses: on the sender
   side (SLIST) store each copyprivate variable (or its address, when
   passed by reference) into the sender record; on the receiver side
   (RLIST) fetch the broadcast value back out and assign it via the
   frontend's omp_clause_assign_op hook.
   NOTE(review): this chunk is a lossy extraction -- the signature's
   trailing parameters, braces and several statements were dropped by
   the extraction tool (fused original line numbers have gaps); the
   surviving text is kept verbatim.  */
7914 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
/* Walk all clauses, handling only OMP_CLAUSE_COPYPRIVATE.  */
7919 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7921 tree var
, new_var
, ref
, x
;
7923 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7925 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
7928 var
= OMP_CLAUSE_DECL (c
);
7929 by_ref
= use_pointer_for_field (var
, NULL
);
/* Sender side: store VAR (its address when by_ref) into the
   sender record field.  */
7931 ref
= build_sender_ref (var
, ctx
);
7932 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
7935 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
7936 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
7938 gimplify_assign (ref
, x
, slist
);
/* Receiver side: read the broadcast value back out of the record.  */
7940 ref
= build_receiver_ref (var
, false, ctx
);
7943 ref
= fold_convert_loc (clause_loc
,
7944 build_pointer_type (TREE_TYPE (new_var
)),
7946 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
/* For by-reference privatized variables, dereference both sides
   before assigning.  */
7948 if (omp_privatize_by_reference (var
))
7950 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
7951 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
7952 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
/* Let the frontend emit the (possibly non-trivial) assignment.  */
7954 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
7955 gimplify_and_add (x
, rlist
);
7960 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7961 and REDUCTION from the sender (aka parent) side. */
7964 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
7968 int ignored_looptemp
= 0;
7969 bool is_taskloop
= false;
7971 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7972 by GOMP_taskloop. */
7973 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
7975 ignored_looptemp
= 2;
7979 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7981 tree val
, ref
, x
, var
;
7982 bool by_ref
, do_in
= false, do_out
= false;
7983 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7985 switch (OMP_CLAUSE_CODE (c
))
7987 case OMP_CLAUSE_PRIVATE
:
7988 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
7991 case OMP_CLAUSE_FIRSTPRIVATE
:
7992 case OMP_CLAUSE_COPYIN
:
7993 case OMP_CLAUSE_LASTPRIVATE
:
7994 case OMP_CLAUSE_IN_REDUCTION
:
7995 case OMP_CLAUSE__REDUCTEMP_
:
7997 case OMP_CLAUSE_REDUCTION
:
7998 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
8001 case OMP_CLAUSE_SHARED
:
8002 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
8005 case OMP_CLAUSE__LOOPTEMP_
:
8006 if (ignored_looptemp
)
8016 val
= OMP_CLAUSE_DECL (c
);
8017 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
8018 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
8019 && TREE_CODE (val
) == MEM_REF
)
8021 val
= TREE_OPERAND (val
, 0);
8022 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
8023 val
= TREE_OPERAND (val
, 0);
8024 if (INDIRECT_REF_P (val
)
8025 || TREE_CODE (val
) == ADDR_EXPR
)
8026 val
= TREE_OPERAND (val
, 0);
8027 if (is_variable_sized (val
))
8031 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8032 outer taskloop region. */
8033 omp_context
*ctx_for_o
= ctx
;
8035 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
8036 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
8037 ctx_for_o
= ctx
->outer
;
8039 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
8041 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
8042 && is_global_var (var
)
8043 && (val
== OMP_CLAUSE_DECL (c
)
8044 || !is_task_ctx (ctx
)
8045 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
8046 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
8047 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
8048 != POINTER_TYPE
)))))
8051 t
= omp_member_access_dummy_var (var
);
8054 var
= DECL_VALUE_EXPR (var
);
8055 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
8057 var
= unshare_and_remap (var
, t
, o
);
8059 var
= unshare_expr (var
);
8062 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
8064 /* Handle taskloop firstprivate/lastprivate, where the
8065 lastprivate on GIMPLE_OMP_TASK is represented as
8066 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8067 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
8068 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
8069 if (use_pointer_for_field (val
, ctx
))
8070 var
= build_fold_addr_expr (var
);
8071 gimplify_assign (x
, var
, ilist
);
8072 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
8076 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
8077 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
8078 || val
== OMP_CLAUSE_DECL (c
))
8079 && is_variable_sized (val
))
8081 by_ref
= use_pointer_for_field (val
, NULL
);
8083 switch (OMP_CLAUSE_CODE (c
))
8085 case OMP_CLAUSE_FIRSTPRIVATE
:
8086 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
8088 && is_task_ctx (ctx
))
8089 suppress_warning (var
);
8093 case OMP_CLAUSE_PRIVATE
:
8094 case OMP_CLAUSE_COPYIN
:
8095 case OMP_CLAUSE__LOOPTEMP_
:
8096 case OMP_CLAUSE__REDUCTEMP_
:
8100 case OMP_CLAUSE_LASTPRIVATE
:
8101 if (by_ref
|| omp_privatize_by_reference (val
))
8103 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
8110 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
8115 case OMP_CLAUSE_REDUCTION
:
8116 case OMP_CLAUSE_IN_REDUCTION
:
8118 if (val
== OMP_CLAUSE_DECL (c
))
8120 if (is_task_ctx (ctx
))
8121 by_ref
= use_pointer_for_field (val
, ctx
);
8123 do_out
= !(by_ref
|| omp_privatize_by_reference (val
));
8126 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
8135 ref
= build_sender_ref (val
, ctx
);
8136 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
8137 gimplify_assign (ref
, x
, ilist
);
8138 if (is_task_ctx (ctx
))
8139 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
8144 ref
= build_sender_ref (val
, ctx
);
8145 gimplify_assign (var
, ref
, olist
);
8150 /* Generate code to implement SHARED from the sender (aka parent)
8151 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8152 list things that got automatically shared. */
8155 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
8157 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
8159 if (ctx
->record_type
== NULL
)
8162 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
8163 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
8165 ovar
= DECL_ABSTRACT_ORIGIN (f
);
8166 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
8169 nvar
= maybe_lookup_decl (ovar
, ctx
);
8171 || !DECL_HAS_VALUE_EXPR_P (nvar
)
8172 || (ctx
->allocate_map
8173 && ctx
->allocate_map
->get (ovar
)))
8176 /* If CTX is a nested parallel directive. Find the immediately
8177 enclosing parallel or workshare construct that contains a
8178 mapping for OVAR. */
8179 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
8181 t
= omp_member_access_dummy_var (var
);
8184 var
= DECL_VALUE_EXPR (var
);
8185 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
8187 var
= unshare_and_remap (var
, t
, o
);
8189 var
= unshare_expr (var
);
8192 if (use_pointer_for_field (ovar
, ctx
))
8194 x
= build_sender_ref (ovar
, ctx
);
8195 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
8196 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
8198 gcc_assert (is_parallel_ctx (ctx
)
8199 && DECL_ARTIFICIAL (ovar
));
8200 /* _condtemp_ clause. */
8201 var
= build_constructor (TREE_TYPE (x
), NULL
);
8204 var
= build_fold_addr_expr (var
);
8205 gimplify_assign (x
, var
, ilist
);
8209 x
= build_sender_ref (ovar
, ctx
);
8210 gimplify_assign (x
, var
, ilist
);
8212 if (!TREE_READONLY (var
)
8213 /* We don't need to receive a new reference to a result
8214 or parm decl. In fact we may not store to it as we will
8215 invalidate any pending RSO and generate wrong gimple
8217 && !((TREE_CODE (var
) == RESULT_DECL
8218 || TREE_CODE (var
) == PARM_DECL
)
8219 && DECL_BY_REFERENCE (var
)))
8221 x
= build_sender_ref (ovar
, ctx
);
8222 gimplify_assign (var
, x
, olist
);
8228 /* Emit an OpenACC head marker call, encapulating the partitioning and
8229 other information that must be processed by the target compiler.
8230 Return the maximum number of dimensions the associated loop might
8231 be partitioned over. */
8234 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
8235 gimple_seq
*seq
, omp_context
*ctx
)
8237 unsigned levels
= 0;
8239 tree gang_static
= NULL_TREE
;
8240 auto_vec
<tree
, 5> args
;
8242 args
.quick_push (build_int_cst
8243 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
8244 args
.quick_push (ddvar
);
8245 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8247 switch (OMP_CLAUSE_CODE (c
))
8249 case OMP_CLAUSE_GANG
:
8250 tag
|= OLF_DIM_GANG
;
8251 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
8252 /* static:* is represented by -1, and we can ignore it, as
8253 scheduling is always static. */
8254 if (gang_static
&& integer_minus_onep (gang_static
))
8255 gang_static
= NULL_TREE
;
8259 case OMP_CLAUSE_WORKER
:
8260 tag
|= OLF_DIM_WORKER
;
8264 case OMP_CLAUSE_VECTOR
:
8265 tag
|= OLF_DIM_VECTOR
;
8269 case OMP_CLAUSE_SEQ
:
8273 case OMP_CLAUSE_AUTO
:
8277 case OMP_CLAUSE_INDEPENDENT
:
8278 tag
|= OLF_INDEPENDENT
;
8281 case OMP_CLAUSE_TILE
:
8285 case OMP_CLAUSE_REDUCTION
:
8286 tag
|= OLF_REDUCTION
;
8296 if (DECL_P (gang_static
))
8297 gang_static
= build_outer_var_ref (gang_static
, ctx
);
8298 tag
|= OLF_GANG_STATIC
;
8301 omp_context
*tgt
= enclosing_target_ctx (ctx
);
8302 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
8304 else if (is_oacc_kernels (tgt
))
8305 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8307 else if (is_oacc_kernels_decomposed_part (tgt
))
8312 /* In a parallel region, loops are implicitly INDEPENDENT. */
8313 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
8314 tag
|= OLF_INDEPENDENT
;
8316 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8317 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8318 if (tgt
&& is_oacc_kernels_decomposed_part (tgt
))
8320 gcc_assert (tag
& (OLF_SEQ
| OLF_INDEPENDENT
));
8321 gcc_assert (!(tag
& OLF_AUTO
));
8325 /* Tiling could use all 3 levels. */
8329 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8330 Ensure at least one level, or 2 for possible auto
8332 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
8333 << OLF_DIM_BASE
) | OLF_SEQ
));
8335 if (levels
< 1u + maybe_auto
)
8336 levels
= 1u + maybe_auto
;
8339 args
.quick_push (build_int_cst (integer_type_node
, levels
));
8340 args
.quick_push (build_int_cst (integer_type_node
, tag
));
8342 args
.quick_push (gang_static
);
8344 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
8345 gimple_set_location (call
, loc
);
8346 gimple_set_lhs (call
, ddvar
);
8347 gimple_seq_add_stmt (seq
, call
);
/* Emit an OpenACC loop head or tail marker (an IFN_UNIQUE internal
   call) to SEQ.  LOC is the source location, DDVAR the artificial
   data-dependency variable threaded through the marker calls, HEAD
   selects between the head and tail marker kinds, and TOFOLLOW
   (possibly NULL_TREE) is an optional extra argument, e.g. the
   partitioning level of the enclosed region.  */
8356 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
8357 tree tofollow
, gimple_seq
*seq
)
8359 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
8360 : IFN_UNIQUE_OACC_TAIL_MARK
);
8361 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
/* Only pass TOFOLLOW when it is present.  */
8362 int nargs
= 2 + (tofollow
!= NULL_TREE
);
8363 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
8364 marker
, ddvar
, tofollow
);
8365 gimple_set_location (call
, loc
);
/* DDVAR is both consumed and redefined, chaining the markers
   together so they cannot be reordered.  */
8366 gimple_set_lhs (call
, ddvar
);
8367 gimple_seq_add_stmt (seq
, call
);
8370 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8371 the loop clauses, from which we extract reductions. Initialize
8375 lower_oacc_head_tail (location_t loc
, tree clauses
, gcall
*private_marker
,
8376 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
8379 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
8380 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
8382 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
8386 gimple_set_location (private_marker
, loc
);
8387 gimple_call_set_lhs (private_marker
, ddvar
);
8388 gimple_call_set_arg (private_marker
, 1, ddvar
);
8391 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
8392 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
8395 for (unsigned done
= 1; count
; count
--, done
++)
8397 gimple_seq fork_seq
= NULL
;
8398 gimple_seq join_seq
= NULL
;
8400 tree place
= build_int_cst (integer_type_node
, -1);
8401 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
8402 fork_kind
, ddvar
, place
);
8403 gimple_set_location (fork
, loc
);
8404 gimple_set_lhs (fork
, ddvar
);
8406 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
8407 join_kind
, ddvar
, place
);
8408 gimple_set_location (join
, loc
);
8409 gimple_set_lhs (join
, ddvar
);
8411 /* Mark the beginning of this level sequence. */
8413 lower_oacc_loop_marker (loc
, ddvar
, true,
8414 build_int_cst (integer_type_node
, count
),
8416 lower_oacc_loop_marker (loc
, ddvar
, false,
8417 build_int_cst (integer_type_node
, done
),
8420 lower_oacc_reductions (loc
, clauses
, place
, inner
,
8421 fork
, (count
== 1) ? private_marker
: NULL
,
8422 join
, &fork_seq
, &join_seq
, ctx
);
8424 /* Append this level to head. */
8425 gimple_seq_add_seq (head
, fork_seq
);
8426 /* Prepend it to tail. */
8427 gimple_seq_add_seq (&join_seq
, *tail
);
8433 /* Mark the end of the sequence. */
8434 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
8435 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
/* If exceptions are enabled, wrap the statements in BODY in a
   MUST_NOT_THROW catch handler and return the resulting sequence.
   This prevents programs from violating the structured block
   semantics of OMP constructs with throws.
   NOTE(review): lossy extraction -- declarations, the early
   "return body" path, brace lines and the GIMPLE_TRY_CATCH argument
   were dropped by the tool; the surviving text is kept verbatim.  */
8443 maybe_catch_exception (gimple_seq body
)
/* Without -fexceptions there is nothing to guard against.  */
8448 if (!flag_exceptions
)
/* Prefer the language-specific cleanup action (e.g. terminate for
   C++) when the frontend provides one...  */
8451 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
8452 decl
= lang_hooks
.eh_protect_cleanup_actions ();
/* ...otherwise fall back to trapping.  */
8454 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
/* Build the MUST_NOT_THROW handler and wrap BODY in a try.  */
8456 g
= gimple_build_eh_must_not_throw (decl
);
8457 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
8460 return gimple_seq_alloc_with_stmt (g
);
8464 /* Routines to lower OMP directives into OMP-GIMPLE. */
/* If CTX is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add an LHS to its GIMPLE_OMP_RETURN
   statement OMP_RETURN and append to BODY a conditional branch to
   the parallel's cancel_label, so that cancellation is honoured at
   the construct's implicit barrier.  */
8472 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
8475 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
/* A nowait construct has no implicit barrier; nothing to do.  */
8476 if (gimple_omp_return_nowait_p (omp_return
))
/* Walk outward looking for the innermost cancellable parallel.  */
8478 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
8479 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
8480 && outer
->cancellable
)
8482 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
/* Reuse GOMP_cancel's boolean return type for the barrier's
   cancellation result.  */
8483 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
8484 tree lhs
= create_tmp_var (c_bool_type
);
8485 gimple_omp_return_set_lhs (omp_return
, lhs
);
/* Branch to the parallel's cancel label when the barrier reports
   cancellation, otherwise fall through.  */
8486 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
8487 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
8488 fold_convert (c_bool_type
,
8489 boolean_false_node
),
8490 outer
->cancel_label
, fallthru_label
);
8491 gimple_seq_add_stmt (body
, g
);
8492 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
/* Taskgroup and scope contexts are transparent to this search;
   any other construct terminates it.  */
8494 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
8495 && gimple_code (outer
->stmt
) != GIMPLE_OMP_SCOPE
)
/* Find the first task reduction clause among CLAUSES, i.e. the first
   CCODE clause that, when CCODE is OMP_CLAUSE_REDUCTION, is either on
   a taskloop (CODE == OMP_TASKLOOP) or carries the task modifier.
   Return NULL if there are none.  */
8503 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
8504 enum omp_clause_code ccode
)
8508 clauses
= omp_find_clause (clauses
, ccode
);
8509 if (clauses
== NULL_TREE
)
/* task_reduction clauses always qualify; a plain reduction only
   qualifies on taskloop or with the task modifier set.  */
8511 if (ccode
!= OMP_CLAUSE_REDUCTION
8512 || code
== OMP_TASKLOOP
8513 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
8515 clauses
= OMP_CLAUSE_CHAIN (clauses
);
8519 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
8520 gimple_seq
*, gimple_seq
*);
8522 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8523 CTX is the enclosing OMP context for the current statement. */
8526 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8528 tree block
, control
;
8529 gimple_stmt_iterator tgsi
;
8530 gomp_sections
*stmt
;
8532 gbind
*new_stmt
, *bind
;
8533 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
8535 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
8537 push_gimplify_context ();
8543 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
8544 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
8545 tree rtmp
= NULL_TREE
;
8548 tree type
= build_pointer_type (pointer_sized_int_node
);
8549 tree temp
= create_tmp_var (type
);
8550 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8551 OMP_CLAUSE_DECL (c
) = temp
;
8552 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
8553 gimple_omp_sections_set_clauses (stmt
, c
);
8554 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
8555 gimple_omp_sections_clauses (stmt
),
8556 &ilist
, &tred_dlist
);
8558 rtmp
= make_ssa_name (type
);
8559 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
8562 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
8563 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
8565 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
8566 &ilist
, &dlist
, ctx
, NULL
);
8568 control
= create_tmp_var (unsigned_type_node
, ".section");
8569 gimple_omp_sections_set_control (stmt
, control
);
8571 new_body
= gimple_omp_body (stmt
);
8572 gimple_omp_set_body (stmt
, NULL
);
8573 tgsi
= gsi_start (new_body
);
8574 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
8579 sec_start
= gsi_stmt (tgsi
);
8580 sctx
= maybe_lookup_ctx (sec_start
);
8583 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
8584 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
8585 GSI_CONTINUE_LINKING
);
8586 gimple_omp_set_body (sec_start
, NULL
);
8588 if (gsi_one_before_end_p (tgsi
))
8590 gimple_seq l
= NULL
;
8591 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
8592 &ilist
, &l
, &clist
, ctx
);
8593 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
8594 gimple_omp_section_set_last (sec_start
);
8597 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
8598 GSI_CONTINUE_LINKING
);
8601 block
= make_node (BLOCK
);
8602 bind
= gimple_build_bind (NULL
, new_body
, block
);
8605 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
8609 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8610 gcall
*g
= gimple_build_call (fndecl
, 0);
8611 gimple_seq_add_stmt (&olist
, g
);
8612 gimple_seq_add_seq (&olist
, clist
);
8613 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8614 g
= gimple_build_call (fndecl
, 0);
8615 gimple_seq_add_stmt (&olist
, g
);
8618 block
= make_node (BLOCK
);
8619 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
8620 gsi_replace (gsi_p
, new_stmt
, true);
8622 pop_gimplify_context (new_stmt
);
8623 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
8624 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
8625 if (BLOCK_VARS (block
))
8626 TREE_USED (block
) = 1;
8629 gimple_seq_add_seq (&new_body
, ilist
);
8630 gimple_seq_add_stmt (&new_body
, stmt
);
8631 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
8632 gimple_seq_add_stmt (&new_body
, bind
);
8634 t
= gimple_build_omp_continue (control
, control
);
8635 gimple_seq_add_stmt (&new_body
, t
);
8637 gimple_seq_add_seq (&new_body
, olist
);
8638 if (ctx
->cancellable
)
8639 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
8640 gimple_seq_add_seq (&new_body
, dlist
);
8642 new_body
= maybe_catch_exception (new_body
);
8644 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
8645 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8646 t
= gimple_build_omp_return (nowait
);
8647 gimple_seq_add_stmt (&new_body
, t
);
8648 gimple_seq_add_seq (&new_body
, tred_dlist
);
8649 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
8652 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
8654 gimple_bind_set_body (new_stmt
, new_body
);
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, without a copyprivate clause:

	if (GOMP_single_start ())
	  BODY;
	[ GOMP_barrier (); ]	-> unless 'nowait' is present.

   FIXME.  It may be better to delay expanding the logic of this until
   pass_expand_omp.  The expanded logic may make the job more difficult
   to a synchronization analysis pass.  */
8670 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
8672 location_t loc
= gimple_location (single_stmt
);
8673 tree tlabel
= create_artificial_label (loc
);
8674 tree flabel
= create_artificial_label (loc
);
8675 gimple
*call
, *cond
;
/* lhs = GOMP_single_start ();  */
8678 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
8679 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
8680 call
= gimple_build_call (decl
, 0);
8681 gimple_call_set_lhs (call
, lhs
);
8682 gimple_seq_add_stmt (pre_p
, call
);
/* Only the single thread for which GOMP_single_start returned
   true executes the body; all others jump past it.  */
8684 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
8685 fold_convert_loc (loc
, TREE_TYPE (lhs
),
8688 gimple_seq_add_stmt (pre_p
, cond
);
8689 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
8690 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8691 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
8695 /* A subroutine of lower_omp_single. Expand the simple form of
8696 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8698 #pragma omp single copyprivate (a, b, c)
8700 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8703 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8709 GOMP_single_copy_end (©out);
8720 FIXME. It may be better to delay expanding the logic of this until
8721 pass_expand_omp. The expanded logic may make the job more difficult
8722 to a synchronization analysis pass. */
8725 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
8728 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
8729 gimple_seq copyin_seq
;
8730 location_t loc
= gimple_location (single_stmt
);
8732 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
8734 ptr_type
= build_pointer_type (ctx
->record_type
);
8735 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
8737 l0
= create_artificial_label (loc
);
8738 l1
= create_artificial_label (loc
);
8739 l2
= create_artificial_label (loc
);
8741 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
8742 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
8743 t
= fold_convert_loc (loc
, ptr_type
, t
);
8744 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
8746 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
8747 build_int_cst (ptr_type
, 0));
8748 t
= build3 (COND_EXPR
, void_type_node
, t
,
8749 build_and_jump (&l0
), build_and_jump (&l1
));
8750 gimplify_and_add (t
, pre_p
);
8752 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
8754 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8757 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
8760 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
8761 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
8762 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
8763 gimplify_and_add (t
, pre_p
);
8765 t
= build_and_jump (&l2
);
8766 gimplify_and_add (t
, pre_p
);
8768 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
8770 gimple_seq_add_seq (pre_p
, copyin_seq
);
8772 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
8776 /* Expand code for an OpenMP single directive. */
8779 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8782 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
8784 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8786 push_gimplify_context ();
8788 block
= make_node (BLOCK
);
8789 bind
= gimple_build_bind (NULL
, NULL
, block
);
8790 gsi_replace (gsi_p
, bind
, true);
8793 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
8794 &bind_body
, &dlist
, ctx
, NULL
);
8795 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
8797 gimple_seq_add_stmt (&bind_body
, single_stmt
);
8799 if (ctx
->record_type
)
8800 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
8802 lower_omp_single_simple (single_stmt
, &bind_body
);
8804 gimple_omp_set_body (single_stmt
, NULL
);
8806 gimple_seq_add_seq (&bind_body
, dlist
);
8808 bind_body
= maybe_catch_exception (bind_body
);
8810 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
8811 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8812 gimple
*g
= gimple_build_omp_return (nowait
);
8813 gimple_seq_add_stmt (&bind_body_tail
, g
);
8814 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
8815 if (ctx
->record_type
)
8817 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
8818 tree clobber
= build_clobber (ctx
->record_type
);
8819 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
8820 clobber
), GSI_SAME_STMT
);
8822 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
8823 gimple_bind_set_body (bind
, bind_body
);
8825 pop_gimplify_context (bind
);
8827 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8828 BLOCK_VARS (block
) = ctx
->block_vars
;
8829 if (BLOCK_VARS (block
))
8830 TREE_USED (block
) = 1;
8834 /* Lower code for an OMP scope directive. */
8837 lower_omp_scope (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8840 gimple
*scope_stmt
= gsi_stmt (*gsi_p
);
8842 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8843 gimple_seq tred_dlist
= NULL
;
8845 push_gimplify_context ();
8847 block
= make_node (BLOCK
);
8848 bind
= gimple_build_bind (NULL
, NULL
, block
);
8849 gsi_replace (gsi_p
, bind
, true);
8854 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt
),
8855 OMP_SCOPE
, OMP_CLAUSE_REDUCTION
);
8858 tree type
= build_pointer_type (pointer_sized_int_node
);
8859 tree temp
= create_tmp_var (type
);
8860 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8861 OMP_CLAUSE_DECL (c
) = temp
;
8862 OMP_CLAUSE_CHAIN (c
) = gimple_omp_scope_clauses (scope_stmt
);
8863 gimple_omp_scope_set_clauses (scope_stmt
, c
);
8864 lower_omp_task_reductions (ctx
, OMP_SCOPE
,
8865 gimple_omp_scope_clauses (scope_stmt
),
8866 &bind_body
, &tred_dlist
);
8868 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START
);
8869 gimple
*stmt
= gimple_build_call (fndecl
, 1, temp
);
8870 gimple_seq_add_stmt (&bind_body
, stmt
);
8873 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt
),
8874 &bind_body
, &dlist
, ctx
, NULL
);
8875 lower_omp (gimple_omp_body_ptr (scope_stmt
), ctx
);
8877 gimple_seq_add_stmt (&bind_body
, scope_stmt
);
8879 gimple_seq_add_seq (&bind_body
, gimple_omp_body (scope_stmt
));
8881 gimple_omp_set_body (scope_stmt
, NULL
);
8883 gimple_seq clist
= NULL
;
8884 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt
),
8885 &bind_body
, &clist
, ctx
);
8888 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8889 gcall
*g
= gimple_build_call (fndecl
, 0);
8890 gimple_seq_add_stmt (&bind_body
, g
);
8891 gimple_seq_add_seq (&bind_body
, clist
);
8892 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8893 g
= gimple_build_call (fndecl
, 0);
8894 gimple_seq_add_stmt (&bind_body
, g
);
8897 gimple_seq_add_seq (&bind_body
, dlist
);
8899 bind_body
= maybe_catch_exception (bind_body
);
8901 bool nowait
= omp_find_clause (gimple_omp_scope_clauses (scope_stmt
),
8902 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8903 gimple
*g
= gimple_build_omp_return (nowait
);
8904 gimple_seq_add_stmt (&bind_body_tail
, g
);
8905 gimple_seq_add_seq (&bind_body_tail
, tred_dlist
);
8906 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
8907 if (ctx
->record_type
)
8909 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
8910 tree clobber
= build_clobber (ctx
->record_type
);
8911 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
8912 clobber
), GSI_SAME_STMT
);
8914 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
8916 gimple_bind_set_body (bind
, bind_body
);
8918 pop_gimplify_context (bind
);
8920 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8921 BLOCK_VARS (block
) = ctx
->block_vars
;
8922 if (BLOCK_VARS (block
))
8923 TREE_USED (block
) = 1;
8925 /* Expand code for an OpenMP master or masked directive. */
8928 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8930 tree block
, lab
= NULL
, x
, bfn_decl
;
8931 gimple
*stmt
= gsi_stmt (*gsi_p
);
8933 location_t loc
= gimple_location (stmt
);
8935 tree filter
= integer_zero_node
;
8937 push_gimplify_context ();
8939 if (gimple_code (stmt
) == GIMPLE_OMP_MASKED
)
8941 filter
= omp_find_clause (gimple_omp_masked_clauses (stmt
),
8944 filter
= fold_convert (integer_type_node
,
8945 OMP_CLAUSE_FILTER_EXPR (filter
));
8947 filter
= integer_zero_node
;
8949 block
= make_node (BLOCK
);
8950 bind
= gimple_build_bind (NULL
, NULL
, block
);
8951 gsi_replace (gsi_p
, bind
, true);
8952 gimple_bind_add_stmt (bind
, stmt
);
8954 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8955 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
8956 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, filter
);
8957 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
8959 gimplify_and_add (x
, &tseq
);
8960 gimple_bind_add_seq (bind
, tseq
);
8962 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8963 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
8964 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8965 gimple_omp_set_body (stmt
, NULL
);
8967 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
8969 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8971 pop_gimplify_context (bind
);
8973 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8974 BLOCK_VARS (block
) = ctx
->block_vars
;
8977 /* Helper function for lower_omp_task_reductions. For a specific PASS
8978 find out the current clause it should be processed, or return false
8979 if all have been processed already. */
8982 omp_task_reduction_iterate (int pass
, enum tree_code code
,
8983 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
8984 tree
*type
, tree
*next
)
8986 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
8988 if (ccode
== OMP_CLAUSE_REDUCTION
8989 && code
!= OMP_TASKLOOP
8990 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
8992 *decl
= OMP_CLAUSE_DECL (*c
);
8993 *type
= TREE_TYPE (*decl
);
8994 if (TREE_CODE (*decl
) == MEM_REF
)
9001 if (omp_privatize_by_reference (*decl
))
9002 *type
= TREE_TYPE (*type
);
9003 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
9006 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
9015 /* Lower task_reduction and reduction clauses (the latter unless CODE is
9016 OMP_TASKGROUP only with task modifier). Register mapping of those in
9017 START sequence and reducing them and unregister them in the END sequence. */
9020 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
9021 gimple_seq
*start
, gimple_seq
*end
)
9023 enum omp_clause_code ccode
9024 = (code
== OMP_TASKGROUP
9025 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
9026 tree cancellable
= NULL_TREE
;
9027 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
9028 if (clauses
== NULL_TREE
)
9030 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9032 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
9033 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
9034 && outer
->cancellable
)
9036 cancellable
= error_mark_node
;
9039 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
9040 && gimple_code (outer
->stmt
) != GIMPLE_OMP_SCOPE
)
9043 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
9044 tree
*last
= &TYPE_FIELDS (record_type
);
9048 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
9050 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
9053 DECL_CHAIN (field
) = ifield
;
9054 last
= &DECL_CHAIN (ifield
);
9055 DECL_CONTEXT (field
) = record_type
;
9056 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
9057 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
9058 DECL_CONTEXT (ifield
) = record_type
;
9059 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
9060 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
9062 for (int pass
= 0; pass
< 2; pass
++)
9064 tree decl
, type
, next
;
9065 for (tree c
= clauses
;
9066 omp_task_reduction_iterate (pass
, code
, ccode
,
9067 &c
, &decl
, &type
, &next
); c
= next
)
9070 tree new_type
= type
;
9072 new_type
= remap_type (type
, &ctx
->outer
->cb
);
9074 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
9075 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
9077 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
9079 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
9080 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
9081 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
9084 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
9085 DECL_CONTEXT (field
) = record_type
;
9086 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
9087 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
9089 last
= &DECL_CHAIN (field
);
9091 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
9093 DECL_CONTEXT (bfield
) = record_type
;
9094 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
9095 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
9097 last
= &DECL_CHAIN (bfield
);
9101 layout_type (record_type
);
9103 /* Build up an array which registers with the runtime all the reductions
9104 and deregisters them at the end. Format documented in libgomp/task.c. */
9105 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
9106 tree avar
= create_tmp_var_raw (atype
);
9107 gimple_add_tmp_var (avar
);
9108 TREE_ADDRESSABLE (avar
) = 1;
9109 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
9110 NULL_TREE
, NULL_TREE
);
9111 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
9112 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9113 gimple_seq seq
= NULL
;
9114 tree sz
= fold_convert (pointer_sized_int_node
,
9115 TYPE_SIZE_UNIT (record_type
));
9117 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
9118 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
9119 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
9120 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
9121 ctx
->task_reductions
.create (1 + cnt
);
9122 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
9123 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
9125 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
9126 gimple_seq_add_seq (start
, seq
);
9127 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
9128 NULL_TREE
, NULL_TREE
);
9129 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
9130 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
9131 NULL_TREE
, NULL_TREE
);
9132 t
= build_int_cst (pointer_sized_int_node
,
9133 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
9134 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9135 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
9136 NULL_TREE
, NULL_TREE
);
9137 t
= build_int_cst (pointer_sized_int_node
, -1);
9138 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9139 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
9140 NULL_TREE
, NULL_TREE
);
9141 t
= build_int_cst (pointer_sized_int_node
, 0);
9142 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9144 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9145 and for each task reduction checks a bool right after the private variable
9146 within that thread's chunk; if the bool is clear, it hasn't been
9147 initialized and thus isn't going to be reduced nor destructed, otherwise
9148 reduce and destruct it. */
9149 tree idx
= create_tmp_var (size_type_node
);
9150 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
9151 tree num_thr_sz
= create_tmp_var (size_type_node
);
9152 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
9153 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
9154 tree lab3
= NULL_TREE
, lab7
= NULL_TREE
;
9156 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9158 /* For worksharing constructs or scope, only perform it in the master
9159 thread, with the exception of cancelled implicit barriers - then only
9160 handle the current thread. */
9161 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
9162 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9163 tree thr_num
= create_tmp_var (integer_type_node
);
9164 g
= gimple_build_call (t
, 0);
9165 gimple_call_set_lhs (g
, thr_num
);
9166 gimple_seq_add_stmt (end
, g
);
9170 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9171 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9172 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9173 if (code
== OMP_FOR
)
9174 c
= gimple_omp_for_clauses (ctx
->stmt
);
9175 else if (code
== OMP_SECTIONS
)
9176 c
= gimple_omp_sections_clauses (ctx
->stmt
);
9177 else /* if (code == OMP_SCOPE) */
9178 c
= gimple_omp_scope_clauses (ctx
->stmt
);
9179 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
9181 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
9183 gimple_seq_add_stmt (end
, g
);
9184 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9185 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
9186 gimple_seq_add_stmt (end
, g
);
9187 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
9188 build_one_cst (TREE_TYPE (idx
)));
9189 gimple_seq_add_stmt (end
, g
);
9190 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
9191 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9193 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
9194 gimple_seq_add_stmt (end
, g
);
9195 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
9197 if (code
!= OMP_PARALLEL
)
9199 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
9200 tree num_thr
= create_tmp_var (integer_type_node
);
9201 g
= gimple_build_call (t
, 0);
9202 gimple_call_set_lhs (g
, num_thr
);
9203 gimple_seq_add_stmt (end
, g
);
9204 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
9205 gimple_seq_add_stmt (end
, g
);
9207 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
9211 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
9212 OMP_CLAUSE__REDUCTEMP_
);
9213 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
9214 t
= fold_convert (size_type_node
, t
);
9215 gimplify_assign (num_thr_sz
, t
, end
);
9217 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
9218 NULL_TREE
, NULL_TREE
);
9219 tree data
= create_tmp_var (pointer_sized_int_node
);
9220 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
9221 if (code
== OMP_TASKLOOP
)
9223 lab7
= create_artificial_label (UNKNOWN_LOCATION
);
9224 g
= gimple_build_cond (NE_EXPR
, data
,
9225 build_zero_cst (pointer_sized_int_node
),
9227 gimple_seq_add_stmt (end
, g
);
9229 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
9231 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
9232 ptr
= create_tmp_var (build_pointer_type (record_type
));
9234 ptr
= create_tmp_var (ptr_type_node
);
9235 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
9237 tree field
= TYPE_FIELDS (record_type
);
9240 field
= DECL_CHAIN (DECL_CHAIN (field
));
9241 for (int pass
= 0; pass
< 2; pass
++)
9243 tree decl
, type
, next
;
9244 for (tree c
= clauses
;
9245 omp_task_reduction_iterate (pass
, code
, ccode
,
9246 &c
, &decl
, &type
, &next
); c
= next
)
9248 tree var
= decl
, ref
;
9249 if (TREE_CODE (decl
) == MEM_REF
)
9251 var
= TREE_OPERAND (var
, 0);
9252 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
9253 var
= TREE_OPERAND (var
, 0);
9255 if (TREE_CODE (var
) == ADDR_EXPR
)
9256 var
= TREE_OPERAND (var
, 0);
9257 else if (INDIRECT_REF_P (var
))
9258 var
= TREE_OPERAND (var
, 0);
9259 tree orig_var
= var
;
9260 if (is_variable_sized (var
))
9262 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
9263 var
= DECL_VALUE_EXPR (var
);
9264 gcc_assert (INDIRECT_REF_P (var
));
9265 var
= TREE_OPERAND (var
, 0);
9266 gcc_assert (DECL_P (var
));
9268 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
9269 if (orig_var
!= var
)
9270 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
9271 else if (TREE_CODE (v
) == ADDR_EXPR
)
9272 t
= build_fold_addr_expr (t
);
9273 else if (INDIRECT_REF_P (v
))
9274 t
= build_fold_indirect_ref (t
);
9275 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
9277 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
9278 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
9279 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
9281 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
9282 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
9283 fold_convert (size_type_node
,
9284 TREE_OPERAND (decl
, 1)));
9288 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
9289 if (!omp_privatize_by_reference (decl
))
9290 t
= build_fold_addr_expr (t
);
9292 t
= fold_convert (pointer_sized_int_node
, t
);
9294 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
9295 gimple_seq_add_seq (start
, seq
);
9296 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9297 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
9298 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9299 t
= unshare_expr (byte_position (field
));
9300 t
= fold_convert (pointer_sized_int_node
, t
);
9301 ctx
->task_reduction_map
->put (c
, cnt
);
9302 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
9305 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
9306 gimple_seq_add_seq (start
, seq
);
9307 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9308 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
9309 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9311 tree bfield
= DECL_CHAIN (field
);
9313 if (code
== OMP_PARALLEL
9315 || code
== OMP_SECTIONS
9316 || code
== OMP_SCOPE
)
9317 /* In parallel, worksharing or scope all threads unconditionally
9318 initialize all their task reduction private variables. */
9319 cond
= boolean_true_node
;
9320 else if (TREE_TYPE (ptr
) == ptr_type_node
)
9322 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
9323 unshare_expr (byte_position (bfield
)));
9325 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
9326 gimple_seq_add_seq (end
, seq
);
9327 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
9328 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
9329 build_int_cst (pbool
, 0));
9332 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
9333 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
9334 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9335 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
9336 tree condv
= create_tmp_var (boolean_type_node
);
9337 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
9338 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
9340 gimple_seq_add_stmt (end
, g
);
9341 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
9342 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
9344 /* If this reduction doesn't need destruction and parallel
9345 has been cancelled, there is nothing to do for this
9346 reduction, so jump around the merge operation. */
9347 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9348 g
= gimple_build_cond (NE_EXPR
, cancellable
,
9349 build_zero_cst (TREE_TYPE (cancellable
)),
9351 gimple_seq_add_stmt (end
, g
);
9352 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9356 if (TREE_TYPE (ptr
) == ptr_type_node
)
9358 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
9359 unshare_expr (byte_position (field
)));
9361 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
9362 gimple_seq_add_seq (end
, seq
);
9363 tree pbool
= build_pointer_type (TREE_TYPE (field
));
9364 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
9365 build_int_cst (pbool
, 0));
9368 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
9369 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
9371 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
9372 if (TREE_CODE (decl
) != MEM_REF
9373 && omp_privatize_by_reference (decl
))
9374 ref
= build_simple_mem_ref (ref
);
9375 /* reduction(-:var) sums up the partial results, so it acts
9376 identically to reduction(+:var). */
9377 if (rcode
== MINUS_EXPR
)
9379 if (TREE_CODE (decl
) == MEM_REF
)
9381 tree type
= TREE_TYPE (new_var
);
9382 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
9383 tree i
= create_tmp_var (TREE_TYPE (v
));
9384 tree ptype
= build_pointer_type (TREE_TYPE (type
));
9387 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
9388 tree vv
= create_tmp_var (TREE_TYPE (v
));
9389 gimplify_assign (vv
, v
, start
);
9392 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9393 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
9394 new_var
= build_fold_addr_expr (new_var
);
9395 new_var
= fold_convert (ptype
, new_var
);
9396 ref
= fold_convert (ptype
, ref
);
9397 tree m
= create_tmp_var (ptype
);
9398 gimplify_assign (m
, new_var
, end
);
9400 m
= create_tmp_var (ptype
);
9401 gimplify_assign (m
, ref
, end
);
9403 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
9404 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
9405 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
9406 gimple_seq_add_stmt (end
, gimple_build_label (body
));
9407 tree priv
= build_simple_mem_ref (new_var
);
9408 tree out
= build_simple_mem_ref (ref
);
9409 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9411 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9412 tree decl_placeholder
9413 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
9414 tree lab6
= NULL_TREE
;
9417 /* If this reduction needs destruction and parallel
9418 has been cancelled, jump around the merge operation
9419 to the destruction. */
9420 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9421 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9422 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
9423 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
9425 gimple_seq_add_stmt (end
, g
);
9426 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9428 SET_DECL_VALUE_EXPR (placeholder
, out
);
9429 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9430 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
9431 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
9432 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
9433 gimple_seq_add_seq (end
,
9434 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9435 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9436 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9438 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
9439 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
9442 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9443 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
9446 gimple_seq tseq
= NULL
;
9447 gimplify_stmt (&x
, &tseq
);
9448 gimple_seq_add_seq (end
, tseq
);
9453 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
9454 out
= unshare_expr (out
);
9455 gimplify_assign (out
, x
, end
);
9458 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
9459 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9460 gimple_seq_add_stmt (end
, g
);
9461 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
9462 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9463 gimple_seq_add_stmt (end
, g
);
9464 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
9465 build_int_cst (TREE_TYPE (i
), 1));
9466 gimple_seq_add_stmt (end
, g
);
9467 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
9468 gimple_seq_add_stmt (end
, g
);
9469 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
9471 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9473 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9474 tree oldv
= NULL_TREE
;
9475 tree lab6
= NULL_TREE
;
9478 /* If this reduction needs destruction and parallel
9479 has been cancelled, jump around the merge operation
9480 to the destruction. */
9481 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9482 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9483 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
9484 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
9486 gimple_seq_add_stmt (end
, g
);
9487 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9489 if (omp_privatize_by_reference (decl
)
9490 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
9492 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
9493 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
9494 tree refv
= create_tmp_var (TREE_TYPE (ref
));
9495 gimplify_assign (refv
, ref
, end
);
9496 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
9497 SET_DECL_VALUE_EXPR (placeholder
, ref
);
9498 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9499 tree d
= maybe_lookup_decl (decl
, ctx
);
9501 if (DECL_HAS_VALUE_EXPR_P (d
))
9502 oldv
= DECL_VALUE_EXPR (d
);
9503 if (omp_privatize_by_reference (var
))
9505 tree v
= fold_convert (TREE_TYPE (d
),
9506 build_fold_addr_expr (new_var
));
9507 SET_DECL_VALUE_EXPR (d
, v
);
9510 SET_DECL_VALUE_EXPR (d
, new_var
);
9511 DECL_HAS_VALUE_EXPR_P (d
) = 1;
9512 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
9514 SET_DECL_VALUE_EXPR (d
, oldv
);
9517 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
9518 DECL_HAS_VALUE_EXPR_P (d
) = 0;
9520 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9521 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9522 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9523 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
9525 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9526 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
9529 gimple_seq tseq
= NULL
;
9530 gimplify_stmt (&x
, &tseq
);
9531 gimple_seq_add_seq (end
, tseq
);
9536 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
9537 ref
= unshare_expr (ref
);
9538 gimplify_assign (ref
, x
, end
);
9540 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
9542 field
= DECL_CHAIN (bfield
);
9546 if (code
== OMP_TASKGROUP
)
9548 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
9549 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
9550 gimple_seq_add_stmt (start
, g
);
9555 if (code
== OMP_FOR
)
9556 c
= gimple_omp_for_clauses (ctx
->stmt
);
9557 else if (code
== OMP_SECTIONS
)
9558 c
= gimple_omp_sections_clauses (ctx
->stmt
);
9559 else if (code
== OMP_SCOPE
)
9560 c
= gimple_omp_scope_clauses (ctx
->stmt
);
9562 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
9563 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
9564 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
9565 build_fold_addr_expr (avar
));
9566 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
9569 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
9570 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
9572 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
9573 gimple_seq_add_stmt (end
, g
);
9574 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
9575 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9577 enum built_in_function bfn
9578 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
9579 t
= builtin_decl_explicit (bfn
);
9580 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
9584 arg
= create_tmp_var (c_bool_type
);
9585 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
9589 arg
= build_int_cst (c_bool_type
, 0);
9590 g
= gimple_build_call (t
, 1, arg
);
9594 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
9595 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
9597 gimple_seq_add_stmt (end
, g
);
9599 gimple_seq_add_stmt (end
, gimple_build_label (lab7
));
9600 t
= build_constructor (atype
, NULL
);
9601 TREE_THIS_VOLATILE (t
) = 1;
9602 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
9605 /* Expand code for an OpenMP taskgroup directive. */
9608 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9610 gimple
*stmt
= gsi_stmt (*gsi_p
);
9613 gimple_seq dseq
= NULL
;
9614 tree block
= make_node (BLOCK
);
9616 bind
= gimple_build_bind (NULL
, NULL
, block
);
9617 gsi_replace (gsi_p
, bind
, true);
9618 gimple_bind_add_stmt (bind
, stmt
);
9620 push_gimplify_context ();
9622 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
9624 gimple_bind_add_stmt (bind
, x
);
9626 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
9627 gimple_omp_taskgroup_clauses (stmt
),
9628 gimple_bind_body_ptr (bind
), &dseq
);
9630 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9631 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9632 gimple_omp_set_body (stmt
, NULL
);
9634 gimple_bind_add_seq (bind
, dseq
);
9636 pop_gimplify_context (bind
);
9638 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9639 BLOCK_VARS (block
) = ctx
->block_vars
;
9643 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9646 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
9649 struct omp_for_data fd
;
9650 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
9653 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
9654 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
9655 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
9659 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9660 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
9661 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
9662 && OMP_CLAUSE_DOACROSS_KIND (c
) == OMP_CLAUSE_DOACROSS_SINK
)
9664 /* Merge depend clauses from multiple adjacent
9665 #pragma omp ordered depend(sink:...) constructs
9666 into one #pragma omp ordered depend(sink:...), so that
9667 we can optimize them together. */
9668 gimple_stmt_iterator gsi
= *gsi_p
;
9670 while (!gsi_end_p (gsi
))
9672 gimple
*stmt
= gsi_stmt (gsi
);
9673 if (is_gimple_debug (stmt
)
9674 || gimple_code (stmt
) == GIMPLE_NOP
)
9679 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
9681 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
9682 c
= gimple_omp_ordered_clauses (ord_stmt2
);
9684 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DOACROSS
9685 || OMP_CLAUSE_DOACROSS_KIND (c
) != OMP_CLAUSE_DOACROSS_SINK
)
9688 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
9690 gsi_remove (&gsi
, true);
9694 /* Canonicalize sink dependence clauses into one folded clause if
9697 The basic algorithm is to create a sink vector whose first
9698 element is the GCD of all the first elements, and whose remaining
9699 elements are the minimum of the subsequent columns.
9701 We ignore dependence vectors whose first element is zero because
9702 such dependencies are known to be executed by the same thread.
9704 We take into account the direction of the loop, so a minimum
9705 becomes a maximum if the loop is iterating forwards. We also
9706 ignore sink clauses where the loop direction is unknown, or where
9707 the offsets are clearly invalid because they are not a multiple
9708 of the loop increment.
9712 #pragma omp for ordered(2)
9713 for (i=0; i < N; ++i)
9714 for (j=0; j < M; ++j)
9716 #pragma omp ordered \
9717 depend(sink:i-8,j-2) \
9718 depend(sink:i,j-1) \ // Completely ignored because i+0.
9719 depend(sink:i-4,j-3) \
9720 depend(sink:i-6,j-4)
9721 #pragma omp ordered depend(source)
9726 depend(sink:-gcd(8,4,6),-min(2,3,4))
9731 /* FIXME: Computing GCD's where the first element is zero is
9732 non-trivial in the presence of collapsed loops. Do this later. */
9733 if (fd
.collapse
> 1)
9736 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
9738 /* wide_int is not a POD so it must be default-constructed. */
9739 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
9740 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
9742 tree folded_dep
= NULL_TREE
;
9743 /* TRUE if the first dimension's offset is negative. */
9744 bool neg_offset_p
= false;
9746 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9748 while ((c
= *list_p
) != NULL
)
9750 bool remove
= false;
9752 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
);
9753 if (OMP_CLAUSE_DOACROSS_KIND (c
) != OMP_CLAUSE_DOACROSS_SINK
)
9754 goto next_ordered_clause
;
9757 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
9758 vec
&& TREE_CODE (vec
) == TREE_LIST
;
9759 vec
= TREE_CHAIN (vec
), ++i
)
9761 gcc_assert (i
< len
);
9763 /* omp_extract_for_data has canonicalized the condition. */
9764 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
9765 || fd
.loops
[i
].cond_code
== GT_EXPR
);
9766 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
9767 bool maybe_lexically_later
= true;
9769 /* While the committee makes up its mind, bail if we have any
9770 non-constant steps. */
9771 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
9772 goto lower_omp_ordered_ret
;
9774 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
9775 if (POINTER_TYPE_P (itype
))
9777 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
9778 TYPE_PRECISION (itype
),
9781 /* Ignore invalid offsets that are not multiples of the step. */
9782 if (!wi::multiple_of_p (wi::abs (offset
),
9783 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
9786 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
9787 "ignoring sink clause with offset that is not "
9788 "a multiple of the loop step");
9790 goto next_ordered_clause
;
9793 /* Calculate the first dimension. The first dimension of
9794 the folded dependency vector is the GCD of the first
9795 elements, while ignoring any first elements whose offset
9799 /* Ignore dependence vectors whose first dimension is 0. */
9803 goto next_ordered_clause
;
9807 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
9809 error_at (OMP_CLAUSE_LOCATION (c
),
9810 "first offset must be in opposite direction "
9811 "of loop iterations");
9812 goto lower_omp_ordered_ret
;
9816 neg_offset_p
= forward
;
9817 /* Initialize the first time around. */
9818 if (folded_dep
== NULL_TREE
)
9821 folded_deps
[0] = offset
;
9824 folded_deps
[0] = wi::gcd (folded_deps
[0],
9828 /* Calculate minimum for the remaining dimensions. */
9831 folded_deps
[len
+ i
- 1] = offset
;
9832 if (folded_dep
== c
)
9833 folded_deps
[i
] = offset
;
9834 else if (maybe_lexically_later
9835 && !wi::eq_p (folded_deps
[i
], offset
))
9837 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
9841 for (j
= 1; j
<= i
; j
++)
9842 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
9845 maybe_lexically_later
= false;
9849 gcc_assert (i
== len
);
9853 next_ordered_clause
:
9855 *list_p
= OMP_CLAUSE_CHAIN (c
);
9857 list_p
= &OMP_CLAUSE_CHAIN (c
);
9863 folded_deps
[0] = -folded_deps
[0];
9865 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
9866 if (POINTER_TYPE_P (itype
))
9869 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
9870 = wide_int_to_tree (itype
, folded_deps
[0]);
9871 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
9872 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
9875 lower_omp_ordered_ret
:
9877 /* Ordered without clauses is #pragma omp threads, while we want
9878 a nop instead if we remove all clauses. */
9879 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
9880 gsi_replace (gsi_p
, gimple_build_nop (), true);
9884 /* Expand code for an OpenMP ordered directive. */
9887 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9890 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
9891 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
9894 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9896 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9899 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
9900 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9901 OMP_CLAUSE_THREADS
);
9903 if (gimple_omp_ordered_standalone_p (ord_stmt
))
9905 /* FIXME: This is needs to be moved to the expansion to verify various
9906 conditions only testable on cfg with dominators computed, and also
9907 all the depend clauses to be merged still might need to be available
9908 for the runtime checks. */
9910 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
9914 push_gimplify_context ();
9916 block
= make_node (BLOCK
);
9917 bind
= gimple_build_bind (NULL
, NULL
, block
);
9918 gsi_replace (gsi_p
, bind
, true);
9919 gimple_bind_add_stmt (bind
, stmt
);
9923 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
9924 build_int_cst (NULL_TREE
, threads
));
9925 cfun
->has_simduid_loops
= true;
9928 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
9930 gimple_bind_add_stmt (bind
, x
);
9932 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
9935 counter
= create_tmp_var (integer_type_node
);
9936 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
9937 gimple_call_set_lhs (g
, counter
);
9938 gimple_bind_add_stmt (bind
, g
);
9940 body
= create_artificial_label (UNKNOWN_LOCATION
);
9941 test
= create_artificial_label (UNKNOWN_LOCATION
);
9942 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
9944 tree simt_pred
= create_tmp_var (integer_type_node
);
9945 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
9946 gimple_call_set_lhs (g
, simt_pred
);
9947 gimple_bind_add_stmt (bind
, g
);
9949 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
9950 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
9951 gimple_bind_add_stmt (bind
, g
);
9953 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
9955 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9956 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9957 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9958 gimple_omp_set_body (stmt
, NULL
);
9962 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
9963 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
9964 gimple_bind_add_stmt (bind
, g
);
9966 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
9967 tree nonneg
= create_tmp_var (integer_type_node
);
9968 gimple_seq tseq
= NULL
;
9969 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
9970 gimple_bind_add_seq (bind
, tseq
);
9972 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
9973 gimple_call_set_lhs (g
, nonneg
);
9974 gimple_bind_add_stmt (bind
, g
);
9976 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
9977 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
9978 gimple_bind_add_stmt (bind
, g
);
9980 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
9983 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
9984 build_int_cst (NULL_TREE
, threads
));
9986 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
9988 gimple_bind_add_stmt (bind
, x
);
9990 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9992 pop_gimplify_context (bind
);
9994 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9995 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
9999 /* Expand code for an OpenMP scan directive and the structured block
10000 before the scan directive. */
10003 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10005 gimple
*stmt
= gsi_stmt (*gsi_p
);
10007 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
10008 tree lane
= NULL_TREE
;
10009 gimple_seq before
= NULL
;
10010 omp_context
*octx
= ctx
->outer
;
10012 if (octx
->scan_exclusive
&& !has_clauses
)
10014 gimple_stmt_iterator gsi2
= *gsi_p
;
10016 gimple
*stmt2
= gsi_stmt (gsi2
);
10017 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10018 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10019 the one with exclusive clause(s), comes first. */
10021 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
10022 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
10024 gsi_remove (gsi_p
, false);
10025 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
10026 ctx
= maybe_lookup_ctx (stmt2
);
10028 lower_omp_scan (gsi_p
, ctx
);
10033 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
10034 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
10035 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
10036 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
10037 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
10038 && !gimple_omp_for_combined_p (octx
->stmt
));
10039 bool is_for_simd
= is_simd
&& gimple_omp_for_combined_into_p (octx
->stmt
);
10040 if (is_for_simd
&& octx
->for_simd_scan_phase
)
10043 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
10044 OMP_CLAUSE__SIMDUID_
))
10046 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
10047 lane
= create_tmp_var (unsigned_type_node
);
10048 tree t
= build_int_cst (integer_type_node
,
10050 : octx
->scan_inclusive
? 2 : 3);
10052 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
10053 gimple_call_set_lhs (g
, lane
);
10054 gimple_seq_add_stmt (&before
, g
);
10057 if (is_simd
|| is_for
)
10059 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
10060 c
; c
= OMP_CLAUSE_CHAIN (c
))
10061 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10062 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
10064 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10065 tree var
= OMP_CLAUSE_DECL (c
);
10066 tree new_var
= lookup_decl (var
, octx
);
10067 tree val
= new_var
;
10068 tree var2
= NULL_TREE
;
10069 tree var3
= NULL_TREE
;
10070 tree var4
= NULL_TREE
;
10071 tree lane0
= NULL_TREE
;
10072 tree new_vard
= new_var
;
10073 if (omp_privatize_by_reference (var
))
10075 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
10078 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
10080 val
= DECL_VALUE_EXPR (new_vard
);
10081 if (new_vard
!= new_var
)
10083 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
10084 val
= TREE_OPERAND (val
, 0);
10086 if (TREE_CODE (val
) == ARRAY_REF
10087 && VAR_P (TREE_OPERAND (val
, 0)))
10089 tree v
= TREE_OPERAND (val
, 0);
10090 if (lookup_attribute ("omp simd array",
10091 DECL_ATTRIBUTES (v
)))
10093 val
= unshare_expr (val
);
10094 lane0
= TREE_OPERAND (val
, 1);
10095 TREE_OPERAND (val
, 1) = lane
;
10096 var2
= lookup_decl (v
, octx
);
10097 if (octx
->scan_exclusive
)
10098 var4
= lookup_decl (var2
, octx
);
10100 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10101 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
10104 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
10105 var2
, lane
, NULL_TREE
, NULL_TREE
);
10106 TREE_THIS_NOTRAP (var2
) = 1;
10107 if (octx
->scan_exclusive
)
10109 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
10110 var4
, lane
, NULL_TREE
,
10112 TREE_THIS_NOTRAP (var4
) = 1;
10123 var2
= build_outer_var_ref (var
, octx
);
10124 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10126 var3
= maybe_lookup_decl (new_vard
, octx
);
10127 if (var3
== new_vard
|| var3
== NULL_TREE
)
10129 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
10131 var4
= maybe_lookup_decl (var3
, octx
);
10132 if (var4
== var3
|| var4
== NULL_TREE
)
10134 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
10145 && octx
->scan_exclusive
10147 && var4
== NULL_TREE
)
10148 var4
= create_tmp_var (TREE_TYPE (val
));
10150 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10152 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
10157 /* If we've added a separate identity element
10158 variable, copy it over into val. */
10159 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10161 gimplify_and_add (x
, &before
);
10163 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
10165 /* Otherwise, assign to it the identity element. */
10166 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
10168 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10169 tree ref
= build_outer_var_ref (var
, octx
);
10170 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10171 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10174 if (new_vard
!= new_var
)
10175 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10176 SET_DECL_VALUE_EXPR (new_vard
, val
);
10178 SET_DECL_VALUE_EXPR (placeholder
, ref
);
10179 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10180 lower_omp (&tseq
, octx
);
10182 SET_DECL_VALUE_EXPR (new_vard
, x
);
10183 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10184 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10185 gimple_seq_add_seq (&before
, tseq
);
10187 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
10193 if (octx
->scan_exclusive
)
10195 tree v4
= unshare_expr (var4
);
10196 tree v2
= unshare_expr (var2
);
10197 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
10198 gimplify_and_add (x
, &before
);
10200 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10201 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10202 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10204 if (x
&& new_vard
!= new_var
)
10205 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
10207 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
10208 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10209 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10210 lower_omp (&tseq
, octx
);
10211 gimple_seq_add_seq (&before
, tseq
);
10212 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
10214 SET_DECL_VALUE_EXPR (new_vard
, x
);
10215 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10216 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10217 if (octx
->scan_inclusive
)
10219 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10221 gimplify_and_add (x
, &before
);
10223 else if (lane0
== NULL_TREE
)
10225 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10227 gimplify_and_add (x
, &before
);
10235 /* input phase. Set val to initializer before
10237 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
10238 gimplify_assign (val
, x
, &before
);
10243 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
10244 if (code
== MINUS_EXPR
)
10247 tree x
= build2 (code
, TREE_TYPE (var2
),
10248 unshare_expr (var2
), unshare_expr (val
));
10249 if (octx
->scan_inclusive
)
10251 gimplify_assign (unshare_expr (var2
), x
, &before
);
10252 gimplify_assign (val
, var2
, &before
);
10256 gimplify_assign (unshare_expr (var4
),
10257 unshare_expr (var2
), &before
);
10258 gimplify_assign (var2
, x
, &before
);
10259 if (lane0
== NULL_TREE
)
10260 gimplify_assign (val
, var4
, &before
);
10264 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
10266 tree vexpr
= unshare_expr (var4
);
10267 TREE_OPERAND (vexpr
, 1) = lane0
;
10268 if (new_vard
!= new_var
)
10269 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
10270 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
10274 if (is_simd
&& !is_for_simd
)
10276 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
10277 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
10278 gsi_replace (gsi_p
, gimple_build_nop (), true);
10281 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
10284 gimple_stmt_iterator gsi
= gsi_start (*gimple_omp_body_ptr (stmt
));
10285 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
10290 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10291 substitution of a couple of function calls. But in the NAMED case,
10292 requires that languages coordinate a symbol name. It is therefore
10293 best put here in common code. */
10295 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
10298 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10301 tree name
, lock
, unlock
;
10302 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
10304 location_t loc
= gimple_location (stmt
);
10307 name
= gimple_omp_critical_name (stmt
);
10312 if (!critical_name_mutexes
)
10313 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
10315 tree
*n
= critical_name_mutexes
->get (name
);
10320 decl
= create_tmp_var_raw (ptr_type_node
);
10322 new_str
= ACONCAT ((".gomp_critical_user_",
10323 IDENTIFIER_POINTER (name
), NULL
));
10324 DECL_NAME (decl
) = get_identifier (new_str
);
10325 TREE_PUBLIC (decl
) = 1;
10326 TREE_STATIC (decl
) = 1;
10327 DECL_COMMON (decl
) = 1;
10328 DECL_ARTIFICIAL (decl
) = 1;
10329 DECL_IGNORED_P (decl
) = 1;
10331 varpool_node::finalize_decl (decl
);
10333 critical_name_mutexes
->put (name
, decl
);
10338 /* If '#pragma omp critical' is inside offloaded region or
10339 inside function marked as offloadable, the symbol must be
10340 marked as offloadable too. */
10342 if (cgraph_node::get (current_function_decl
)->offloadable
)
10343 varpool_node::get_create (decl
)->offloadable
= 1;
10345 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
10346 if (is_gimple_omp_offloaded (octx
->stmt
))
10348 varpool_node::get_create (decl
)->offloadable
= 1;
10352 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
10353 lock
= build_call_expr_loc (loc
, lock
, 1,
10354 build_fold_addr_expr_loc (loc
, decl
));
10356 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
10357 unlock
= build_call_expr_loc (loc
, unlock
, 1,
10358 build_fold_addr_expr_loc (loc
, decl
));
10362 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
10363 lock
= build_call_expr_loc (loc
, lock
, 0);
10365 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
10366 unlock
= build_call_expr_loc (loc
, unlock
, 0);
10369 push_gimplify_context ();
10371 block
= make_node (BLOCK
);
10372 bind
= gimple_build_bind (NULL
, NULL
, block
);
10373 gsi_replace (gsi_p
, bind
, true);
10374 gimple_bind_add_stmt (bind
, stmt
);
10376 tbody
= gimple_bind_body (bind
);
10377 gimplify_and_add (lock
, &tbody
);
10378 gimple_bind_set_body (bind
, tbody
);
10380 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10381 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
10382 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
10383 gimple_omp_set_body (stmt
, NULL
);
10385 tbody
= gimple_bind_body (bind
);
10386 gimplify_and_add (unlock
, &tbody
);
10387 gimple_bind_set_body (bind
, tbody
);
10389 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
10391 pop_gimplify_context (bind
);
10392 gimple_bind_append_vars (bind
, ctx
->block_vars
);
10393 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
10396 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10397 for a lastprivate clause. Given a loop control predicate of (V
10398 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10399 is appended to *DLIST, iterator initialization is appended to
10400 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10401 to be emitted in a critical section. */
10404 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
10405 gimple_seq
*dlist
, gimple_seq
*clist
,
10406 struct omp_context
*ctx
)
10408 tree clauses
, cond
, vinit
;
10409 enum tree_code cond_code
;
10412 cond_code
= fd
->loop
.cond_code
;
10413 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
10415 /* When possible, use a strict equality expression. This can let VRP
10416 type optimizations deduce the value and remove a copy. */
10417 if (tree_fits_shwi_p (fd
->loop
.step
))
10419 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
10420 if (step
== 1 || step
== -1)
10421 cond_code
= EQ_EXPR
;
10424 tree n2
= fd
->loop
.n2
;
10425 if (fd
->collapse
> 1
10426 && TREE_CODE (n2
) != INTEGER_CST
10427 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
10429 struct omp_context
*taskreg_ctx
= NULL
;
10430 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
10432 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
10433 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
10434 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
10436 if (gimple_omp_for_combined_into_p (gfor
))
10438 gcc_assert (ctx
->outer
->outer
10439 && is_parallel_ctx (ctx
->outer
->outer
));
10440 taskreg_ctx
= ctx
->outer
->outer
;
10444 struct omp_for_data outer_fd
;
10445 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
10446 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
10449 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
10450 taskreg_ctx
= ctx
->outer
->outer
;
10452 else if (is_taskreg_ctx (ctx
->outer
))
10453 taskreg_ctx
= ctx
->outer
;
10457 tree taskreg_clauses
10458 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
10459 tree innerc
= omp_find_clause (taskreg_clauses
,
10460 OMP_CLAUSE__LOOPTEMP_
);
10461 gcc_assert (innerc
);
10462 int count
= fd
->collapse
;
10464 && fd
->last_nonrect
== fd
->first_nonrect
+ 1)
10465 if (tree v
= gimple_omp_for_index (fd
->for_stmt
, fd
->last_nonrect
))
10466 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
10468 for (i
= 0; i
< count
; i
++)
10470 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10471 OMP_CLAUSE__LOOPTEMP_
);
10472 gcc_assert (innerc
);
10474 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10475 OMP_CLAUSE__LOOPTEMP_
);
10477 n2
= fold_convert (TREE_TYPE (n2
),
10478 lookup_decl (OMP_CLAUSE_DECL (innerc
),
10482 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
10484 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
10486 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
10487 if (!gimple_seq_empty_p (stmts
))
10489 gimple_seq_add_seq (&stmts
, *dlist
);
10492 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10493 vinit
= fd
->loop
.n1
;
10494 if (cond_code
== EQ_EXPR
10495 && tree_fits_shwi_p (fd
->loop
.n2
)
10496 && ! integer_zerop (fd
->loop
.n2
))
10497 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
10499 vinit
= unshare_expr (vinit
);
10501 /* Initialize the iterator variable, so that threads that don't execute
10502 any iterations don't execute the lastprivate clauses by accident. */
10503 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
10507 /* OpenACC privatization.
10509 Or, in other words, *sharing* at the respective OpenACC level of
10512 From a correctness perspective, a non-addressable variable can't be accessed
10513 outside the current thread, so it can go in a (faster than shared memory)
10514 register -- though that register may need to be broadcast in some
10515 circumstances. A variable can only meaningfully be "shared" across workers
10516 or vector lanes if its address is taken, e.g. by a call to an atomic
10519 From an optimisation perspective, the answer might be fuzzier: maybe
10520 sometimes, using shared memory directly would be faster than
10524 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags
,
10525 const location_t loc
, const tree c
,
10528 const dump_user_location_t d_u_loc
10529 = dump_user_location_t::from_location_t (loc
);
10530 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10532 # pragma GCC diagnostic push
10533 # pragma GCC diagnostic ignored "-Wformat"
10535 dump_printf_loc (l_dump_flags
, d_u_loc
,
10536 "variable %<%T%> ", decl
);
10538 # pragma GCC diagnostic pop
10541 dump_printf (l_dump_flags
,
10543 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
10545 dump_printf (l_dump_flags
,
10546 "declared in block ");
10550 oacc_privatization_candidate_p (const location_t loc
, const tree c
,
10553 dump_flags_t l_dump_flags
= get_openacc_privatization_dump_flags ();
10555 /* There is some differentiation depending on block vs. clause. */
10560 if (res
&& !VAR_P (decl
))
10562 /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
10563 privatized into a new VAR_DECL. */
10564 gcc_checking_assert (TREE_CODE (decl
) != PARM_DECL
);
10568 if (dump_enabled_p ())
10570 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10571 dump_printf (l_dump_flags
,
10572 "potentially has improper OpenACC privatization level: %qs\n",
10573 get_tree_code_name (TREE_CODE (decl
)));
10577 if (res
&& block
&& TREE_STATIC (decl
))
10581 if (dump_enabled_p ())
10583 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10584 dump_printf (l_dump_flags
,
10585 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10590 if (res
&& block
&& DECL_EXTERNAL (decl
))
10594 if (dump_enabled_p ())
10596 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10597 dump_printf (l_dump_flags
,
10598 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10603 if (res
&& !TREE_ADDRESSABLE (decl
))
10607 if (dump_enabled_p ())
10609 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10610 dump_printf (l_dump_flags
,
10611 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10612 "not addressable");
10616 /* If an artificial variable has been added to a bind, e.g.
10617 a compiler-generated temporary structure used by the Fortran front-end, do
10618 not consider it as a privatization candidate. Note that variables on
10619 the stack are private per-thread by default: making them "gang-private"
10620 for OpenACC actually means to share a single instance of a variable
10621 amongst all workers and threads spawned within each gang.
10622 At present, no compiler-generated artificial variables require such
10623 sharing semantics, so this is safe. */
10625 if (res
&& block
&& DECL_ARTIFICIAL (decl
))
10629 if (dump_enabled_p ())
10631 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10632 dump_printf (l_dump_flags
,
10633 "isn%'t candidate for adjusting OpenACC privatization "
10634 "level: %s\n", "artificial");
10640 if (dump_enabled_p ())
10642 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10643 dump_printf (l_dump_flags
,
10644 "is candidate for adjusting OpenACC privatization level\n");
10648 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10650 print_generic_decl (dump_file
, decl
, dump_flags
);
10651 fprintf (dump_file
, "\n");
10657 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10661 oacc_privatization_scan_clause_chain (omp_context
*ctx
, tree clauses
)
10663 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10664 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
)
10666 tree decl
= OMP_CLAUSE_DECL (c
);
10668 tree new_decl
= lookup_decl (decl
, ctx
);
10670 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c
), c
,
10674 gcc_checking_assert
10675 (!ctx
->oacc_privatization_candidates
.contains (new_decl
));
10676 ctx
->oacc_privatization_candidates
.safe_push (new_decl
);
10680 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10684 oacc_privatization_scan_decl_chain (omp_context
*ctx
, tree decls
)
10686 for (tree decl
= decls
; decl
; decl
= DECL_CHAIN (decl
))
10688 tree new_decl
= lookup_decl (decl
, ctx
);
10689 gcc_checking_assert (new_decl
== decl
);
10691 if (!oacc_privatization_candidate_p (gimple_location (ctx
->stmt
), NULL
,
10695 gcc_checking_assert
10696 (!ctx
->oacc_privatization_candidates
.contains (new_decl
));
10697 ctx
->oacc_privatization_candidates
.safe_push (new_decl
);
10701 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10704 omp_find_scan (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
10705 struct walk_stmt_info
*wi
)
10707 gimple
*stmt
= gsi_stmt (*gsi_p
);
10709 *handled_ops_p
= true;
10710 switch (gimple_code (stmt
))
10714 case GIMPLE_OMP_FOR
:
10715 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
10716 && gimple_omp_for_combined_into_p (stmt
))
10717 *handled_ops_p
= false;
10720 case GIMPLE_OMP_SCAN
:
10721 *(gimple_stmt_iterator
*) (wi
->info
) = *gsi_p
;
10722 return integer_zero_node
;
10729 /* Helper function for lower_omp_for, add transformations for a worksharing
10730 loop with scan directives inside of it.
10731 For worksharing loop not combined with simd, transform:
10732 #pragma omp for reduction(inscan,+:r) private(i)
10733 for (i = 0; i < n; i = i + 1)
10738 #pragma omp scan inclusive(r)
10744 into two worksharing loops + code to merge results:
10746 num_threads = omp_get_num_threads ();
10747 thread_num = omp_get_thread_num ();
10748 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10753 // For UDRs this is UDR init, or if ctors are needed, copy from
10754 // var3 that has been constructed to contain the neutral element.
10758 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10759 // a shared array with num_threads elements and rprivb to a local array
10760 // number of elements equal to the number of (contiguous) iterations the
10761 // current thread will perform. controlb and controlp variables are
10762 // temporaries to handle deallocation of rprivb at the end of second
10764 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10765 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10766 for (i = 0; i < n; i = i + 1)
10769 // For UDRs this is UDR init or copy from var3.
10771 // This is the input phase from user code.
10775 // For UDRs this is UDR merge.
10777 // Rather than handing it over to the user, save to local thread's
10779 rprivb[ivar] = var2;
10780 // For exclusive scan, the above two statements are swapped.
10784 // And remember the final value from this thread's into the shared
10786 rpriva[(sizetype) thread_num] = var2;
10787 // If more than one thread, compute using Work-Efficient prefix sum
10788 // the inclusive parallel scan of the rpriva array.
10789 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10794 num_threadsu = (unsigned int) num_threads;
10795 thread_numup1 = (unsigned int) thread_num + 1;
10798 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10802 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10807 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10808 mul = REALPART_EXPR <cplx>;
10809 ovf = IMAGPART_EXPR <cplx>;
10810 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10813 andvm1 = andv + 4294967295;
10815 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10817 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10818 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10819 rpriva[l] = rpriva[l - k] + rpriva[l];
10821 if (down == 0) goto <D.2121>; else goto <D.2122>;
10829 if (k != 0) goto <D.2108>; else goto <D.2103>;
10831 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10833 // For UDRs this is UDR init or copy from var3.
10837 var2 = rpriva[thread_num - 1];
10840 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10841 reduction(inscan,+:r) private(i)
10842 for (i = 0; i < n; i = i + 1)
10845 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10846 r = var2 + rprivb[ivar];
10849 // This is the scan phase from user code.
10851 // Plus a bump of the iterator.
10857 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
10858 struct omp_for_data
*fd
, omp_context
*ctx
)
10860 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
10861 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
10863 gimple_seq body
= gimple_omp_body (stmt
);
10864 gimple_stmt_iterator input1_gsi
= gsi_none ();
10865 struct walk_stmt_info wi
;
10866 memset (&wi
, 0, sizeof (wi
));
10867 wi
.val_only
= true;
10868 wi
.info
= (void *) &input1_gsi
;
10869 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
10870 gcc_assert (!gsi_end_p (input1_gsi
));
10872 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
10873 gimple_stmt_iterator gsi
= input1_gsi
;
10875 gimple_stmt_iterator scan1_gsi
= gsi
;
10876 gimple
*scan_stmt1
= gsi_stmt (gsi
);
10877 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
10879 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
10880 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
10881 gimple_omp_set_body (input_stmt1
, NULL
);
10882 gimple_omp_set_body (scan_stmt1
, NULL
);
10883 gimple_omp_set_body (stmt
, NULL
);
10885 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
10886 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
10887 gimple_omp_set_body (stmt
, body
);
10888 gimple_omp_set_body (input_stmt1
, input_body
);
10890 gimple_stmt_iterator input2_gsi
= gsi_none ();
10891 memset (&wi
, 0, sizeof (wi
));
10892 wi
.val_only
= true;
10893 wi
.info
= (void *) &input2_gsi
;
10894 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
10895 gcc_assert (!gsi_end_p (input2_gsi
));
10897 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
10900 gimple_stmt_iterator scan2_gsi
= gsi
;
10901 gimple
*scan_stmt2
= gsi_stmt (gsi
);
10902 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
10903 gimple_omp_set_body (scan_stmt2
, scan_body
);
10905 gimple_stmt_iterator input3_gsi
= gsi_none ();
10906 gimple_stmt_iterator scan3_gsi
= gsi_none ();
10907 gimple_stmt_iterator input4_gsi
= gsi_none ();
10908 gimple_stmt_iterator scan4_gsi
= gsi_none ();
10909 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
10910 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
10911 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
10914 memset (&wi
, 0, sizeof (wi
));
10915 wi
.val_only
= true;
10916 wi
.info
= (void *) &input3_gsi
;
10917 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
10918 gcc_assert (!gsi_end_p (input3_gsi
));
10920 input_stmt3
= gsi_stmt (input3_gsi
);
10924 scan_stmt3
= gsi_stmt (gsi
);
10925 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
10927 memset (&wi
, 0, sizeof (wi
));
10928 wi
.val_only
= true;
10929 wi
.info
= (void *) &input4_gsi
;
10930 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
10931 gcc_assert (!gsi_end_p (input4_gsi
));
10933 input_stmt4
= gsi_stmt (input4_gsi
);
10937 scan_stmt4
= gsi_stmt (gsi
);
10938 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
10940 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
10941 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
10944 tree num_threads
= create_tmp_var (integer_type_node
);
10945 tree thread_num
= create_tmp_var (integer_type_node
);
10946 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
10947 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
10948 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
10949 gimple_call_set_lhs (g
, num_threads
);
10950 gimple_seq_add_stmt (body_p
, g
);
10951 g
= gimple_build_call (threadnum_decl
, 0);
10952 gimple_call_set_lhs (g
, thread_num
);
10953 gimple_seq_add_stmt (body_p
, g
);
10955 tree ivar
= create_tmp_var (sizetype
);
10956 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
10957 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
10958 tree k
= create_tmp_var (unsigned_type_node
);
10959 tree l
= create_tmp_var (unsigned_type_node
);
10961 gimple_seq clist
= NULL
, mdlist
= NULL
;
10962 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
10963 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
10964 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
10965 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
10966 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10967 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10968 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
10970 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10971 tree var
= OMP_CLAUSE_DECL (c
);
10972 tree new_var
= lookup_decl (var
, ctx
);
10973 tree var3
= NULL_TREE
;
10974 tree new_vard
= new_var
;
10975 if (omp_privatize_by_reference (var
))
10976 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
10977 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10979 var3
= maybe_lookup_decl (new_vard
, ctx
);
10980 if (var3
== new_vard
)
10984 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
10985 tree rpriva
= create_tmp_var (ptype
);
10986 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
10987 OMP_CLAUSE_DECL (nc
) = rpriva
;
10989 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10991 tree rprivb
= create_tmp_var (ptype
);
10992 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
10993 OMP_CLAUSE_DECL (nc
) = rprivb
;
10994 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
10996 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10998 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
10999 if (new_vard
!= new_var
)
11000 TREE_ADDRESSABLE (var2
) = 1;
11001 gimple_add_tmp_var (var2
);
11003 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
11004 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11005 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11006 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11007 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11009 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
11010 thread_num
, integer_minus_one_node
);
11011 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
11012 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11013 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11014 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11015 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11017 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
11018 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11019 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11020 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11021 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11023 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
11024 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
11025 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11026 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11027 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11028 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11030 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
11031 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11032 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
11033 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11035 tree var4
= is_for_simd
? new_var
: var2
;
11036 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
11039 var5
= lookup_decl (var
, input_simd_ctx
);
11040 var6
= lookup_decl (var
, scan_simd_ctx
);
11041 if (new_vard
!= new_var
)
11043 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
11044 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
11047 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
11049 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
11052 x
= lang_hooks
.decls
.omp_clause_default_ctor
11053 (c
, var2
, build_outer_var_ref (var
, ctx
));
11055 gimplify_and_add (x
, &clist
);
11057 x
= build_outer_var_ref (var
, ctx
);
11058 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
11060 gimplify_and_add (x
, &thr01_list
);
11062 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
11063 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
11066 x
= unshare_expr (var4
);
11067 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11068 gimplify_and_add (x
, &thrn1_list
);
11069 x
= unshare_expr (var4
);
11070 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11071 gimplify_and_add (x
, &thr02_list
);
11073 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
11075 /* Otherwise, assign to it the identity element. */
11076 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11077 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11080 if (new_vard
!= new_var
)
11081 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11082 SET_DECL_VALUE_EXPR (new_vard
, val
);
11083 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11085 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
11086 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11087 lower_omp (&tseq
, ctx
);
11088 gimple_seq_add_seq (&thrn1_list
, tseq
);
11089 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11090 lower_omp (&tseq
, ctx
);
11091 gimple_seq_add_seq (&thr02_list
, tseq
);
11092 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11093 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11094 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
11096 SET_DECL_VALUE_EXPR (new_vard
, y
);
11099 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11100 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11104 x
= unshare_expr (var4
);
11105 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
11106 gimplify_and_add (x
, &thrn2_list
);
11110 x
= unshare_expr (rprivb_ref
);
11111 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
11112 gimplify_and_add (x
, &scan1_list
);
11116 if (ctx
->scan_exclusive
)
11118 x
= unshare_expr (rprivb_ref
);
11119 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11120 gimplify_and_add (x
, &scan1_list
);
11123 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11124 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11125 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11126 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11127 lower_omp (&tseq
, ctx
);
11128 gimple_seq_add_seq (&scan1_list
, tseq
);
11130 if (ctx
->scan_inclusive
)
11132 x
= unshare_expr (rprivb_ref
);
11133 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11134 gimplify_and_add (x
, &scan1_list
);
11138 x
= unshare_expr (rpriva_ref
);
11139 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
11140 unshare_expr (var4
));
11141 gimplify_and_add (x
, &mdlist
);
11143 x
= unshare_expr (is_for_simd
? var6
: new_var
);
11144 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
11145 gimplify_and_add (x
, &input2_list
);
11148 if (new_vard
!= new_var
)
11149 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11151 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11152 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11153 SET_DECL_VALUE_EXPR (new_vard
, val
);
11154 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11157 SET_DECL_VALUE_EXPR (placeholder
, var6
);
11158 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11161 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11162 lower_omp (&tseq
, ctx
);
11164 SET_DECL_VALUE_EXPR (new_vard
, y
);
11167 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11168 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11172 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
11173 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11174 lower_omp (&tseq
, ctx
);
11176 gimple_seq_add_seq (&input2_list
, tseq
);
11178 x
= build_outer_var_ref (var
, ctx
);
11179 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
11180 gimplify_and_add (x
, &last_list
);
11182 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
11183 gimplify_and_add (x
, &reduc_list
);
11184 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11185 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11187 if (new_vard
!= new_var
)
11188 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11189 SET_DECL_VALUE_EXPR (new_vard
, val
);
11190 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11191 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11192 lower_omp (&tseq
, ctx
);
11193 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
11194 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11195 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11197 SET_DECL_VALUE_EXPR (new_vard
, y
);
11200 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11201 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11203 gimple_seq_add_seq (&reduc_list
, tseq
);
11204 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
11205 gimplify_and_add (x
, &reduc_list
);
11207 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
11209 gimplify_and_add (x
, dlist
);
11213 x
= build_outer_var_ref (var
, ctx
);
11214 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
11216 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
11217 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
11219 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
11221 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
11223 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
11224 if (code
== MINUS_EXPR
)
11228 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
11231 if (ctx
->scan_exclusive
)
11232 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11234 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
11235 gimplify_assign (var2
, x
, &scan1_list
);
11236 if (ctx
->scan_inclusive
)
11237 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11241 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
11244 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
11245 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
11247 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
11250 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
11251 unshare_expr (rprival_ref
));
11252 gimplify_assign (rprival_ref
, x
, &reduc_list
);
11256 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11257 gimple_seq_add_stmt (&scan1_list
, g
);
11258 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11259 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11260 ? scan_stmt4
: scan_stmt2
), g
);
11262 tree controlb
= create_tmp_var (boolean_type_node
);
11263 tree controlp
= create_tmp_var (ptr_type_node
);
11264 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11265 OMP_CLAUSE_DECL (nc
) = controlb
;
11266 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11268 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11269 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11270 OMP_CLAUSE_DECL (nc
) = controlp
;
11271 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11273 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11274 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11275 OMP_CLAUSE_DECL (nc
) = controlb
;
11276 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11278 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11279 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11280 OMP_CLAUSE_DECL (nc
) = controlp
;
11281 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11283 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11285 *cp1
= gimple_omp_for_clauses (stmt
);
11286 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
11287 *cp2
= gimple_omp_for_clauses (new_stmt
);
11288 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
11292 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
11293 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
11295 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
11297 gsi_remove (&input3_gsi
, true);
11298 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
11300 gsi_remove (&scan3_gsi
, true);
11301 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
11303 gsi_remove (&input4_gsi
, true);
11304 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
11306 gsi_remove (&scan4_gsi
, true);
11310 gimple_omp_set_body (scan_stmt1
, scan1_list
);
11311 gimple_omp_set_body (input_stmt2
, input2_list
);
11314 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
11316 gsi_remove (&input1_gsi
, true);
11317 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
11319 gsi_remove (&scan1_gsi
, true);
11320 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
11322 gsi_remove (&input2_gsi
, true);
11323 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
11325 gsi_remove (&scan2_gsi
, true);
11327 gimple_seq_add_seq (body_p
, clist
);
11329 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11330 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11331 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11332 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11333 gimple_seq_add_stmt (body_p
, g
);
11334 g
= gimple_build_label (lab1
);
11335 gimple_seq_add_stmt (body_p
, g
);
11336 gimple_seq_add_seq (body_p
, thr01_list
);
11337 g
= gimple_build_goto (lab3
);
11338 gimple_seq_add_stmt (body_p
, g
);
11339 g
= gimple_build_label (lab2
);
11340 gimple_seq_add_stmt (body_p
, g
);
11341 gimple_seq_add_seq (body_p
, thrn1_list
);
11342 g
= gimple_build_label (lab3
);
11343 gimple_seq_add_stmt (body_p
, g
);
11345 g
= gimple_build_assign (ivar
, size_zero_node
);
11346 gimple_seq_add_stmt (body_p
, g
);
11348 gimple_seq_add_stmt (body_p
, stmt
);
11349 gimple_seq_add_seq (body_p
, body
);
11350 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
11353 g
= gimple_build_omp_return (true);
11354 gimple_seq_add_stmt (body_p
, g
);
11355 gimple_seq_add_seq (body_p
, mdlist
);
11357 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11358 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11359 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
11360 gimple_seq_add_stmt (body_p
, g
);
11361 g
= gimple_build_label (lab1
);
11362 gimple_seq_add_stmt (body_p
, g
);
11364 g
= omp_build_barrier (NULL
);
11365 gimple_seq_add_stmt (body_p
, g
);
11367 tree down
= create_tmp_var (unsigned_type_node
);
11368 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
11369 gimple_seq_add_stmt (body_p
, g
);
11371 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
11372 gimple_seq_add_stmt (body_p
, g
);
11374 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
11375 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
11376 gimple_seq_add_stmt (body_p
, g
);
11378 tree thread_numu
= create_tmp_var (unsigned_type_node
);
11379 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
11380 gimple_seq_add_stmt (body_p
, g
);
11382 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
11383 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
11384 build_int_cst (unsigned_type_node
, 1));
11385 gimple_seq_add_stmt (body_p
, g
);
11387 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11388 g
= gimple_build_label (lab3
);
11389 gimple_seq_add_stmt (body_p
, g
);
11391 tree twok
= create_tmp_var (unsigned_type_node
);
11392 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11393 gimple_seq_add_stmt (body_p
, g
);
11395 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
11396 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
11397 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
11398 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
11399 gimple_seq_add_stmt (body_p
, g
);
11400 g
= gimple_build_label (lab4
);
11401 gimple_seq_add_stmt (body_p
, g
);
11402 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
11403 gimple_seq_add_stmt (body_p
, g
);
11404 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11405 gimple_seq_add_stmt (body_p
, g
);
11407 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
11408 gimple_seq_add_stmt (body_p
, g
);
11409 g
= gimple_build_label (lab6
);
11410 gimple_seq_add_stmt (body_p
, g
);
11412 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11413 gimple_seq_add_stmt (body_p
, g
);
11415 g
= gimple_build_label (lab5
);
11416 gimple_seq_add_stmt (body_p
, g
);
11418 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11419 gimple_seq_add_stmt (body_p
, g
);
11421 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
11422 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
11423 gimple_call_set_lhs (g
, cplx
);
11424 gimple_seq_add_stmt (body_p
, g
);
11425 tree mul
= create_tmp_var (unsigned_type_node
);
11426 g
= gimple_build_assign (mul
, REALPART_EXPR
,
11427 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
11428 gimple_seq_add_stmt (body_p
, g
);
11429 tree ovf
= create_tmp_var (unsigned_type_node
);
11430 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
11431 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
11432 gimple_seq_add_stmt (body_p
, g
);
11434 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
11435 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
11436 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
11438 gimple_seq_add_stmt (body_p
, g
);
11439 g
= gimple_build_label (lab7
);
11440 gimple_seq_add_stmt (body_p
, g
);
11442 tree andv
= create_tmp_var (unsigned_type_node
);
11443 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
11444 gimple_seq_add_stmt (body_p
, g
);
11445 tree andvm1
= create_tmp_var (unsigned_type_node
);
11446 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
11447 build_minus_one_cst (unsigned_type_node
));
11448 gimple_seq_add_stmt (body_p
, g
);
11450 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
11451 gimple_seq_add_stmt (body_p
, g
);
11453 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
11454 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
11455 gimple_seq_add_stmt (body_p
, g
);
11456 g
= gimple_build_label (lab9
);
11457 gimple_seq_add_stmt (body_p
, g
);
11458 gimple_seq_add_seq (body_p
, reduc_list
);
11459 g
= gimple_build_label (lab8
);
11460 gimple_seq_add_stmt (body_p
, g
);
11462 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
11463 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
11464 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
11465 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
11467 gimple_seq_add_stmt (body_p
, g
);
11468 g
= gimple_build_label (lab10
);
11469 gimple_seq_add_stmt (body_p
, g
);
11470 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
11471 gimple_seq_add_stmt (body_p
, g
);
11472 g
= gimple_build_goto (lab12
);
11473 gimple_seq_add_stmt (body_p
, g
);
11474 g
= gimple_build_label (lab11
);
11475 gimple_seq_add_stmt (body_p
, g
);
11476 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11477 gimple_seq_add_stmt (body_p
, g
);
11478 g
= gimple_build_label (lab12
);
11479 gimple_seq_add_stmt (body_p
, g
);
11481 g
= omp_build_barrier (NULL
);
11482 gimple_seq_add_stmt (body_p
, g
);
11484 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
11486 gimple_seq_add_stmt (body_p
, g
);
11488 g
= gimple_build_label (lab2
);
11489 gimple_seq_add_stmt (body_p
, g
);
11491 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11492 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11493 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11494 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11495 gimple_seq_add_stmt (body_p
, g
);
11496 g
= gimple_build_label (lab1
);
11497 gimple_seq_add_stmt (body_p
, g
);
11498 gimple_seq_add_seq (body_p
, thr02_list
);
11499 g
= gimple_build_goto (lab3
);
11500 gimple_seq_add_stmt (body_p
, g
);
11501 g
= gimple_build_label (lab2
);
11502 gimple_seq_add_stmt (body_p
, g
);
11503 gimple_seq_add_seq (body_p
, thrn2_list
);
11504 g
= gimple_build_label (lab3
);
11505 gimple_seq_add_stmt (body_p
, g
);
11507 g
= gimple_build_assign (ivar
, size_zero_node
);
11508 gimple_seq_add_stmt (body_p
, g
);
11509 gimple_seq_add_stmt (body_p
, new_stmt
);
11510 gimple_seq_add_seq (body_p
, new_body
);
11512 gimple_seq new_dlist
= NULL
;
11513 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11514 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11515 tree num_threadsm1
= create_tmp_var (integer_type_node
);
11516 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
11517 integer_minus_one_node
);
11518 gimple_seq_add_stmt (&new_dlist
, g
);
11519 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
11520 gimple_seq_add_stmt (&new_dlist
, g
);
11521 g
= gimple_build_label (lab1
);
11522 gimple_seq_add_stmt (&new_dlist
, g
);
11523 gimple_seq_add_seq (&new_dlist
, last_list
);
11524 g
= gimple_build_label (lab2
);
11525 gimple_seq_add_stmt (&new_dlist
, g
);
11526 gimple_seq_add_seq (&new_dlist
, *dlist
);
11527 *dlist
= new_dlist
;
/* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
   the addresses of variables to be made private at the surrounding
   parallelism level.  Such functions appear in the gimple code stream in two
   forms, e.g. for a partitioned loop:

     .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
     .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
     .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
     .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);

   or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
   not as part of a HEAD_MARK sequence:

     .UNIQUE (OACC_PRIVATE, 0, 0, &w);

   For such stand-alone appearances, the 3rd argument is always 0, denoting
   gang partitioning.  */
11549 lower_oacc_private_marker (omp_context
*ctx
)
11551 if (ctx
->oacc_privatization_candidates
.length () == 0)
11554 auto_vec
<tree
, 5> args
;
11556 args
.quick_push (build_int_cst (integer_type_node
, IFN_UNIQUE_OACC_PRIVATE
));
11557 args
.quick_push (integer_zero_node
);
11558 args
.quick_push (integer_minus_one_node
);
11562 FOR_EACH_VEC_ELT (ctx
->oacc_privatization_candidates
, i
, decl
)
11564 gcc_checking_assert (TREE_ADDRESSABLE (decl
));
11565 tree addr
= build_fold_addr_expr (decl
);
11566 args
.safe_push (addr
);
11569 return gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
/* Lower code for an OMP loop directive.  */
11575 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11577 tree
*rhs_p
, block
;
11578 struct omp_for_data fd
, *fdp
= NULL
;
11579 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
11581 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
11582 gimple_seq cnt_list
= NULL
, clist
= NULL
;
11583 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
11586 push_gimplify_context ();
11588 if (is_gimple_omp_oacc (ctx
->stmt
))
11589 oacc_privatization_scan_clause_chain (ctx
, gimple_omp_for_clauses (stmt
));
11591 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
11593 block
= make_node (BLOCK
);
11594 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
11595 /* Replace at gsi right away, so that 'stmt' is no member
11596 of a sequence anymore as we're going to add to a different
11598 gsi_replace (gsi_p
, new_stmt
, true);
11600 /* Move declaration of temporaries in the loop body before we make
11602 omp_for_body
= gimple_omp_body (stmt
);
11603 if (!gimple_seq_empty_p (omp_for_body
)
11604 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
11607 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
11608 tree vars
= gimple_bind_vars (inner_bind
);
11609 if (is_gimple_omp_oacc (ctx
->stmt
))
11610 oacc_privatization_scan_decl_chain (ctx
, vars
);
11611 gimple_bind_append_vars (new_stmt
, vars
);
11612 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11613 keep them on the inner_bind and it's block. */
11614 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
11615 if (gimple_bind_block (inner_bind
))
11616 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
11619 if (gimple_omp_for_combined_into_p (stmt
))
11621 omp_extract_for_data (stmt
, &fd
, NULL
);
11624 /* We need two temporaries with fd.loop.v type (istart/iend)
11625 and then (fd.collapse - 1) temporaries with the same
11626 type for count2 ... countN-1 vars if not constant. */
11628 tree type
= fd
.iter_type
;
11629 if (fd
.collapse
> 1
11630 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
11631 count
+= fd
.collapse
- 1;
11633 tree type2
= NULL_TREE
;
11635 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
11636 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
11637 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
11639 tree clauses
= *pc
;
11640 if (fd
.collapse
> 1
11642 && fd
.last_nonrect
== fd
.first_nonrect
+ 1
11643 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
11644 if (tree v
= gimple_omp_for_index (stmt
, fd
.last_nonrect
))
11645 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
11647 v
= gimple_omp_for_index (stmt
, fd
.first_nonrect
);
11648 type2
= TREE_TYPE (v
);
11654 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
11655 OMP_CLAUSE__LOOPTEMP_
);
11656 if (ctx
->simt_stmt
)
11657 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
11658 OMP_CLAUSE__LOOPTEMP_
);
11659 for (i
= 0; i
< count
+ count2
; i
++)
11664 gcc_assert (outerc
);
11665 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
11666 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
11667 OMP_CLAUSE__LOOPTEMP_
);
11671 /* If there are 2 adjacent SIMD stmts, one with _simt_
11672 clause, another without, make sure they have the same
11673 decls in _looptemp_ clauses, because the outer stmt
11674 they are combined into will look up just one inner_stmt. */
11675 if (ctx
->simt_stmt
)
11676 temp
= OMP_CLAUSE_DECL (simtc
);
11678 temp
= create_tmp_var (i
>= count
? type2
: type
);
11679 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
11681 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
11682 OMP_CLAUSE_DECL (*pc
) = temp
;
11683 pc
= &OMP_CLAUSE_CHAIN (*pc
);
11684 if (ctx
->simt_stmt
)
11685 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
11686 OMP_CLAUSE__LOOPTEMP_
);
11691 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11695 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
11696 OMP_CLAUSE_REDUCTION
);
11697 tree rtmp
= NULL_TREE
;
11700 tree type
= build_pointer_type (pointer_sized_int_node
);
11701 tree temp
= create_tmp_var (type
);
11702 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
11703 OMP_CLAUSE_DECL (c
) = temp
;
11704 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
11705 gimple_omp_for_set_clauses (stmt
, c
);
11706 lower_omp_task_reductions (ctx
, OMP_FOR
,
11707 gimple_omp_for_clauses (stmt
),
11708 &tred_ilist
, &tred_dlist
);
11710 rtmp
= make_ssa_name (type
);
11711 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
11714 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
11717 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
11719 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
11720 gimple_omp_for_pre_body (stmt
));
11722 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
11724 gcall
*private_marker
= NULL
;
11725 if (is_gimple_omp_oacc (ctx
->stmt
)
11726 && !gimple_seq_empty_p (omp_for_body
))
11727 private_marker
= lower_oacc_private_marker (ctx
);
11729 /* Lower the header expressions. At this point, we can assume that
11730 the header is of the form:
11732 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11734 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11735 using the .omp_data_s mapping, if needed. */
11736 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
11738 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
11739 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
11741 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
11742 TREE_VEC_ELT (*rhs_p
, 1)
11743 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
11744 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
11745 TREE_VEC_ELT (*rhs_p
, 2)
11746 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
11748 else if (!is_gimple_min_invariant (*rhs_p
))
11749 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11750 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
11751 recompute_tree_invariant_for_addr_expr (*rhs_p
);
11753 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
11754 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
11756 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
11757 TREE_VEC_ELT (*rhs_p
, 1)
11758 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
11759 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
11760 TREE_VEC_ELT (*rhs_p
, 2)
11761 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
11763 else if (!is_gimple_min_invariant (*rhs_p
))
11764 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11765 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
11766 recompute_tree_invariant_for_addr_expr (*rhs_p
);
11768 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
11769 if (!is_gimple_min_invariant (*rhs_p
))
11770 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11773 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
11775 gimple_seq_add_seq (&body
, cnt_list
);
11777 /* Once lowered, extract the bounds and clauses. */
11778 omp_extract_for_data (stmt
, &fd
, NULL
);
11780 if (is_gimple_omp_oacc (ctx
->stmt
)
11781 && !ctx_in_oacc_kernels_region (ctx
))
11782 lower_oacc_head_tail (gimple_location (stmt
),
11783 gimple_omp_for_clauses (stmt
), private_marker
,
11784 &oacc_head
, &oacc_tail
, ctx
);
11786 /* Add OpenACC partitioning and reduction markers just before the loop. */
11788 gimple_seq_add_seq (&body
, oacc_head
);
11790 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
11792 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
11793 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11794 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11795 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
11797 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
11798 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
11799 OMP_CLAUSE_LINEAR_STEP (c
)
11800 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
11804 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
11805 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
11806 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
11809 gimple_seq_add_stmt (&body
, stmt
);
11810 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
11813 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
11816 /* After the loop, add exit clauses. */
11817 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
11821 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
11822 gcall
*g
= gimple_build_call (fndecl
, 0);
11823 gimple_seq_add_stmt (&body
, g
);
11824 gimple_seq_add_seq (&body
, clist
);
11825 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
11826 g
= gimple_build_call (fndecl
, 0);
11827 gimple_seq_add_stmt (&body
, g
);
11830 if (ctx
->cancellable
)
11831 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
11833 gimple_seq_add_seq (&body
, dlist
);
11837 gimple_seq_add_seq (&tred_ilist
, body
);
11841 body
= maybe_catch_exception (body
);
11843 /* Region exit marker goes at the end of the loop body. */
11844 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
11845 gimple_seq_add_stmt (&body
, g
);
11847 gimple_seq_add_seq (&body
, tred_dlist
);
11849 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
11852 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
11854 /* Add OpenACC joining and reduction markers just after the loop. */
11856 gimple_seq_add_seq (&body
, oacc_tail
);
11858 pop_gimplify_context (new_stmt
);
11860 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
11861 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
11862 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
11863 if (BLOCK_VARS (block
))
11864 TREE_USED (block
) = 1;
11866 gimple_bind_set_body (new_stmt
, body
);
11867 gimple_omp_set_body (stmt
, NULL
);
11868 gimple_omp_for_set_pre_body (stmt
, NULL
);
11871 /* Callback for walk_stmts. Check if the current statement only contains
11872 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11875 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
11876 bool *handled_ops_p
,
11877 struct walk_stmt_info
*wi
)
11879 int *info
= (int *) wi
->info
;
11880 gimple
*stmt
= gsi_stmt (*gsi_p
);
11882 *handled_ops_p
= true;
11883 switch (gimple_code (stmt
))
11889 case GIMPLE_OMP_FOR
:
11890 case GIMPLE_OMP_SECTIONS
:
11891 *info
= *info
== 0 ? 1 : -1;
11900 struct omp_taskcopy_context
11902 /* This field must be at the beginning, as we do "inheritance": Some
11903 callback functions for tree-inline.cc (e.g., omp_copy_decl)
11904 receive a copy_body_data pointer that is up-casted to an
11905 omp_context pointer. */
11911 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
11913 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
11915 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
11916 return create_tmp_var (TREE_TYPE (var
));
11922 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
11924 tree name
, new_fields
= NULL
, type
, f
;
11926 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
11927 name
= DECL_NAME (TYPE_NAME (orig_type
));
11928 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
11929 TYPE_DECL
, name
, type
);
11930 TYPE_NAME (type
) = name
;
11932 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
11934 tree new_f
= copy_node (f
);
11935 DECL_CONTEXT (new_f
) = type
;
11936 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
11937 TREE_CHAIN (new_f
) = new_fields
;
11938 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
11939 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
11940 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
11942 new_fields
= new_f
;
11943 tcctx
->cb
.decl_map
->put (f
, new_f
);
11945 TYPE_FIELDS (type
) = nreverse (new_fields
);
11946 layout_type (type
);
11950 /* Create task copyfn. */
11953 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
11955 struct function
*child_cfun
;
11956 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
11957 tree record_type
, srecord_type
, bind
, list
;
11958 bool record_needs_remap
= false, srecord_needs_remap
= false;
11960 struct omp_taskcopy_context tcctx
;
11961 location_t loc
= gimple_location (task_stmt
);
11962 size_t looptempno
= 0;
11964 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
11965 task_cpyfns
.safe_push (task_stmt
);
11966 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
11967 gcc_assert (child_cfun
->cfg
== NULL
);
11968 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
11970 /* Reset DECL_CONTEXT on function arguments. */
11971 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
11972 DECL_CONTEXT (t
) = child_fn
;
11974 /* Populate the function. */
11975 push_gimplify_context ();
11976 push_cfun (child_cfun
);
11978 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
11979 TREE_SIDE_EFFECTS (bind
) = 1;
11981 DECL_SAVED_TREE (child_fn
) = bind
;
11982 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
11984 /* Remap src and dst argument types if needed. */
11985 record_type
= ctx
->record_type
;
11986 srecord_type
= ctx
->srecord_type
;
11987 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
11988 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
11990 record_needs_remap
= true;
11993 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
11994 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
11996 srecord_needs_remap
= true;
12000 if (record_needs_remap
|| srecord_needs_remap
)
12002 memset (&tcctx
, '\0', sizeof (tcctx
));
12003 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
12004 tcctx
.cb
.dst_fn
= child_fn
;
12005 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
12006 gcc_checking_assert (tcctx
.cb
.src_node
);
12007 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
12008 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
12009 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
12010 tcctx
.cb
.eh_lp_nr
= 0;
12011 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
12012 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
12015 if (record_needs_remap
)
12016 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
12017 if (srecord_needs_remap
)
12018 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
12021 tcctx
.cb
.decl_map
= NULL
;
12023 arg
= DECL_ARGUMENTS (child_fn
);
12024 TREE_TYPE (arg
) = build_pointer_type (record_type
);
12025 sarg
= DECL_CHAIN (arg
);
12026 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
12028 /* First pass: initialize temporaries used in record_type and srecord_type
12029 sizes and field offsets. */
12030 if (tcctx
.cb
.decl_map
)
12031 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12032 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12036 decl
= OMP_CLAUSE_DECL (c
);
12037 p
= tcctx
.cb
.decl_map
->get (decl
);
12040 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12041 sf
= (tree
) n
->value
;
12042 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12043 src
= build_simple_mem_ref_loc (loc
, sarg
);
12044 src
= omp_build_component_ref (src
, sf
);
12045 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
12046 append_to_statement_list (t
, &list
);
12049 /* Second pass: copy shared var pointers and copy construct non-VLA
12050 firstprivate vars. */
12051 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12052 switch (OMP_CLAUSE_CODE (c
))
12054 splay_tree_key key
;
12055 case OMP_CLAUSE_SHARED
:
12056 decl
= OMP_CLAUSE_DECL (c
);
12057 key
= (splay_tree_key
) decl
;
12058 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
12059 key
= (splay_tree_key
) &DECL_UID (decl
);
12060 n
= splay_tree_lookup (ctx
->field_map
, key
);
12063 f
= (tree
) n
->value
;
12064 if (tcctx
.cb
.decl_map
)
12065 f
= *tcctx
.cb
.decl_map
->get (f
);
12066 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
12067 sf
= (tree
) n
->value
;
12068 if (tcctx
.cb
.decl_map
)
12069 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12070 src
= build_simple_mem_ref_loc (loc
, sarg
);
12071 src
= omp_build_component_ref (src
, sf
);
12072 dst
= build_simple_mem_ref_loc (loc
, arg
);
12073 dst
= omp_build_component_ref (dst
, f
);
12074 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12075 append_to_statement_list (t
, &list
);
12077 case OMP_CLAUSE_REDUCTION
:
12078 case OMP_CLAUSE_IN_REDUCTION
:
12079 decl
= OMP_CLAUSE_DECL (c
);
12080 if (TREE_CODE (decl
) == MEM_REF
)
12082 decl
= TREE_OPERAND (decl
, 0);
12083 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
12084 decl
= TREE_OPERAND (decl
, 0);
12085 if (TREE_CODE (decl
) == INDIRECT_REF
12086 || TREE_CODE (decl
) == ADDR_EXPR
)
12087 decl
= TREE_OPERAND (decl
, 0);
12089 key
= (splay_tree_key
) decl
;
12090 n
= splay_tree_lookup (ctx
->field_map
, key
);
12093 f
= (tree
) n
->value
;
12094 if (tcctx
.cb
.decl_map
)
12095 f
= *tcctx
.cb
.decl_map
->get (f
);
12096 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
12097 sf
= (tree
) n
->value
;
12098 if (tcctx
.cb
.decl_map
)
12099 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12100 src
= build_simple_mem_ref_loc (loc
, sarg
);
12101 src
= omp_build_component_ref (src
, sf
);
12102 if (decl
!= OMP_CLAUSE_DECL (c
)
12103 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
12104 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
12105 src
= build_simple_mem_ref_loc (loc
, src
);
12106 dst
= build_simple_mem_ref_loc (loc
, arg
);
12107 dst
= omp_build_component_ref (dst
, f
);
12108 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12109 append_to_statement_list (t
, &list
);
12111 case OMP_CLAUSE__LOOPTEMP_
:
12112 /* Fields for first two _looptemp_ clauses are initialized by
12113 GOMP_taskloop*, the rest are handled like firstprivate. */
12114 if (looptempno
< 2)
12120 case OMP_CLAUSE__REDUCTEMP_
:
12121 case OMP_CLAUSE_FIRSTPRIVATE
:
12122 decl
= OMP_CLAUSE_DECL (c
);
12123 if (is_variable_sized (decl
))
12125 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12128 f
= (tree
) n
->value
;
12129 if (tcctx
.cb
.decl_map
)
12130 f
= *tcctx
.cb
.decl_map
->get (f
);
12131 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12134 sf
= (tree
) n
->value
;
12135 if (tcctx
.cb
.decl_map
)
12136 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12137 src
= build_simple_mem_ref_loc (loc
, sarg
);
12138 src
= omp_build_component_ref (src
, sf
);
12139 if (use_pointer_for_field (decl
, NULL
)
12140 || omp_privatize_by_reference (decl
))
12141 src
= build_simple_mem_ref_loc (loc
, src
);
12145 dst
= build_simple_mem_ref_loc (loc
, arg
);
12146 dst
= omp_build_component_ref (dst
, f
);
12147 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
12148 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12151 if (ctx
->allocate_map
)
12152 if (tree
*allocatorp
= ctx
->allocate_map
->get (decl
))
12154 tree allocator
= *allocatorp
;
12155 HOST_WIDE_INT ialign
= 0;
12156 if (TREE_CODE (allocator
) == TREE_LIST
)
12158 ialign
= tree_to_uhwi (TREE_VALUE (allocator
));
12159 allocator
= TREE_PURPOSE (allocator
);
12161 if (TREE_CODE (allocator
) != INTEGER_CST
)
12163 n
= splay_tree_lookup (ctx
->sfield_map
,
12164 (splay_tree_key
) allocator
);
12165 allocator
= (tree
) n
->value
;
12166 if (tcctx
.cb
.decl_map
)
12167 allocator
= *tcctx
.cb
.decl_map
->get (allocator
);
12168 tree a
= build_simple_mem_ref_loc (loc
, sarg
);
12169 allocator
= omp_build_component_ref (a
, allocator
);
12171 allocator
= fold_convert (pointer_sized_int_node
, allocator
);
12172 tree a
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
12173 tree align
= build_int_cst (size_type_node
,
12175 DECL_ALIGN_UNIT (decl
)));
12176 tree sz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst
)));
12177 tree ptr
= build_call_expr_loc (loc
, a
, 3, align
, sz
,
12179 ptr
= fold_convert (TREE_TYPE (dst
), ptr
);
12180 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, ptr
);
12181 append_to_statement_list (t
, &list
);
12182 dst
= build_simple_mem_ref_loc (loc
, dst
);
12184 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
12186 append_to_statement_list (t
, &list
);
12188 case OMP_CLAUSE_PRIVATE
:
12189 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
12191 decl
= OMP_CLAUSE_DECL (c
);
12192 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12193 f
= (tree
) n
->value
;
12194 if (tcctx
.cb
.decl_map
)
12195 f
= *tcctx
.cb
.decl_map
->get (f
);
12196 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12199 sf
= (tree
) n
->value
;
12200 if (tcctx
.cb
.decl_map
)
12201 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12202 src
= build_simple_mem_ref_loc (loc
, sarg
);
12203 src
= omp_build_component_ref (src
, sf
);
12204 if (use_pointer_for_field (decl
, NULL
))
12205 src
= build_simple_mem_ref_loc (loc
, src
);
12209 dst
= build_simple_mem_ref_loc (loc
, arg
);
12210 dst
= omp_build_component_ref (dst
, f
);
12211 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12212 append_to_statement_list (t
, &list
);
12218 /* Last pass: handle VLA firstprivates. */
12219 if (tcctx
.cb
.decl_map
)
12220 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12221 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12225 decl
= OMP_CLAUSE_DECL (c
);
12226 if (!is_variable_sized (decl
))
12228 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12231 f
= (tree
) n
->value
;
12232 f
= *tcctx
.cb
.decl_map
->get (f
);
12233 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
12234 ind
= DECL_VALUE_EXPR (decl
);
12235 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
12236 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
12237 n
= splay_tree_lookup (ctx
->sfield_map
,
12238 (splay_tree_key
) TREE_OPERAND (ind
, 0));
12239 sf
= (tree
) n
->value
;
12240 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12241 src
= build_simple_mem_ref_loc (loc
, sarg
);
12242 src
= omp_build_component_ref (src
, sf
);
12243 src
= build_simple_mem_ref_loc (loc
, src
);
12244 dst
= build_simple_mem_ref_loc (loc
, arg
);
12245 dst
= omp_build_component_ref (dst
, f
);
12246 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
12247 append_to_statement_list (t
, &list
);
12248 n
= splay_tree_lookup (ctx
->field_map
,
12249 (splay_tree_key
) TREE_OPERAND (ind
, 0));
12250 df
= (tree
) n
->value
;
12251 df
= *tcctx
.cb
.decl_map
->get (df
);
12252 ptr
= build_simple_mem_ref_loc (loc
, arg
);
12253 ptr
= omp_build_component_ref (ptr
, df
);
12254 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
12255 build_fold_addr_expr_loc (loc
, dst
));
12256 append_to_statement_list (t
, &list
);
12259 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
12260 append_to_statement_list (t
, &list
);
12262 if (tcctx
.cb
.decl_map
)
12263 delete tcctx
.cb
.decl_map
;
12264 pop_gimplify_context (NULL
);
12265 BIND_EXPR_BODY (bind
) = list
;
12270 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
12274 size_t cnt
[5] = { 0, 0, 0, 0, 0 }, idx
= 2, i
;
12276 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
12277 gcc_assert (clauses
);
12278 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12279 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
12280 switch (OMP_CLAUSE_DEPEND_KIND (c
))
12282 case OMP_CLAUSE_DEPEND_LAST
:
12283 /* Lowering already done at gimplification. */
12285 case OMP_CLAUSE_DEPEND_IN
:
12288 case OMP_CLAUSE_DEPEND_OUT
:
12289 case OMP_CLAUSE_DEPEND_INOUT
:
12292 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
12295 case OMP_CLAUSE_DEPEND_DEPOBJ
:
12298 case OMP_CLAUSE_DEPEND_INOUTSET
:
12302 gcc_unreachable ();
12304 if (cnt
[1] || cnt
[3] || cnt
[4])
12306 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3] + cnt
[4];
12307 size_t inoutidx
= total
+ idx
;
12308 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
+ 2 * cnt
[4]);
12309 tree array
= create_tmp_var (type
);
12310 TREE_ADDRESSABLE (array
) = 1;
12311 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
12315 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
12316 gimple_seq_add_stmt (iseq
, g
);
12317 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
12320 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
12321 gimple_seq_add_stmt (iseq
, g
);
12322 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
12324 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
12325 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
12326 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
12327 gimple_seq_add_stmt (iseq
, g
);
12329 for (i
= 0; i
< 5; i
++)
12333 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12334 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
12338 switch (OMP_CLAUSE_DEPEND_KIND (c
))
12340 case OMP_CLAUSE_DEPEND_IN
:
12344 case OMP_CLAUSE_DEPEND_OUT
:
12345 case OMP_CLAUSE_DEPEND_INOUT
:
12349 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
12353 case OMP_CLAUSE_DEPEND_DEPOBJ
:
12357 case OMP_CLAUSE_DEPEND_INOUTSET
:
12362 gcc_unreachable ();
12364 tree t
= OMP_CLAUSE_DECL (c
);
12367 t
= build4 (ARRAY_REF
, ptr_type_node
, array
,
12368 size_int (inoutidx
), NULL_TREE
, NULL_TREE
);
12369 t
= build_fold_addr_expr (t
);
12372 t
= fold_convert (ptr_type_node
, t
);
12373 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
12374 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12375 NULL_TREE
, NULL_TREE
);
12376 g
= gimple_build_assign (r
, t
);
12377 gimple_seq_add_stmt (iseq
, g
);
12381 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12382 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
12383 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_INOUTSET
)
12385 tree t
= OMP_CLAUSE_DECL (c
);
12386 t
= fold_convert (ptr_type_node
, t
);
12387 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
12388 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12389 NULL_TREE
, NULL_TREE
);
12390 g
= gimple_build_assign (r
, t
);
12391 gimple_seq_add_stmt (iseq
, g
);
12392 t
= build_int_cst (ptr_type_node
, GOMP_DEPEND_INOUTSET
);
12393 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12394 NULL_TREE
, NULL_TREE
);
12395 g
= gimple_build_assign (r
, t
);
12396 gimple_seq_add_stmt (iseq
, g
);
12399 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
12400 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
12401 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
12402 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
12404 tree clobber
= build_clobber (type
);
12405 g
= gimple_build_assign (array
, clobber
);
12406 gimple_seq_add_stmt (oseq
, g
);
12409 /* Lower the OpenMP parallel or task directive in the current statement
12410 in GSI_P. CTX holds context information for the directive. */
12413 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12417 gimple
*stmt
= gsi_stmt (*gsi_p
);
12418 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
12419 gimple_seq par_body
;
12420 location_t loc
= gimple_location (stmt
);
12422 clauses
= gimple_omp_taskreg_clauses (stmt
);
12423 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12424 && gimple_omp_task_taskwait_p (stmt
))
12432 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
12433 par_body
= gimple_bind_body (par_bind
);
12435 child_fn
= ctx
->cb
.dst_fn
;
12436 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
12437 && !gimple_omp_parallel_combined_p (stmt
))
12439 struct walk_stmt_info wi
;
12442 memset (&wi
, 0, sizeof (wi
));
12444 wi
.val_only
= true;
12445 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
12447 gimple_omp_parallel_set_combined_p (stmt
, true);
12449 gimple_seq dep_ilist
= NULL
;
12450 gimple_seq dep_olist
= NULL
;
12451 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12452 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
12454 push_gimplify_context ();
12455 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12456 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
12457 &dep_ilist
, &dep_olist
);
12460 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12461 && gimple_omp_task_taskwait_p (stmt
))
12465 gsi_replace (gsi_p
, dep_bind
, true);
12466 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12467 gimple_bind_add_stmt (dep_bind
, stmt
);
12468 gimple_bind_add_seq (dep_bind
, dep_olist
);
12469 pop_gimplify_context (dep_bind
);
12474 if (ctx
->srecord_type
)
12475 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
12477 gimple_seq tskred_ilist
= NULL
;
12478 gimple_seq tskred_olist
= NULL
;
12479 if ((is_task_ctx (ctx
)
12480 && gimple_omp_task_taskloop_p (ctx
->stmt
)
12481 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
12482 OMP_CLAUSE_REDUCTION
))
12483 || (is_parallel_ctx (ctx
)
12484 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
12485 OMP_CLAUSE__REDUCTEMP_
)))
12487 if (dep_bind
== NULL
)
12489 push_gimplify_context ();
12490 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12492 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
12494 gimple_omp_taskreg_clauses (ctx
->stmt
),
12495 &tskred_ilist
, &tskred_olist
);
12498 push_gimplify_context ();
12500 gimple_seq par_olist
= NULL
;
12501 gimple_seq par_ilist
= NULL
;
12502 gimple_seq par_rlist
= NULL
;
12503 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
12504 lower_omp (&par_body
, ctx
);
12505 if (gimple_code (stmt
) != GIMPLE_OMP_TASK
)
12506 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
12508 /* Declare all the variables created by mapping and the variables
12509 declared in the scope of the parallel body. */
12510 record_vars_into (ctx
->block_vars
, child_fn
);
12511 maybe_remove_omp_member_access_dummy_vars (par_bind
);
12512 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
12514 if (ctx
->record_type
)
12517 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
12518 : ctx
->record_type
, ".omp_data_o");
12519 DECL_NAMELESS (ctx
->sender_decl
) = 1;
12520 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
12521 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
12524 gimple_seq olist
= NULL
;
12525 gimple_seq ilist
= NULL
;
12526 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
12527 lower_send_shared_vars (&ilist
, &olist
, ctx
);
12529 if (ctx
->record_type
)
12531 tree clobber
= build_clobber (TREE_TYPE (ctx
->sender_decl
));
12532 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
12536 /* Once all the expansions are done, sequence all the different
12537 fragments inside gimple_omp_body. */
12539 gimple_seq new_body
= NULL
;
12541 if (ctx
->record_type
)
12543 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
12544 /* fixup_child_record_type might have changed receiver_decl's type. */
12545 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
12546 gimple_seq_add_stmt (&new_body
,
12547 gimple_build_assign (ctx
->receiver_decl
, t
));
12550 gimple_seq_add_seq (&new_body
, par_ilist
);
12551 gimple_seq_add_seq (&new_body
, par_body
);
12552 gimple_seq_add_seq (&new_body
, par_rlist
);
12553 if (ctx
->cancellable
)
12554 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
12555 gimple_seq_add_seq (&new_body
, par_olist
);
12556 new_body
= maybe_catch_exception (new_body
);
12557 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
12558 gimple_seq_add_stmt (&new_body
,
12559 gimple_build_omp_continue (integer_zero_node
,
12560 integer_zero_node
));
12561 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
12562 gimple_omp_set_body (stmt
, new_body
);
12564 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
12565 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12567 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
12568 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
12569 gimple_bind_add_seq (bind
, ilist
);
12570 gimple_bind_add_stmt (bind
, stmt
);
12571 gimple_bind_add_seq (bind
, olist
);
12573 pop_gimplify_context (NULL
);
12577 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12578 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
12579 gimple_bind_add_stmt (dep_bind
, bind
);
12580 gimple_bind_add_seq (dep_bind
, tskred_olist
);
12581 gimple_bind_add_seq (dep_bind
, dep_olist
);
12582 pop_gimplify_context (dep_bind
);
12586 /* Lower the GIMPLE_OMP_TARGET in the current statement
12587 in GSI_P. CTX holds context information for the directive. */
12590 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12593 tree child_fn
, t
, c
;
12594 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
12595 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
12596 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
12597 location_t loc
= gimple_location (stmt
);
12598 bool offloaded
, data_region
;
12599 unsigned int map_cnt
= 0;
12600 tree in_reduction_clauses
= NULL_TREE
;
12602 offloaded
= is_gimple_omp_offloaded (stmt
);
12603 switch (gimple_omp_target_kind (stmt
))
12605 case GF_OMP_TARGET_KIND_REGION
:
12607 q
= &in_reduction_clauses
;
12608 for (p
= gimple_omp_target_clauses_ptr (stmt
); *p
; )
12609 if (OMP_CLAUSE_CODE (*p
) == OMP_CLAUSE_IN_REDUCTION
)
12612 q
= &OMP_CLAUSE_CHAIN (*q
);
12613 *p
= OMP_CLAUSE_CHAIN (*p
);
12616 p
= &OMP_CLAUSE_CHAIN (*p
);
12618 *p
= in_reduction_clauses
;
12620 case GF_OMP_TARGET_KIND_UPDATE
:
12621 case GF_OMP_TARGET_KIND_ENTER_DATA
:
12622 case GF_OMP_TARGET_KIND_EXIT_DATA
:
12623 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
12624 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
12625 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
12626 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
12627 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
12628 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
12629 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
12630 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
12631 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
12632 data_region
= false;
12634 case GF_OMP_TARGET_KIND_DATA
:
12635 case GF_OMP_TARGET_KIND_OACC_DATA
:
12636 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
12637 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
12638 data_region
= true;
12641 gcc_unreachable ();
12644 /* Ensure that requires map is written via output_offload_tables, even if only
12645 'target (enter/exit) data' is used in the translation unit. */
12646 if (ENABLE_OFFLOADING
&& (omp_requires_mask
& OMP_REQUIRES_TARGET_USED
))
12647 g
->have_offload
= true;
12649 clauses
= gimple_omp_target_clauses (stmt
);
12651 gimple_seq dep_ilist
= NULL
;
12652 gimple_seq dep_olist
= NULL
;
12653 bool has_depend
= omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
) != NULL_TREE
;
12654 if (has_depend
|| in_reduction_clauses
)
12656 push_gimplify_context ();
12657 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12659 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
12660 &dep_ilist
, &dep_olist
);
12661 if (in_reduction_clauses
)
12662 lower_rec_input_clauses (in_reduction_clauses
, &dep_ilist
, &dep_olist
,
12670 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
12671 tgt_body
= gimple_bind_body (tgt_bind
);
12673 else if (data_region
)
12674 tgt_body
= gimple_omp_body (stmt
);
12675 child_fn
= ctx
->cb
.dst_fn
;
12677 push_gimplify_context ();
12680 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12681 switch (OMP_CLAUSE_CODE (c
))
12687 case OMP_CLAUSE_MAP
:
12689 /* First check what we're prepared to handle in the following. */
12690 switch (OMP_CLAUSE_MAP_KIND (c
))
12692 case GOMP_MAP_ALLOC
:
12694 case GOMP_MAP_FROM
:
12695 case GOMP_MAP_TOFROM
:
12696 case GOMP_MAP_POINTER
:
12697 case GOMP_MAP_TO_PSET
:
12698 case GOMP_MAP_DELETE
:
12699 case GOMP_MAP_RELEASE
:
12700 case GOMP_MAP_ALWAYS_TO
:
12701 case GOMP_MAP_ALWAYS_FROM
:
12702 case GOMP_MAP_ALWAYS_TOFROM
:
12703 case GOMP_MAP_FORCE_PRESENT
:
12704 case GOMP_MAP_ALWAYS_PRESENT_FROM
:
12705 case GOMP_MAP_ALWAYS_PRESENT_TO
:
12706 case GOMP_MAP_ALWAYS_PRESENT_TOFROM
:
12708 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
12709 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
12710 case GOMP_MAP_STRUCT
:
12711 case GOMP_MAP_ALWAYS_POINTER
:
12712 case GOMP_MAP_ATTACH
:
12713 case GOMP_MAP_DETACH
:
12714 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
12715 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
12717 case GOMP_MAP_IF_PRESENT
:
12718 case GOMP_MAP_FORCE_ALLOC
:
12719 case GOMP_MAP_FORCE_TO
:
12720 case GOMP_MAP_FORCE_FROM
:
12721 case GOMP_MAP_FORCE_TOFROM
:
12722 case GOMP_MAP_FORCE_DEVICEPTR
:
12723 case GOMP_MAP_DEVICE_RESIDENT
:
12724 case GOMP_MAP_LINK
:
12725 case GOMP_MAP_FORCE_DETACH
:
12726 gcc_assert (is_gimple_omp_oacc (stmt
));
12729 gcc_unreachable ();
12733 case OMP_CLAUSE_TO
:
12734 case OMP_CLAUSE_FROM
:
12736 var
= OMP_CLAUSE_DECL (c
);
12739 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
12740 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12741 && (OMP_CLAUSE_MAP_KIND (c
)
12742 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
12747 if (DECL_SIZE (var
)
12748 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12750 tree var2
= DECL_VALUE_EXPR (var
);
12751 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12752 var2
= TREE_OPERAND (var2
, 0);
12753 gcc_assert (DECL_P (var2
));
12758 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12759 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12760 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
12762 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12764 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
12765 && varpool_node::get_create (var
)->offloadable
)
12768 tree type
= build_pointer_type (TREE_TYPE (var
));
12769 tree new_var
= lookup_decl (var
, ctx
);
12770 x
= create_tmp_var_raw (type
, get_name (new_var
));
12771 gimple_add_tmp_var (x
);
12772 x
= build_simple_mem_ref (x
);
12773 SET_DECL_VALUE_EXPR (new_var
, x
);
12774 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12779 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12780 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12781 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
12782 && is_omp_target (stmt
))
12784 gcc_assert (maybe_lookup_field (c
, ctx
));
12789 if (!maybe_lookup_field (var
, ctx
))
12792 /* Don't remap compute constructs' reduction variables, because the
12793 intermediate result must be local to each gang. */
12794 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12795 && is_gimple_omp_oacc (ctx
->stmt
)
12796 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
12798 x
= build_receiver_ref (var
, true, ctx
);
12799 tree new_var
= lookup_decl (var
, ctx
);
12801 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12802 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
12803 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12804 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12805 x
= build_simple_mem_ref (x
);
12806 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12808 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
12809 if (omp_privatize_by_reference (new_var
)
12810 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
12811 || DECL_BY_REFERENCE (var
)))
12813 /* Create a local object to hold the instance
12815 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
12816 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
12817 tree inst
= create_tmp_var (type
, id
);
12818 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
12819 x
= build_fold_addr_expr (inst
);
12821 gimplify_assign (new_var
, x
, &fplist
);
12823 else if (DECL_P (new_var
))
12825 SET_DECL_VALUE_EXPR (new_var
, x
);
12826 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12829 gcc_unreachable ();
12834 case OMP_CLAUSE_FIRSTPRIVATE
:
12835 omp_firstprivate_recv
:
12836 gcc_checking_assert (offloaded
);
12837 if (is_gimple_omp_oacc (ctx
->stmt
))
12839 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12840 gcc_checking_assert (!is_oacc_kernels (ctx
));
12841 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12842 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12844 goto oacc_firstprivate
;
12847 var
= OMP_CLAUSE_DECL (c
);
12848 if (!omp_privatize_by_reference (var
)
12849 && !is_gimple_reg_type (TREE_TYPE (var
)))
12851 tree new_var
= lookup_decl (var
, ctx
);
12852 if (is_variable_sized (var
))
12854 tree pvar
= DECL_VALUE_EXPR (var
);
12855 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12856 pvar
= TREE_OPERAND (pvar
, 0);
12857 gcc_assert (DECL_P (pvar
));
12858 tree new_pvar
= lookup_decl (pvar
, ctx
);
12859 x
= build_fold_indirect_ref (new_pvar
);
12860 TREE_THIS_NOTRAP (x
) = 1;
12863 x
= build_receiver_ref (var
, true, ctx
);
12864 SET_DECL_VALUE_EXPR (new_var
, x
);
12865 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12867 /* Fortran array descriptors: firstprivate of data + attach. */
12868 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
12869 && lang_hooks
.decls
.omp_array_data (var
, true))
12873 case OMP_CLAUSE_PRIVATE
:
12874 gcc_checking_assert (offloaded
);
12875 if (is_gimple_omp_oacc (ctx
->stmt
))
12877 /* No 'private' clauses on OpenACC 'kernels'. */
12878 gcc_checking_assert (!is_oacc_kernels (ctx
));
12879 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12880 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12884 var
= OMP_CLAUSE_DECL (c
);
12885 if (is_variable_sized (var
))
12887 tree new_var
= lookup_decl (var
, ctx
);
12888 tree pvar
= DECL_VALUE_EXPR (var
);
12889 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12890 pvar
= TREE_OPERAND (pvar
, 0);
12891 gcc_assert (DECL_P (pvar
));
12892 tree new_pvar
= lookup_decl (pvar
, ctx
);
12893 x
= build_fold_indirect_ref (new_pvar
);
12894 TREE_THIS_NOTRAP (x
) = 1;
12895 SET_DECL_VALUE_EXPR (new_var
, x
);
12896 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12900 case OMP_CLAUSE_USE_DEVICE_PTR
:
12901 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12902 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
12903 case OMP_CLAUSE_IS_DEVICE_PTR
:
12904 var
= OMP_CLAUSE_DECL (c
);
12905 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
12907 while (TREE_CODE (var
) == INDIRECT_REF
12908 || TREE_CODE (var
) == ARRAY_REF
)
12909 var
= TREE_OPERAND (var
, 0);
12910 if (lang_hooks
.decls
.omp_array_data (var
, true))
12911 goto omp_firstprivate_recv
;
12914 if (is_variable_sized (var
))
12916 tree new_var
= lookup_decl (var
, ctx
);
12917 tree pvar
= DECL_VALUE_EXPR (var
);
12918 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12919 pvar
= TREE_OPERAND (pvar
, 0);
12920 gcc_assert (DECL_P (pvar
));
12921 tree new_pvar
= lookup_decl (pvar
, ctx
);
12922 x
= build_fold_indirect_ref (new_pvar
);
12923 TREE_THIS_NOTRAP (x
) = 1;
12924 SET_DECL_VALUE_EXPR (new_var
, x
);
12925 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12927 else if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12928 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
12929 && !omp_privatize_by_reference (var
)
12930 && !omp_is_allocatable_or_ptr (var
)
12931 && !lang_hooks
.decls
.omp_array_data (var
, true))
12932 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12934 tree new_var
= lookup_decl (var
, ctx
);
12935 tree type
= build_pointer_type (TREE_TYPE (var
));
12936 x
= create_tmp_var_raw (type
, get_name (new_var
));
12937 gimple_add_tmp_var (x
);
12938 x
= build_simple_mem_ref (x
);
12939 SET_DECL_VALUE_EXPR (new_var
, x
);
12940 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12944 tree new_var
= lookup_decl (var
, ctx
);
12945 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
12946 gimple_add_tmp_var (x
);
12947 SET_DECL_VALUE_EXPR (new_var
, x
);
12948 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12955 target_nesting_level
++;
12956 lower_omp (&tgt_body
, ctx
);
12957 target_nesting_level
--;
12959 else if (data_region
)
12960 lower_omp (&tgt_body
, ctx
);
12964 /* Declare all the variables created by mapping and the variables
12965 declared in the scope of the target body. */
12966 record_vars_into (ctx
->block_vars
, child_fn
);
12967 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
12968 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
12973 if (ctx
->record_type
)
12976 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
12977 DECL_NAMELESS (ctx
->sender_decl
) = 1;
12978 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
12979 t
= make_tree_vec (3);
12980 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
12981 TREE_VEC_ELT (t
, 1)
12982 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
12983 ".omp_data_sizes");
12984 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
12985 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
12986 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
12987 tree tkind_type
= short_unsigned_type_node
;
12988 int talign_shift
= 8;
12989 TREE_VEC_ELT (t
, 2)
12990 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
12991 ".omp_data_kinds");
12992 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
12993 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
12994 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
12995 gimple_omp_target_set_data_arg (stmt
, t
);
12997 vec
<constructor_elt
, va_gc
> *vsize
;
12998 vec
<constructor_elt
, va_gc
> *vkind
;
12999 vec_alloc (vsize
, map_cnt
);
13000 vec_alloc (vkind
, map_cnt
);
13001 unsigned int map_idx
= 0;
13003 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13004 switch (OMP_CLAUSE_CODE (c
))
13006 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
13007 unsigned int talign
;
13012 case OMP_CLAUSE_MAP
:
13013 case OMP_CLAUSE_TO
:
13014 case OMP_CLAUSE_FROM
:
13015 oacc_firstprivate_map
:
13017 ovar
= OMP_CLAUSE_DECL (c
);
13018 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13019 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13020 || (OMP_CLAUSE_MAP_KIND (c
)
13021 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
13023 if (!DECL_P (ovar
))
13025 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13026 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
13028 nc
= OMP_CLAUSE_CHAIN (c
);
13029 gcc_checking_assert (OMP_CLAUSE_DECL (nc
)
13030 == get_base_address (ovar
));
13031 ovar
= OMP_CLAUSE_DECL (nc
);
13035 tree x
= build_sender_ref (ovar
, ctx
);
13037 if (in_reduction_clauses
13038 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13039 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
13041 v
= unshare_expr (v
);
13043 while (handled_component_p (*p
)
13044 || TREE_CODE (*p
) == INDIRECT_REF
13045 || TREE_CODE (*p
) == ADDR_EXPR
13046 || TREE_CODE (*p
) == MEM_REF
13047 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
13048 p
= &TREE_OPERAND (*p
, 0);
13050 if (is_variable_sized (d
))
13052 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
13053 d
= DECL_VALUE_EXPR (d
);
13054 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
13055 d
= TREE_OPERAND (d
, 0);
13056 gcc_assert (DECL_P (d
));
13059 = (splay_tree_key
) &DECL_CONTEXT (d
);
13060 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
13065 *p
= build_fold_indirect_ref (nd
);
13067 v
= build_fold_addr_expr_with_type (v
, ptr_type_node
);
13068 gimplify_assign (x
, v
, &ilist
);
13074 if (DECL_SIZE (ovar
)
13075 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
13077 tree ovar2
= DECL_VALUE_EXPR (ovar
);
13078 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
13079 ovar2
= TREE_OPERAND (ovar2
, 0);
13080 gcc_assert (DECL_P (ovar2
));
13083 if (!maybe_lookup_field (ovar
, ctx
)
13084 && !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13085 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13086 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)))
13090 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
13091 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
13092 talign
= DECL_ALIGN_UNIT (ovar
);
13097 if (in_reduction_clauses
13098 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13099 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
13102 if (is_variable_sized (d
))
13104 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
13105 d
= DECL_VALUE_EXPR (d
);
13106 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
13107 d
= TREE_OPERAND (d
, 0);
13108 gcc_assert (DECL_P (d
));
13111 = (splay_tree_key
) &DECL_CONTEXT (d
);
13112 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
13117 var
= build_fold_indirect_ref (nd
);
13120 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13123 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13124 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13125 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
13126 && is_omp_target (stmt
))
13128 x
= build_sender_ref (c
, ctx
);
13129 gimplify_assign (x
, build_fold_addr_expr (var
), &ilist
);
13133 x
= build_sender_ref (ovar
, ctx
);
13135 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13136 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
13137 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
13138 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
13140 gcc_assert (offloaded
);
13142 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
13143 mark_addressable (avar
);
13144 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
13145 talign
= DECL_ALIGN_UNIT (avar
);
13146 avar
= build_fold_addr_expr (avar
);
13147 gimplify_assign (x
, avar
, &ilist
);
13149 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13151 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
13152 if (!omp_privatize_by_reference (var
))
13154 if (is_gimple_reg (var
)
13155 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13156 suppress_warning (var
);
13157 var
= build_fold_addr_expr (var
);
13160 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13161 gimplify_assign (x
, var
, &ilist
);
13163 else if (is_gimple_reg (var
))
13165 gcc_assert (offloaded
);
13166 tree avar
= create_tmp_var (TREE_TYPE (var
));
13167 mark_addressable (avar
);
13168 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
13169 if (GOMP_MAP_COPY_TO_P (map_kind
)
13170 || map_kind
== GOMP_MAP_POINTER
13171 || map_kind
== GOMP_MAP_TO_PSET
13172 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13174 /* If we need to initialize a temporary
13175 with VAR because it is not addressable, and
13176 the variable hasn't been initialized yet, then
13177 we'll get a warning for the store to avar.
13178 Don't warn in that case, the mapping might
13180 suppress_warning (var
, OPT_Wuninitialized
);
13181 gimplify_assign (avar
, var
, &ilist
);
13183 avar
= build_fold_addr_expr (avar
);
13184 gimplify_assign (x
, avar
, &ilist
);
13185 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
13186 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13187 && !TYPE_READONLY (TREE_TYPE (var
)))
13189 x
= unshare_expr (x
);
13190 x
= build_simple_mem_ref (x
);
13191 gimplify_assign (var
, x
, &olist
);
13196 /* While MAP is handled explicitly by the FE,
13197 for 'target update', only the identified is passed. */
13198 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
13199 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
13200 && (omp_is_allocatable_or_ptr (var
)
13201 && omp_check_optional_argument (var
, false)))
13202 var
= build_fold_indirect_ref (var
);
13203 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
13204 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
13205 || (!omp_is_allocatable_or_ptr (var
)
13206 && !omp_check_optional_argument (var
, false)))
13207 var
= build_fold_addr_expr (var
);
13208 gimplify_assign (x
, var
, &ilist
);
13212 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13214 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13215 s
= TREE_TYPE (ovar
);
13216 if (TREE_CODE (s
) == REFERENCE_TYPE
13217 || omp_check_optional_argument (ovar
, false))
13219 s
= TYPE_SIZE_UNIT (s
);
13222 s
= OMP_CLAUSE_SIZE (c
);
13223 if (s
== NULL_TREE
)
13224 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13225 s
= fold_convert (size_type_node
, s
);
13226 purpose
= size_int (map_idx
++);
13227 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13228 if (TREE_CODE (s
) != INTEGER_CST
)
13229 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13231 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
13232 switch (OMP_CLAUSE_CODE (c
))
13234 case OMP_CLAUSE_MAP
:
13235 tkind
= OMP_CLAUSE_MAP_KIND (c
);
13236 tkind_zero
= tkind
;
13237 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
13240 case GOMP_MAP_ALLOC
:
13241 case GOMP_MAP_IF_PRESENT
:
13243 case GOMP_MAP_FROM
:
13244 case GOMP_MAP_TOFROM
:
13245 case GOMP_MAP_ALWAYS_TO
:
13246 case GOMP_MAP_ALWAYS_FROM
:
13247 case GOMP_MAP_ALWAYS_TOFROM
:
13248 case GOMP_MAP_ALWAYS_PRESENT_TO
:
13249 case GOMP_MAP_ALWAYS_PRESENT_FROM
:
13250 case GOMP_MAP_ALWAYS_PRESENT_TOFROM
:
13251 case GOMP_MAP_RELEASE
:
13252 case GOMP_MAP_FORCE_TO
:
13253 case GOMP_MAP_FORCE_FROM
:
13254 case GOMP_MAP_FORCE_TOFROM
:
13255 case GOMP_MAP_FORCE_PRESENT
:
13256 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
13258 case GOMP_MAP_DELETE
:
13259 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
13263 if (tkind_zero
!= tkind
)
13265 if (integer_zerop (s
))
13266 tkind
= tkind_zero
;
13267 else if (integer_nonzerop (s
))
13268 tkind_zero
= tkind
;
13270 if (tkind_zero
== tkind
13271 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c
)
13272 && (((tkind
& GOMP_MAP_FLAG_SPECIAL_BITS
)
13273 & ~GOMP_MAP_IMPLICIT
)
13276 /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
13277 bits are not interfered by other special bit encodings,
13278 then turn the GOMP_IMPLICIT_BIT flag on for the runtime
13280 tkind
|= GOMP_MAP_IMPLICIT
;
13281 tkind_zero
= tkind
;
13284 case OMP_CLAUSE_FIRSTPRIVATE
:
13285 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13286 tkind
= GOMP_MAP_TO
;
13287 tkind_zero
= tkind
;
13289 case OMP_CLAUSE_TO
:
13291 = (OMP_CLAUSE_MOTION_PRESENT (c
)
13292 ? GOMP_MAP_ALWAYS_PRESENT_TO
: GOMP_MAP_TO
);
13293 tkind_zero
= tkind
;
13295 case OMP_CLAUSE_FROM
:
13297 = (OMP_CLAUSE_MOTION_PRESENT (c
)
13298 ? GOMP_MAP_ALWAYS_PRESENT_FROM
: GOMP_MAP_FROM
);
13299 tkind_zero
= tkind
;
13302 gcc_unreachable ();
13304 gcc_checking_assert (tkind
13305 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13306 gcc_checking_assert (tkind_zero
13307 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13308 talign
= ceil_log2 (talign
);
13309 tkind
|= talign
<< talign_shift
;
13310 tkind_zero
|= talign
<< talign_shift
;
13311 gcc_checking_assert (tkind
13312 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13313 gcc_checking_assert (tkind_zero
13314 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13315 if (tkind
== tkind_zero
)
13316 x
= build_int_cstu (tkind_type
, tkind
);
13319 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
13320 x
= build3 (COND_EXPR
, tkind_type
,
13321 fold_build2 (EQ_EXPR
, boolean_type_node
,
13322 unshare_expr (s
), size_zero_node
),
13323 build_int_cstu (tkind_type
, tkind_zero
),
13324 build_int_cstu (tkind_type
, tkind
));
13326 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
13331 case OMP_CLAUSE_FIRSTPRIVATE
:
13332 omp_has_device_addr_descr
:
13333 if (is_gimple_omp_oacc (ctx
->stmt
))
13334 goto oacc_firstprivate_map
;
13335 ovar
= OMP_CLAUSE_DECL (c
);
13336 if (omp_privatize_by_reference (ovar
))
13337 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13339 talign
= DECL_ALIGN_UNIT (ovar
);
13340 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13341 x
= build_sender_ref (ovar
, ctx
);
13342 tkind
= GOMP_MAP_FIRSTPRIVATE
;
13343 type
= TREE_TYPE (ovar
);
13344 if (omp_privatize_by_reference (ovar
))
13345 type
= TREE_TYPE (type
);
13346 if ((INTEGRAL_TYPE_P (type
)
13347 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
13348 || TREE_CODE (type
) == POINTER_TYPE
)
13350 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13352 if (omp_privatize_by_reference (var
))
13353 t
= build_simple_mem_ref (var
);
13354 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13355 suppress_warning (var
);
13356 if (TREE_CODE (type
) != POINTER_TYPE
)
13357 t
= fold_convert (pointer_sized_int_node
, t
);
13358 t
= fold_convert (TREE_TYPE (x
), t
);
13359 gimplify_assign (x
, t
, &ilist
);
13361 else if (omp_privatize_by_reference (var
))
13362 gimplify_assign (x
, var
, &ilist
);
13363 else if (is_gimple_reg (var
))
13365 tree avar
= create_tmp_var (TREE_TYPE (var
));
13366 mark_addressable (avar
);
13367 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13368 suppress_warning (var
);
13369 gimplify_assign (avar
, var
, &ilist
);
13370 avar
= build_fold_addr_expr (avar
);
13371 gimplify_assign (x
, avar
, &ilist
);
13375 var
= build_fold_addr_expr (var
);
13376 gimplify_assign (x
, var
, &ilist
);
13378 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
13380 else if (omp_privatize_by_reference (ovar
))
13381 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13383 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13384 s
= fold_convert (size_type_node
, s
);
13385 purpose
= size_int (map_idx
++);
13386 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13387 if (TREE_CODE (s
) != INTEGER_CST
)
13388 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13390 gcc_checking_assert (tkind
13391 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13392 talign
= ceil_log2 (talign
);
13393 tkind
|= talign
<< talign_shift
;
13394 gcc_checking_assert (tkind
13395 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13396 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13397 build_int_cstu (tkind_type
, tkind
));
13398 /* Fortran array descriptors: firstprivate of data + attach. */
13399 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
13400 && lang_hooks
.decls
.omp_array_data (ovar
, true))
13402 tree not_null_lb
, null_lb
, after_lb
;
13403 tree var1
, var2
, size1
, size2
;
13404 tree present
= omp_check_optional_argument (ovar
, true);
13407 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13408 not_null_lb
= create_artificial_label (clause_loc
);
13409 null_lb
= create_artificial_label (clause_loc
);
13410 after_lb
= create_artificial_label (clause_loc
);
13411 gimple_seq seq
= NULL
;
13412 present
= force_gimple_operand (present
, &seq
, true,
13414 gimple_seq_add_seq (&ilist
, seq
);
13415 gimple_seq_add_stmt (&ilist
,
13416 gimple_build_cond_from_tree (present
,
13417 not_null_lb
, null_lb
));
13418 gimple_seq_add_stmt (&ilist
,
13419 gimple_build_label (not_null_lb
));
13421 var1
= lang_hooks
.decls
.omp_array_data (var
, false);
13422 size1
= lang_hooks
.decls
.omp_array_size (var
, &ilist
);
13423 var2
= build_fold_addr_expr (x
);
13424 if (!POINTER_TYPE_P (TREE_TYPE (var
)))
13425 var
= build_fold_addr_expr (var
);
13426 size2
= fold_build2 (POINTER_DIFF_EXPR
, ssizetype
,
13427 build_fold_addr_expr (var1
), var
);
13428 size2
= fold_convert (sizetype
, size2
);
13431 tree tmp
= create_tmp_var (TREE_TYPE (var1
));
13432 gimplify_assign (tmp
, var1
, &ilist
);
13434 tmp
= create_tmp_var (TREE_TYPE (var2
));
13435 gimplify_assign (tmp
, var2
, &ilist
);
13437 tmp
= create_tmp_var (TREE_TYPE (size1
));
13438 gimplify_assign (tmp
, size1
, &ilist
);
13440 tmp
= create_tmp_var (TREE_TYPE (size2
));
13441 gimplify_assign (tmp
, size2
, &ilist
);
13443 gimple_seq_add_stmt (&ilist
, gimple_build_goto (after_lb
));
13444 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_lb
));
13445 gimplify_assign (var1
, null_pointer_node
, &ilist
);
13446 gimplify_assign (var2
, null_pointer_node
, &ilist
);
13447 gimplify_assign (size1
, size_zero_node
, &ilist
);
13448 gimplify_assign (size2
, size_zero_node
, &ilist
);
13449 gimple_seq_add_stmt (&ilist
, gimple_build_label (after_lb
));
13451 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
13452 gimplify_assign (x
, var1
, &ilist
);
13453 tkind
= GOMP_MAP_FIRSTPRIVATE
;
13454 talign
= DECL_ALIGN_UNIT (ovar
);
13455 talign
= ceil_log2 (talign
);
13456 tkind
|= talign
<< talign_shift
;
13457 gcc_checking_assert (tkind
13459 TYPE_MAX_VALUE (tkind_type
)));
13460 purpose
= size_int (map_idx
++);
13461 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, size1
);
13462 if (TREE_CODE (size1
) != INTEGER_CST
)
13463 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13464 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13465 build_int_cstu (tkind_type
, tkind
));
13466 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
13467 gimplify_assign (x
, var2
, &ilist
);
13468 tkind
= GOMP_MAP_ATTACH
;
13469 purpose
= size_int (map_idx
++);
13470 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, size2
);
13471 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13472 build_int_cstu (tkind_type
, tkind
));
13476 case OMP_CLAUSE_USE_DEVICE_PTR
:
13477 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13478 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
13479 case OMP_CLAUSE_IS_DEVICE_PTR
:
13480 ovar
= OMP_CLAUSE_DECL (c
);
13481 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13483 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13484 goto omp_has_device_addr_descr
;
13485 while (TREE_CODE (ovar
) == INDIRECT_REF
13486 || TREE_CODE (ovar
) == ARRAY_REF
)
13487 ovar
= TREE_OPERAND (ovar
, 0);
13489 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13491 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13493 tkind
= ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13494 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13495 ? GOMP_MAP_USE_DEVICE_PTR
: GOMP_MAP_FIRSTPRIVATE_INT
);
13496 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
13498 else if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13499 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13501 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
13502 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
13506 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13507 x
= build_sender_ref (ovar
, ctx
);
13510 if (is_gimple_omp_oacc (ctx
->stmt
))
13512 gcc_assert (tkind
== GOMP_MAP_USE_DEVICE_PTR
);
13514 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
))
13515 tkind
= GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT
;
13518 type
= TREE_TYPE (ovar
);
13519 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13520 var
= lang_hooks
.decls
.omp_array_data (var
, false);
13521 else if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13522 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13523 && !omp_privatize_by_reference (ovar
)
13524 && !omp_is_allocatable_or_ptr (ovar
))
13525 || TREE_CODE (type
) == ARRAY_TYPE
)
13526 var
= build_fold_addr_expr (var
);
13529 if (omp_privatize_by_reference (ovar
)
13530 || omp_check_optional_argument (ovar
, false)
13531 || omp_is_allocatable_or_ptr (ovar
))
13533 type
= TREE_TYPE (type
);
13534 if (POINTER_TYPE_P (type
)
13535 && TREE_CODE (type
) != ARRAY_TYPE
13536 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13537 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
13538 && !omp_is_allocatable_or_ptr (ovar
))
13539 || (omp_privatize_by_reference (ovar
)
13540 && omp_is_allocatable_or_ptr (ovar
))))
13541 var
= build_simple_mem_ref (var
);
13542 var
= fold_convert (TREE_TYPE (x
), var
);
13546 present
= omp_check_optional_argument (ovar
, true);
13549 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13550 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13551 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13552 tree new_x
= unshare_expr (x
);
13553 gimplify_expr (&present
, &ilist
, NULL
, is_gimple_val
,
13555 gcond
*cond
= gimple_build_cond_from_tree (present
,
13558 gimple_seq_add_stmt (&ilist
, cond
);
13559 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_label
));
13560 gimplify_assign (new_x
, null_pointer_node
, &ilist
);
13561 gimple_seq_add_stmt (&ilist
, gimple_build_goto (opt_arg_label
));
13562 gimple_seq_add_stmt (&ilist
,
13563 gimple_build_label (notnull_label
));
13564 gimplify_assign (x
, var
, &ilist
);
13565 gimple_seq_add_stmt (&ilist
,
13566 gimple_build_label (opt_arg_label
));
13569 gimplify_assign (x
, var
, &ilist
);
13571 purpose
= size_int (map_idx
++);
13572 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13573 gcc_checking_assert (tkind
13574 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13575 gcc_checking_assert (tkind
13576 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13577 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13578 build_int_cstu (tkind_type
, tkind
));
13582 gcc_assert (map_idx
== map_cnt
);
13584 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
13585 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
13586 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
13587 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
13588 for (int i
= 1; i
<= 2; i
++)
13589 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
13591 gimple_seq initlist
= NULL
;
13592 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
13593 TREE_VEC_ELT (t
, i
)),
13594 &initlist
, true, NULL_TREE
);
13595 gimple_seq_add_seq (&ilist
, initlist
);
13597 tree clobber
= build_clobber (TREE_TYPE (TREE_VEC_ELT (t
, i
)));
13598 gimple_seq_add_stmt (&olist
,
13599 gimple_build_assign (TREE_VEC_ELT (t
, i
),
13602 else if (omp_maybe_offloaded_ctx (ctx
->outer
))
13604 tree id
= get_identifier ("omp declare target");
13605 tree decl
= TREE_VEC_ELT (t
, i
);
13606 DECL_ATTRIBUTES (decl
)
13607 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
13608 varpool_node
*node
= varpool_node::get (decl
);
13611 node
->offloadable
= 1;
13612 if (ENABLE_OFFLOADING
)
13614 g
->have_offload
= true;
13615 vec_safe_push (offload_vars
, t
);
13620 tree clobber
= build_clobber (ctx
->record_type
);
13621 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
13625 /* Once all the expansions are done, sequence all the different
13626 fragments inside gimple_omp_body. */
13631 && ctx
->record_type
)
13633 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
13634 /* fixup_child_record_type might have changed receiver_decl's type. */
13635 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
13636 gimple_seq_add_stmt (&new_body
,
13637 gimple_build_assign (ctx
->receiver_decl
, t
));
13639 gimple_seq_add_seq (&new_body
, fplist
);
13641 if (offloaded
|| data_region
)
13643 tree prev
= NULL_TREE
;
13644 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13645 switch (OMP_CLAUSE_CODE (c
))
13650 case OMP_CLAUSE_FIRSTPRIVATE
:
13651 omp_firstprivatize_data_region
:
13652 if (is_gimple_omp_oacc (ctx
->stmt
))
13654 var
= OMP_CLAUSE_DECL (c
);
13655 if (omp_privatize_by_reference (var
)
13656 || is_gimple_reg_type (TREE_TYPE (var
)))
13658 tree new_var
= lookup_decl (var
, ctx
);
13660 type
= TREE_TYPE (var
);
13661 if (omp_privatize_by_reference (var
))
13662 type
= TREE_TYPE (type
);
13663 if ((INTEGRAL_TYPE_P (type
)
13664 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
13665 || TREE_CODE (type
) == POINTER_TYPE
)
13667 x
= build_receiver_ref (var
, false, ctx
);
13668 if (TREE_CODE (type
) != POINTER_TYPE
)
13669 x
= fold_convert (pointer_sized_int_node
, x
);
13670 x
= fold_convert (type
, x
);
13671 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13673 if (omp_privatize_by_reference (var
))
13675 tree v
= create_tmp_var_raw (type
, get_name (var
));
13676 gimple_add_tmp_var (v
);
13677 TREE_ADDRESSABLE (v
) = 1;
13678 gimple_seq_add_stmt (&new_body
,
13679 gimple_build_assign (v
, x
));
13680 x
= build_fold_addr_expr (v
);
13682 gimple_seq_add_stmt (&new_body
,
13683 gimple_build_assign (new_var
, x
));
13687 bool by_ref
= !omp_privatize_by_reference (var
);
13688 x
= build_receiver_ref (var
, by_ref
, ctx
);
13689 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13691 gimple_seq_add_stmt (&new_body
,
13692 gimple_build_assign (new_var
, x
));
13695 else if (is_variable_sized (var
))
13697 tree pvar
= DECL_VALUE_EXPR (var
);
13698 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13699 pvar
= TREE_OPERAND (pvar
, 0);
13700 gcc_assert (DECL_P (pvar
));
13701 tree new_var
= lookup_decl (pvar
, ctx
);
13702 x
= build_receiver_ref (var
, false, ctx
);
13703 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13704 gimple_seq_add_stmt (&new_body
,
13705 gimple_build_assign (new_var
, x
));
13708 case OMP_CLAUSE_PRIVATE
:
13709 if (is_gimple_omp_oacc (ctx
->stmt
))
13711 var
= OMP_CLAUSE_DECL (c
);
13712 if (omp_privatize_by_reference (var
))
13714 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13715 tree new_var
= lookup_decl (var
, ctx
);
13716 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
13717 if (TREE_CONSTANT (x
))
13719 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
13721 gimple_add_tmp_var (x
);
13722 TREE_ADDRESSABLE (x
) = 1;
13723 x
= build_fold_addr_expr_loc (clause_loc
, x
);
13728 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
13729 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13730 gimple_seq_add_stmt (&new_body
,
13731 gimple_build_assign (new_var
, x
));
13734 case OMP_CLAUSE_USE_DEVICE_PTR
:
13735 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13736 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
13737 case OMP_CLAUSE_IS_DEVICE_PTR
:
13739 gimple_seq assign_body
;
13740 bool is_array_data
;
13741 bool do_optional_check
;
13742 assign_body
= NULL
;
13743 do_optional_check
= false;
13744 var
= OMP_CLAUSE_DECL (c
);
13745 is_array_data
= lang_hooks
.decls
.omp_array_data (var
, true) != NULL
;
13746 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
&& is_array_data
)
13747 goto omp_firstprivatize_data_region
;
13749 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13750 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13751 x
= build_sender_ref (is_array_data
13752 ? (splay_tree_key
) &DECL_NAME (var
)
13753 : (splay_tree_key
) &DECL_UID (var
), ctx
);
13756 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13758 while (TREE_CODE (var
) == INDIRECT_REF
13759 || TREE_CODE (var
) == ARRAY_REF
)
13760 var
= TREE_OPERAND (var
, 0);
13762 x
= build_receiver_ref (var
, false, ctx
);
13767 bool is_ref
= omp_privatize_by_reference (var
);
13768 do_optional_check
= true;
13769 /* First, we copy the descriptor data from the host; then
13770 we update its data to point to the target address. */
13771 new_var
= lookup_decl (var
, ctx
);
13772 new_var
= DECL_VALUE_EXPR (new_var
);
13775 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
13776 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
)
13777 v2
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
13781 v2
= build_fold_indirect_ref (v2
);
13782 v
= create_tmp_var_raw (TREE_TYPE (v2
), get_name (var
));
13783 gimple_add_tmp_var (v
);
13784 TREE_ADDRESSABLE (v
) = 1;
13785 gimplify_assign (v
, v2
, &assign_body
);
13786 tree rhs
= build_fold_addr_expr (v
);
13787 gimple_seq_add_stmt (&assign_body
,
13788 gimple_build_assign (new_var
, rhs
));
13791 gimplify_assign (new_var
, v2
, &assign_body
);
13793 v2
= lang_hooks
.decls
.omp_array_data (unshare_expr (v
), false);
13795 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13796 gimple_seq_add_stmt (&assign_body
,
13797 gimple_build_assign (v2
, x
));
13799 else if (is_variable_sized (var
))
13801 tree pvar
= DECL_VALUE_EXPR (var
);
13802 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13803 pvar
= TREE_OPERAND (pvar
, 0);
13804 gcc_assert (DECL_P (pvar
));
13805 new_var
= lookup_decl (pvar
, ctx
);
13806 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13807 gimple_seq_add_stmt (&assign_body
,
13808 gimple_build_assign (new_var
, x
));
13810 else if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13811 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13812 && !omp_privatize_by_reference (var
)
13813 && !omp_is_allocatable_or_ptr (var
))
13814 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
13816 new_var
= lookup_decl (var
, ctx
);
13817 new_var
= DECL_VALUE_EXPR (new_var
);
13818 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
13819 new_var
= TREE_OPERAND (new_var
, 0);
13820 gcc_assert (DECL_P (new_var
));
13821 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13822 gimple_seq_add_stmt (&assign_body
,
13823 gimple_build_assign (new_var
, x
));
13827 tree type
= TREE_TYPE (var
);
13828 new_var
= lookup_decl (var
, ctx
);
13829 if (omp_privatize_by_reference (var
))
13831 type
= TREE_TYPE (type
);
13832 if (POINTER_TYPE_P (type
)
13833 && TREE_CODE (type
) != ARRAY_TYPE
13834 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13835 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13836 || (omp_privatize_by_reference (var
)
13837 && omp_is_allocatable_or_ptr (var
))))
13839 tree v
= create_tmp_var_raw (type
, get_name (var
));
13840 gimple_add_tmp_var (v
);
13841 TREE_ADDRESSABLE (v
) = 1;
13842 x
= fold_convert (type
, x
);
13843 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
,
13845 gimple_seq_add_stmt (&assign_body
,
13846 gimple_build_assign (v
, x
));
13847 x
= build_fold_addr_expr (v
);
13848 do_optional_check
= true;
13851 new_var
= DECL_VALUE_EXPR (new_var
);
13852 x
= fold_convert (TREE_TYPE (new_var
), x
);
13853 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13854 gimple_seq_add_stmt (&assign_body
,
13855 gimple_build_assign (new_var
, x
));
13858 present
= ((do_optional_check
13859 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
13860 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
13861 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c
), true)
13865 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13866 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13867 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13868 glabel
*null_glabel
= gimple_build_label (null_label
);
13869 glabel
*notnull_glabel
= gimple_build_label (notnull_label
);
13870 ggoto
*opt_arg_ggoto
= gimple_build_goto (opt_arg_label
);
13871 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13873 gimplify_expr (&present
, &new_body
, NULL
, is_gimple_val
,
13875 gcond
*cond
= gimple_build_cond_from_tree (present
,
13878 gimple_seq_add_stmt (&new_body
, cond
);
13879 gimple_seq_add_stmt (&new_body
, null_glabel
);
13880 gimplify_assign (new_var
, null_pointer_node
, &new_body
);
13881 gimple_seq_add_stmt (&new_body
, opt_arg_ggoto
);
13882 gimple_seq_add_stmt (&new_body
, notnull_glabel
);
13883 gimple_seq_add_seq (&new_body
, assign_body
);
13884 gimple_seq_add_stmt (&new_body
,
13885 gimple_build_label (opt_arg_label
));
13888 gimple_seq_add_seq (&new_body
, assign_body
);
13891 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13892 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13893 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13894 or references to VLAs. */
13895 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13896 switch (OMP_CLAUSE_CODE (c
))
13901 case OMP_CLAUSE_MAP
:
13902 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13903 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
13905 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13906 poly_int64 offset
= 0;
13908 var
= OMP_CLAUSE_DECL (c
);
13910 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
13911 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
13913 && varpool_node::get_create (var
)->offloadable
)
13915 if (TREE_CODE (var
) == INDIRECT_REF
13916 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
13917 var
= TREE_OPERAND (var
, 0);
13918 if (TREE_CODE (var
) == COMPONENT_REF
)
13920 var
= get_addr_base_and_unit_offset (var
, &offset
);
13921 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
13923 else if (DECL_SIZE (var
)
13924 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
13926 tree var2
= DECL_VALUE_EXPR (var
);
13927 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
13928 var2
= TREE_OPERAND (var2
, 0);
13929 gcc_assert (DECL_P (var2
));
13932 tree new_var
= lookup_decl (var
, ctx
), x
;
13933 tree type
= TREE_TYPE (new_var
);
13935 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
13936 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
13939 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
13941 new_var
= build2 (MEM_REF
, type
,
13942 build_fold_addr_expr (new_var
),
13943 build_int_cst (build_pointer_type (type
),
13946 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
13948 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
13949 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
13950 new_var
= build2 (MEM_REF
, type
,
13951 build_fold_addr_expr (new_var
),
13952 build_int_cst (build_pointer_type (type
),
13956 is_ref
= omp_privatize_by_reference (var
);
13957 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
13959 bool ref_to_array
= false;
13960 bool ref_to_ptr
= false;
13963 type
= TREE_TYPE (type
);
13964 if (TREE_CODE (type
) == ARRAY_TYPE
)
13966 type
= build_pointer_type (type
);
13967 ref_to_array
= true;
13970 else if (TREE_CODE (type
) == ARRAY_TYPE
)
13972 tree decl2
= DECL_VALUE_EXPR (new_var
);
13973 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
13974 decl2
= TREE_OPERAND (decl2
, 0);
13975 gcc_assert (DECL_P (decl2
));
13977 type
= TREE_TYPE (new_var
);
13979 else if (TREE_CODE (type
) == REFERENCE_TYPE
13980 && TREE_CODE (TREE_TYPE (type
)) == POINTER_TYPE
)
13982 type
= TREE_TYPE (type
);
13985 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
13986 x
= fold_convert_loc (clause_loc
, type
, x
);
13987 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
13989 tree bias
= OMP_CLAUSE_SIZE (c
);
13991 bias
= lookup_decl (bias
, ctx
);
13992 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
13993 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
13995 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
13996 TREE_TYPE (x
), x
, bias
);
13999 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
14000 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
14001 if ((is_ref
&& !ref_to_array
)
14004 tree t
= create_tmp_var_raw (type
, get_name (var
));
14005 gimple_add_tmp_var (t
);
14006 TREE_ADDRESSABLE (t
) = 1;
14007 gimple_seq_add_stmt (&new_body
,
14008 gimple_build_assign (t
, x
));
14009 x
= build_fold_addr_expr_loc (clause_loc
, t
);
14011 gimple_seq_add_stmt (&new_body
,
14012 gimple_build_assign (new_var
, x
));
14015 else if (OMP_CLAUSE_CHAIN (c
)
14016 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
14018 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
14019 == GOMP_MAP_FIRSTPRIVATE_POINTER
14020 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
14021 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
14024 case OMP_CLAUSE_PRIVATE
:
14025 var
= OMP_CLAUSE_DECL (c
);
14026 if (is_variable_sized (var
))
14028 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
14029 tree new_var
= lookup_decl (var
, ctx
);
14030 tree pvar
= DECL_VALUE_EXPR (var
);
14031 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
14032 pvar
= TREE_OPERAND (pvar
, 0);
14033 gcc_assert (DECL_P (pvar
));
14034 tree new_pvar
= lookup_decl (pvar
, ctx
);
14035 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
14036 tree al
= size_int (DECL_ALIGN (var
));
14037 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
14038 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
14039 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
14040 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
14041 gimple_seq_add_stmt (&new_body
,
14042 gimple_build_assign (new_pvar
, x
));
14044 else if (omp_privatize_by_reference (var
)
14045 && !is_gimple_omp_oacc (ctx
->stmt
))
14047 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
14048 tree new_var
= lookup_decl (var
, ctx
);
14049 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
14050 if (TREE_CONSTANT (x
))
14055 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
14056 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
14057 tree al
= size_int (TYPE_ALIGN (rtype
));
14058 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
14061 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
14062 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
14063 gimple_seq_add_stmt (&new_body
,
14064 gimple_build_assign (new_var
, x
));
14069 gimple_seq fork_seq
= NULL
;
14070 gimple_seq join_seq
= NULL
;
14072 if (offloaded
&& is_gimple_omp_oacc (ctx
->stmt
))
14074 /* If there are reductions on the offloaded region itself, treat
14075 them as a dummy GANG loop. */
14076 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
14078 gcall
*private_marker
= lower_oacc_private_marker (ctx
);
14080 if (private_marker
)
14081 gimple_call_set_arg (private_marker
, 2, level
);
14083 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
14084 false, NULL
, private_marker
, NULL
, &fork_seq
,
14088 gimple_seq_add_seq (&new_body
, fork_seq
);
14089 gimple_seq_add_seq (&new_body
, tgt_body
);
14090 gimple_seq_add_seq (&new_body
, join_seq
);
14094 new_body
= maybe_catch_exception (new_body
);
14095 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
14097 gimple_omp_set_body (stmt
, new_body
);
14100 bind
= gimple_build_bind (NULL
, NULL
,
14101 tgt_bind
? gimple_bind_block (tgt_bind
)
14103 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
14104 gimple_bind_add_seq (bind
, ilist
);
14105 gimple_bind_add_stmt (bind
, stmt
);
14106 gimple_bind_add_seq (bind
, olist
);
14108 pop_gimplify_context (NULL
);
14112 gimple_bind_add_seq (dep_bind
, dep_ilist
);
14113 gimple_bind_add_stmt (dep_bind
, bind
);
14114 gimple_bind_add_seq (dep_bind
, dep_olist
);
14115 pop_gimplify_context (dep_bind
);
14119 /* Expand code for an OpenMP teams directive. */
14122 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
14124 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
14125 push_gimplify_context ();
14127 tree block
= make_node (BLOCK
);
14128 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
14129 gsi_replace (gsi_p
, bind
, true);
14130 gimple_seq bind_body
= NULL
;
14131 gimple_seq dlist
= NULL
;
14132 gimple_seq olist
= NULL
;
14134 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
14135 OMP_CLAUSE_NUM_TEAMS
);
14136 tree num_teams_lower
= NULL_TREE
;
14137 if (num_teams
== NULL_TREE
)
14138 num_teams
= build_int_cst (unsigned_type_node
, 0);
14141 num_teams_lower
= OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams
);
14142 if (num_teams_lower
)
14144 num_teams_lower
= fold_convert (unsigned_type_node
, num_teams_lower
);
14145 gimplify_expr (&num_teams_lower
, &bind_body
, NULL
, is_gimple_val
,
14148 num_teams
= OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams
);
14149 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
14150 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
14152 if (num_teams_lower
== NULL_TREE
)
14153 num_teams_lower
= num_teams
;
14154 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
14155 OMP_CLAUSE_THREAD_LIMIT
);
14156 if (thread_limit
== NULL_TREE
)
14157 thread_limit
= build_int_cst (unsigned_type_node
, 0);
14160 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
14161 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
14162 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
14165 location_t loc
= gimple_location (teams_stmt
);
14166 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4
);
14167 tree rettype
= TREE_TYPE (TREE_TYPE (decl
));
14168 tree first
= create_tmp_var (rettype
);
14169 gimple_seq_add_stmt (&bind_body
,
14170 gimple_build_assign (first
, build_one_cst (rettype
)));
14171 tree llabel
= create_artificial_label (loc
);
14172 gimple_seq_add_stmt (&bind_body
, gimple_build_label (llabel
));
14174 = gimple_build_call (decl
, 4, num_teams_lower
, num_teams
, thread_limit
,
14176 gimple_set_location (call
, loc
);
14177 tree temp
= create_tmp_var (rettype
);
14178 gimple_call_set_lhs (call
, temp
);
14179 gimple_seq_add_stmt (&bind_body
, call
);
14181 tree tlabel
= create_artificial_label (loc
);
14182 tree flabel
= create_artificial_label (loc
);
14183 gimple
*cond
= gimple_build_cond (NE_EXPR
, temp
, build_zero_cst (rettype
),
14185 gimple_seq_add_stmt (&bind_body
, cond
);
14186 gimple_seq_add_stmt (&bind_body
, gimple_build_label (tlabel
));
14187 gimple_seq_add_stmt (&bind_body
,
14188 gimple_build_assign (first
, build_zero_cst (rettype
)));
14190 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
14191 &bind_body
, &dlist
, ctx
, NULL
);
14192 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
14193 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
,
14195 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
14197 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
14198 gimple_omp_set_body (teams_stmt
, NULL
);
14199 gimple_seq_add_seq (&bind_body
, olist
);
14200 gimple_seq_add_seq (&bind_body
, dlist
);
14201 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
14202 gimple_seq_add_stmt (&bind_body
, gimple_build_goto (llabel
));
14203 gimple_seq_add_stmt (&bind_body
, gimple_build_label (flabel
));
14204 gimple_bind_set_body (bind
, bind_body
);
14206 pop_gimplify_context (bind
);
14208 gimple_bind_append_vars (bind
, ctx
->block_vars
);
14209 BLOCK_VARS (block
) = ctx
->block_vars
;
14210 if (BLOCK_VARS (block
))
14211 TREE_USED (block
) = 1;
14214 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
14215 regimplified. If DATA is non-NULL, lower_omp_1 is outside
14216 of OMP context, but with make_addressable_vars set. */
14219 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
14224 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
14225 if ((VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
14227 && DECL_HAS_VALUE_EXPR_P (t
))
14230 if (make_addressable_vars
14232 && bitmap_bit_p (make_addressable_vars
, DECL_UID (t
)))
14235 /* If a global variable has been privatized, TREE_CONSTANT on
14236 ADDR_EXPR might be wrong. */
14237 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
14238 recompute_tree_invariant_for_addr_expr (t
);
14240 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
14244 /* Data to be communicated between lower_omp_regimplify_operands and
14245 lower_omp_regimplify_operands_p. */
14247 struct lower_omp_regimplify_operands_data
14253 /* Helper function for lower_omp_regimplify_operands. Find
14254 omp_member_access_dummy_var vars and adjust temporarily their
14255 DECL_VALUE_EXPRs if needed. */
14258 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
14261 tree t
= omp_member_access_dummy_var (*tp
);
14264 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
14265 lower_omp_regimplify_operands_data
*ldata
14266 = (lower_omp_regimplify_operands_data
*) wi
->info
;
14267 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
14270 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
14271 ldata
->decls
->safe_push (*tp
);
14272 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
14273 SET_DECL_VALUE_EXPR (*tp
, v
);
14276 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
14280 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14281 of omp_member_access_dummy_var vars during regimplification. */
14284 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
14285 gimple_stmt_iterator
*gsi_p
)
14287 auto_vec
<tree
, 10> decls
;
14290 struct walk_stmt_info wi
;
14291 memset (&wi
, '\0', sizeof (wi
));
14292 struct lower_omp_regimplify_operands_data data
;
14294 data
.decls
= &decls
;
14296 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
14298 gimple_regimplify_operands (stmt
, gsi_p
);
14299 while (!decls
.is_empty ())
14301 tree t
= decls
.pop ();
14302 tree v
= decls
.pop ();
14303 SET_DECL_VALUE_EXPR (t
, v
);
14308 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
14310 gimple
*stmt
= gsi_stmt (*gsi_p
);
14311 struct walk_stmt_info wi
;
14314 if (gimple_has_location (stmt
))
14315 input_location
= gimple_location (stmt
);
14317 if (make_addressable_vars
)
14318 memset (&wi
, '\0', sizeof (wi
));
14320 /* If we have issued syntax errors, avoid doing any heavy lifting.
14321 Just replace the OMP directives with a NOP to avoid
14322 confusing RTL expansion. */
14323 if (seen_error () && is_gimple_omp (stmt
))
14325 gsi_replace (gsi_p
, gimple_build_nop (), true);
14329 switch (gimple_code (stmt
))
14333 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
14334 if ((ctx
|| make_addressable_vars
)
14335 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
14336 lower_omp_regimplify_p
,
14337 ctx
? NULL
: &wi
, NULL
)
14338 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
14339 lower_omp_regimplify_p
,
14340 ctx
? NULL
: &wi
, NULL
)))
14341 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
14345 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
14347 case GIMPLE_EH_FILTER
:
14348 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
14351 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
14352 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
14354 case GIMPLE_ASSUME
:
14355 lower_omp (gimple_assume_body_ptr (stmt
), ctx
);
14357 case GIMPLE_TRANSACTION
:
14358 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
14362 if (ctx
&& is_gimple_omp_oacc (ctx
->stmt
))
14364 tree vars
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
14365 oacc_privatization_scan_decl_chain (ctx
, vars
);
14367 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
14368 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
14370 case GIMPLE_OMP_PARALLEL
:
14371 case GIMPLE_OMP_TASK
:
14372 ctx
= maybe_lookup_ctx (stmt
);
14374 if (ctx
->cancellable
)
14375 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14376 lower_omp_taskreg (gsi_p
, ctx
);
14378 case GIMPLE_OMP_FOR
:
14379 ctx
= maybe_lookup_ctx (stmt
);
14381 if (ctx
->cancellable
)
14382 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14383 lower_omp_for (gsi_p
, ctx
);
14385 case GIMPLE_OMP_SECTIONS
:
14386 ctx
= maybe_lookup_ctx (stmt
);
14388 if (ctx
->cancellable
)
14389 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14390 lower_omp_sections (gsi_p
, ctx
);
14392 case GIMPLE_OMP_SCOPE
:
14393 ctx
= maybe_lookup_ctx (stmt
);
14395 lower_omp_scope (gsi_p
, ctx
);
14397 case GIMPLE_OMP_SINGLE
:
14398 ctx
= maybe_lookup_ctx (stmt
);
14400 lower_omp_single (gsi_p
, ctx
);
14402 case GIMPLE_OMP_STRUCTURED_BLOCK
:
14403 /* We have already done error checking at this point, so these nodes
14404 can be completely removed and replaced with their body. */
14405 ctx
= maybe_lookup_ctx (stmt
);
14407 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
14408 gsi_replace_with_seq (gsi_p
, gimple_omp_body (stmt
), true);
14410 case GIMPLE_OMP_MASTER
:
14411 case GIMPLE_OMP_MASKED
:
14412 ctx
= maybe_lookup_ctx (stmt
);
14414 lower_omp_master (gsi_p
, ctx
);
14416 case GIMPLE_OMP_TASKGROUP
:
14417 ctx
= maybe_lookup_ctx (stmt
);
14419 lower_omp_taskgroup (gsi_p
, ctx
);
14421 case GIMPLE_OMP_ORDERED
:
14422 ctx
= maybe_lookup_ctx (stmt
);
14424 lower_omp_ordered (gsi_p
, ctx
);
14426 case GIMPLE_OMP_SCAN
:
14427 ctx
= maybe_lookup_ctx (stmt
);
14429 lower_omp_scan (gsi_p
, ctx
);
14431 case GIMPLE_OMP_CRITICAL
:
14432 ctx
= maybe_lookup_ctx (stmt
);
14434 lower_omp_critical (gsi_p
, ctx
);
14436 case GIMPLE_OMP_ATOMIC_LOAD
:
14437 if ((ctx
|| make_addressable_vars
)
14438 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14439 as_a
<gomp_atomic_load
*> (stmt
)),
14440 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
14441 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
14443 case GIMPLE_OMP_TARGET
:
14444 ctx
= maybe_lookup_ctx (stmt
);
14446 lower_omp_target (gsi_p
, ctx
);
14448 case GIMPLE_OMP_TEAMS
:
14449 ctx
= maybe_lookup_ctx (stmt
);
14451 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
14452 lower_omp_taskreg (gsi_p
, ctx
);
14454 lower_omp_teams (gsi_p
, ctx
);
14458 call_stmt
= as_a
<gcall
*> (stmt
);
14459 fndecl
= gimple_call_fndecl (call_stmt
);
14461 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
14462 switch (DECL_FUNCTION_CODE (fndecl
))
14464 case BUILT_IN_GOMP_BARRIER
:
14468 case BUILT_IN_GOMP_CANCEL
:
14469 case BUILT_IN_GOMP_CANCELLATION_POINT
:
14472 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
14473 cctx
= cctx
->outer
;
14474 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
14475 if (!cctx
->cancellable
)
14477 if (DECL_FUNCTION_CODE (fndecl
)
14478 == BUILT_IN_GOMP_CANCELLATION_POINT
)
14480 stmt
= gimple_build_nop ();
14481 gsi_replace (gsi_p
, stmt
, false);
14485 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
14487 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
14488 gimple_call_set_fndecl (call_stmt
, fndecl
);
14489 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
14492 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
14493 gimple_call_set_lhs (call_stmt
, lhs
);
14494 tree fallthru_label
;
14495 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
14497 g
= gimple_build_label (fallthru_label
);
14498 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14499 g
= gimple_build_cond (NE_EXPR
, lhs
,
14500 fold_convert (TREE_TYPE (lhs
),
14501 boolean_false_node
),
14502 cctx
->cancel_label
, fallthru_label
);
14503 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14510 case GIMPLE_ASSIGN
:
14511 for (omp_context
*up
= ctx
; up
; up
= up
->outer
)
14513 if (gimple_code (up
->stmt
) == GIMPLE_OMP_ORDERED
14514 || gimple_code (up
->stmt
) == GIMPLE_OMP_CRITICAL
14515 || gimple_code (up
->stmt
) == GIMPLE_OMP_TASKGROUP
14516 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCOPE
14517 || gimple_code (up
->stmt
) == GIMPLE_OMP_SECTION
14518 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
14519 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
14520 && (gimple_omp_target_kind (up
->stmt
)
14521 == GF_OMP_TARGET_KIND_DATA
)))
14523 else if (!up
->lastprivate_conditional_map
)
14525 tree lhs
= get_base_address (gimple_assign_lhs (stmt
));
14526 if (TREE_CODE (lhs
) == MEM_REF
14527 && DECL_P (TREE_OPERAND (lhs
, 0))
14528 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs
,
14529 0))) == REFERENCE_TYPE
)
14530 lhs
= TREE_OPERAND (lhs
, 0);
14532 if (tree
*v
= up
->lastprivate_conditional_map
->get (lhs
))
14535 if (up
->combined_into_simd_safelen1
)
14538 if (gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
)
14541 if (gimple_code (up
->stmt
) == GIMPLE_OMP_FOR
)
14542 clauses
= gimple_omp_for_clauses (up
->stmt
);
14544 clauses
= gimple_omp_sections_clauses (up
->stmt
);
14545 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__CONDTEMP_
);
14546 if (!OMP_CLAUSE__CONDTEMP__ITER (c
))
14547 c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
14548 OMP_CLAUSE__CONDTEMP_
);
14549 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c
));
14550 gimple
*g
= gimple_build_assign (*v
, OMP_CLAUSE_DECL (c
));
14551 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14558 if ((ctx
|| make_addressable_vars
)
14559 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
14562 /* Just remove clobbers, this should happen only if we have
14563 "privatized" local addressable variables in SIMD regions,
14564 the clobber isn't needed in that case and gimplifying address
14565 of the ARRAY_REF into a pointer and creating MEM_REF based
14566 clobber would create worse code than we get with the clobber
14568 if (gimple_clobber_p (stmt
))
14570 gsi_replace (gsi_p
, gimple_build_nop (), true);
14573 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
14580 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
14582 location_t saved_location
= input_location
;
14583 gimple_stmt_iterator gsi
;
14584 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
14585 lower_omp_1 (&gsi
, ctx
);
14586 /* During gimplification, we haven't folded statments inside offloading
14587 or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now. */
14588 if (target_nesting_level
|| taskreg_nesting_level
)
14589 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
14591 input_location
= saved_location
;
14594 /* Main entry point. */
14596 static unsigned int
14597 execute_lower_omp (void)
14603 /* This pass always runs, to provide PROP_gimple_lomp.
14604 But often, there is nothing to do. */
14605 if (flag_openacc
== 0 && flag_openmp
== 0
14606 && flag_openmp_simd
== 0)
14609 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
14610 delete_omp_context
);
14612 body
= gimple_body (current_function_decl
);
14614 scan_omp (&body
, NULL
);
14615 gcc_assert (taskreg_nesting_level
== 0);
14616 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
14617 finish_taskreg_scan (ctx
);
14618 taskreg_contexts
.release ();
14620 if (all_contexts
->root
)
14622 if (make_addressable_vars
)
14623 push_gimplify_context ();
14624 lower_omp (&body
, NULL
);
14625 if (make_addressable_vars
)
14626 pop_gimplify_context (NULL
);
14631 splay_tree_delete (all_contexts
);
14632 all_contexts
= NULL
;
14634 BITMAP_FREE (make_addressable_vars
);
14635 BITMAP_FREE (global_nonaddressable_vars
);
14637 /* If current function is a method, remove artificial dummy VAR_DECL created
14638 for non-static data member privatization, they aren't needed for
14639 debuginfo nor anything else, have been already replaced everywhere in the
14640 IL and cause problems with LTO. */
14641 if (DECL_ARGUMENTS (current_function_decl
)
14642 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
14643 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
14645 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
14647 for (auto task_stmt
: task_cpyfns
)
14648 finalize_task_copyfn (task_stmt
);
14649 task_cpyfns
.release ();
14655 const pass_data pass_data_lower_omp
=
14657 GIMPLE_PASS
, /* type */
14658 "omplower", /* name */
14659 OPTGROUP_OMP
, /* optinfo_flags */
14660 TV_NONE
, /* tv_id */
14661 PROP_gimple_any
, /* properties_required */
14662 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
14663 0, /* properties_destroyed */
14664 0, /* todo_flags_start */
14665 0, /* todo_flags_finish */
14668 class pass_lower_omp
: public gimple_opt_pass
14671 pass_lower_omp (gcc::context
*ctxt
)
14672 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
14675 /* opt_pass methods: */
14676 unsigned int execute (function
*) final override
14678 return execute_lower_omp ();
14681 }; // class pass_lower_omp
14683 } // anon namespace
14686 make_pass_lower_omp (gcc::context
*ctxt
)
14688 return new pass_lower_omp (ctxt
);
14691 /* The following is a utility to diagnose structured block violations.
14692 It is not part of the "omplower" pass, as that's invoked too late. It
14693 should be invoked by the respective front ends after gimplification. */
14695 static splay_tree all_labels
;
14697 /* Check for mismatched contexts and generate an error if needed. Return
14698 true if an error is detected. */
14701 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
14702 gimple
*branch_ctx
, gimple
*label_ctx
)
14704 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
14705 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
14707 if (label_ctx
== branch_ctx
)
14710 const char* kind
= NULL
;
14714 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
14715 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
14717 gcc_checking_assert (kind
== NULL
);
14723 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
14727 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14728 so we could traverse it and issue a correct "exit" or "enter" error
14729 message upon a structured block violation.
14731 We built the context by building a list with tree_cons'ing, but there is
14732 no easy counterpart in gimple tuples. It seems like far too much work
14733 for issuing exit/enter error messages. If someone really misses the
14734 distinct error message... patches welcome. */
14737 /* Try to avoid confusing the user by producing and error message
14738 with correct "exit" or "enter" verbiage. We prefer "exit"
14739 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14740 if (branch_ctx
== NULL
)
14746 if (TREE_VALUE (label_ctx
) == branch_ctx
)
14751 label_ctx
= TREE_CHAIN (label_ctx
);
14756 error ("invalid exit from %s structured block", kind
);
14758 error ("invalid entry to %s structured block", kind
);
14761 /* If it's obvious we have an invalid entry, be specific about the error. */
14762 if (branch_ctx
== NULL
)
14763 error ("invalid entry to %s structured block", kind
);
14766 /* Otherwise, be vague and lazy, but efficient. */
14767 error ("invalid branch to/from %s structured block", kind
);
14770 gsi_replace (gsi_p
, gimple_build_nop (), false);
14774 /* Pass 1: Create a minimal tree of structured blocks, and record
14775 where each label is found. */
14778 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
14779 struct walk_stmt_info
*wi
)
14781 gimple
*context
= (gimple
*) wi
->info
;
14782 gimple
*inner_context
;
14783 gimple
*stmt
= gsi_stmt (*gsi_p
);
14785 *handled_ops_p
= true;
14787 switch (gimple_code (stmt
))
14791 case GIMPLE_OMP_PARALLEL
:
14792 case GIMPLE_OMP_TASK
:
14793 case GIMPLE_OMP_SCOPE
:
14794 case GIMPLE_OMP_SECTIONS
:
14795 case GIMPLE_OMP_SINGLE
:
14796 case GIMPLE_OMP_SECTION
:
14797 case GIMPLE_OMP_STRUCTURED_BLOCK
:
14798 case GIMPLE_OMP_MASTER
:
14799 case GIMPLE_OMP_MASKED
:
14800 case GIMPLE_OMP_ORDERED
:
14801 case GIMPLE_OMP_SCAN
:
14802 case GIMPLE_OMP_CRITICAL
:
14803 case GIMPLE_OMP_TARGET
:
14804 case GIMPLE_OMP_TEAMS
:
14805 case GIMPLE_OMP_TASKGROUP
:
14806 /* The minimal context here is just the current OMP construct. */
14807 inner_context
= stmt
;
14808 wi
->info
= inner_context
;
14809 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
14810 wi
->info
= context
;
14813 case GIMPLE_OMP_FOR
:
14814 inner_context
= stmt
;
14815 wi
->info
= inner_context
;
14816 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14818 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
14819 diagnose_sb_1
, NULL
, wi
);
14820 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
14821 wi
->info
= context
;
14825 splay_tree_insert (all_labels
,
14826 (splay_tree_key
) gimple_label_label (
14827 as_a
<glabel
*> (stmt
)),
14828 (splay_tree_value
) context
);
14838 /* Pass 2: Check each branch and see if its context differs from that of
14839 the destination label's context. */
14842 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
14843 struct walk_stmt_info
*wi
)
14845 gimple
*context
= (gimple
*) wi
->info
;
14847 gimple
*stmt
= gsi_stmt (*gsi_p
);
14849 *handled_ops_p
= true;
14851 switch (gimple_code (stmt
))
14855 case GIMPLE_OMP_PARALLEL
:
14856 case GIMPLE_OMP_TASK
:
14857 case GIMPLE_OMP_SCOPE
:
14858 case GIMPLE_OMP_SECTIONS
:
14859 case GIMPLE_OMP_SINGLE
:
14860 case GIMPLE_OMP_SECTION
:
14861 case GIMPLE_OMP_STRUCTURED_BLOCK
:
14862 case GIMPLE_OMP_MASTER
:
14863 case GIMPLE_OMP_MASKED
:
14864 case GIMPLE_OMP_ORDERED
:
14865 case GIMPLE_OMP_SCAN
:
14866 case GIMPLE_OMP_CRITICAL
:
14867 case GIMPLE_OMP_TARGET
:
14868 case GIMPLE_OMP_TEAMS
:
14869 case GIMPLE_OMP_TASKGROUP
:
14871 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
14872 wi
->info
= context
;
14875 case GIMPLE_OMP_FOR
:
14877 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14879 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
14880 diagnose_sb_2
, NULL
, wi
);
14881 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
14882 wi
->info
= context
;
14887 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
14888 tree lab
= gimple_cond_true_label (cond_stmt
);
14891 n
= splay_tree_lookup (all_labels
,
14892 (splay_tree_key
) lab
);
14893 diagnose_sb_0 (gsi_p
, context
,
14894 n
? (gimple
*) n
->value
: NULL
);
14896 lab
= gimple_cond_false_label (cond_stmt
);
14899 n
= splay_tree_lookup (all_labels
,
14900 (splay_tree_key
) lab
);
14901 diagnose_sb_0 (gsi_p
, context
,
14902 n
? (gimple
*) n
->value
: NULL
);
14909 tree lab
= gimple_goto_dest (stmt
);
14910 if (TREE_CODE (lab
) != LABEL_DECL
)
14913 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
14914 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
14918 case GIMPLE_SWITCH
:
14920 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
14922 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
14924 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
14925 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
14926 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
14932 case GIMPLE_RETURN
:
14933 diagnose_sb_0 (gsi_p
, context
, NULL
);
14943 static unsigned int
14944 diagnose_omp_structured_block_errors (void)
14946 struct walk_stmt_info wi
;
14947 gimple_seq body
= gimple_body (current_function_decl
);
14949 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
14951 memset (&wi
, 0, sizeof (wi
));
14952 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
14954 memset (&wi
, 0, sizeof (wi
));
14955 wi
.want_locations
= true;
14956 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
14958 gimple_set_body (current_function_decl
, body
);
14960 splay_tree_delete (all_labels
);
14968 const pass_data pass_data_diagnose_omp_blocks
=
14970 GIMPLE_PASS
, /* type */
14971 "*diagnose_omp_blocks", /* name */
14972 OPTGROUP_OMP
, /* optinfo_flags */
14973 TV_NONE
, /* tv_id */
14974 PROP_gimple_any
, /* properties_required */
14975 0, /* properties_provided */
14976 0, /* properties_destroyed */
14977 0, /* todo_flags_start */
14978 0, /* todo_flags_finish */
14981 class pass_diagnose_omp_blocks
: public gimple_opt_pass
14984 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
14985 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
14988 /* opt_pass methods: */
14989 bool gate (function
*) final override
14991 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
14993 unsigned int execute (function
*) final override
14995 return diagnose_omp_structured_block_errors ();
14998 }; // class pass_diagnose_omp_blocks
15000 } // anon namespace
15003 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
15005 return new pass_diagnose_omp_blocks (ctxt
);
15009 #include "gt-omp-low.h"