/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "stringpool.h"
#include "attribs.h"
#include "omp-offload.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
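/* For orientation, a rough sketch (illustrative only; the exact record
   layout, generated names and libgomp entry points vary by construct and
   clauses): a construct such as

       #pragma omp parallel shared(n)
	 use (n);

   is outlined into a child function and lowered into something like

       struct .omp_data_s { int *n; } .omp_data_o;
       .omp_data_o.n = &n;
       GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);

   with this pass building the sender/receiver record types and remapping
   the variable references inside the construct body.  */
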
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.cc (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;

  /* Candidates for adjusting OpenACC privatization level.  */
  vec<tree> oacc_privatization_candidates;
};
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap make_addressable_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;
static vec<gomp_task *> task_cpyfns;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
static bool omp_maybe_offloaded_ctx (omp_context *ctx);
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
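/* WALK_SUBSTMTS is spliced into the switches over gimple_code in the
   statement-walk callbacks below, so that container statements (binds,
   try blocks, etc.) are descended into instead of treated as leaves.  */
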
/* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_parallel_or_serial (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
}
/* Return whether CTX represents an OpenACC 'kernels' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* Return whether CTX represents an OpenACC 'kernels' decomposed part.  */

static bool
is_oacc_kernels_decomposed_part (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
}
/* Return true if STMT corresponds to an OpenMP target region.  */

static bool
is_omp_target (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
    {
      int kind = gimple_omp_target_kind (stmt);
      return (kind == GF_OMP_TARGET_KIND_REGION
	      || kind == GF_OMP_TARGET_KIND_DATA
	      || kind == GF_OMP_TARGET_KIND_ENTER_DATA
	      || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
    }
  return false;
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */
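/* For example, in a C++ member function containing "#pragma omp parallel
   firstprivate (m)" for a non-static data member M, the front end
   privatizes M through an artificial VAR_DECL whose DECL_VALUE_EXPR is
   this->m; given that dummy decl, this returns the "this" PARM_DECL.
   (Illustrative description of the case handled below.)  */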
tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	break;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" form
   allows for the variable form to not have been entered, otherwise we
   assert that the variable must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */
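/* In short: small scalars that are provably inaccessible from outside the
   construct can use copy-in/copy-out by value, whereas aggregates,
   atomics, addressable variables, and anything a deferred task may
   outlive must be passed via a pointer field.  (Summary of the checks
   below, not a separate specification.)  */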
static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!make_addressable_vars)
		make_addressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (make_addressable_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;

  if (ctx)
    {
      DECL_CHAIN (copy) = ctx->block_vars;
      ctx->block_vars = copy;
    }
  else
    record_vars (copy);

  /* If VAR is listed in make_addressable_vars, it wasn't
     originally addressable, but was only later made so.
     We don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((make_addressable_vars
	   && bitmap_bit_p (make_addressable_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  for (; outer; outer = outer->outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
	continue;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
	  && !maybe_lookup_decl (var, outer))
	continue;
      break;
    }

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || code == OMP_CLAUSE_ALLOCATE
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_privatize_by_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_privatize_by_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */
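/* Reading aid, inferred from the uses in this file rather than a normative
   definition: MASK & 1 installs the field into RECORD_TYPE/FIELD_MAP,
   MASK & 2 into SRECORD_TYPE/SFIELD_MAP, MASK & 4 wraps an array type in a
   pointer to pointer, MASK & 8 keys the entry by &DECL_UID, MASK & 16 keys
   it by &DECL_NAME and uses the language's array-descriptor data type, and
   MASK & 32 marks an allocate-clause firstprivate on a task.  */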
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  else if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1
	   && omp_privatize_by_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;
  delete ctx->allocate_map;

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */
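/* For example, for "#pragma omp parallel shared(a) firstprivate(b)" this
   installs a field for A (by pointer when use_pointer_for_field says so)
   plus a field and a private local copy for B, so the sender/receiver
   code emitted later can marshal both.  (Illustrative summary; the cases
   below are the authoritative list.)  */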
static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
	&& (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    /* omp_default_mem_alloc is 1 */
	    || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	    || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
      {
	/* The allocate clauses that appear on a target construct or on
	   constructs in a target region must specify an allocator expression
	   unless a requires directive with the dynamic_allocators clause
	   is present in the same compilation unit.  */
	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS) == 0)
	    && omp_maybe_offloaded_ctx (ctx))
	  error_at (OMP_CLAUSE_LOCATION (c), "%<allocate%> clause must"
		    " specify an allocator here");
	if (ctx->allocate_map == NULL)
	  ctx->allocate_map = new hash_map<tree, tree>;
	tree val = integer_zero_node;
	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	  val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
	if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
	  val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
	ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
      }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map && ctx->allocate_map->get (decl))
	    ctx->allocate_map->remove (decl);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_privatize_by_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));
	      /* Likewise, on OpenACC 'kernels' decomposed parts.  */
	      gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map
	      && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		   && (OMP_CLAUSE_REDUCTION_INSCAN (c)
		       || OMP_CLAUSE_REDUCTION_TASK (c)))
		  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		  || is_task_ctx (ctx)))
	    {
	      /* For now.  */
	      if (ctx->allocate_map->get (decl))
		ctx->allocate_map->remove (decl);
	    }
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (is_omp_target (ctx->stmt))
		{
		  if (is_variable_sized (t))
		    {
		      gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
		      t = DECL_VALUE_EXPR (t);
		      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
		      t = TREE_OPERAND (t, 0);
		      gcc_assert (DECL_P (t));
		    }
		  tree at = t;
		  if (ctx->outer)
		    scan_omp_op (&at, ctx->outer);
		  tree nt = omp_copy_decl_1 (at, ctx->outer);
		  splay_tree_insert (ctx->field_map,
				     (splay_tree_key) &DECL_CONTEXT (t),
				     (splay_tree_value) nt);
		  if (at != t)
		    splay_tree_insert (ctx->field_map,
				       (splay_tree_key) &DECL_CONTEXT (at),
				       (splay_tree_value) nt);
		  break;
		}
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_omp_target (ctx->stmt))
	    {
	      tree at = decl;
	      if (ctx->outer)
		scan_omp_op (&at, ctx->outer);
	      tree nt = omp_copy_decl_1 (at, ctx->outer);
	      splay_tree_insert (ctx->field_map,
				 (splay_tree_key) &DECL_CONTEXT (decl),
				 (splay_tree_value) nt);
	      if (at != decl)
		splay_tree_insert (ctx->field_map,
				   (splay_tree_key) &DECL_CONTEXT (at),
				   (splay_tree_value) nt);
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */
:
1368 case OMP_CLAUSE_LINEAR
:
1369 decl
= OMP_CLAUSE_DECL (c
);
1371 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1372 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
1373 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1374 && is_gimple_omp_offloaded (ctx
->stmt
))
1376 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1377 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
1378 && lang_hooks
.decls
.omp_array_data (decl
, true)))
1380 by_ref
= !omp_privatize_by_reference (decl
);
1381 install_var_field (decl
, by_ref
, 3, ctx
);
1383 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1385 if (TREE_CODE (decl
) == INDIRECT_REF
)
1386 decl
= TREE_OPERAND (decl
, 0);
1387 install_var_field (decl
, true, 3, ctx
);
1389 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1390 install_var_field (decl
, true, 3, ctx
);
1392 install_var_field (decl
, false, 3, ctx
);
1394 if (is_variable_sized (decl
))
1396 if (is_task_ctx (ctx
))
1398 if (ctx
->allocate_map
1399 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1402 if (ctx
->allocate_map
->get (decl
))
1403 ctx
->allocate_map
->remove (decl
);
1405 install_var_field (decl
, false, 1, ctx
);
1409 else if (is_taskreg_ctx (ctx
))
1412 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1413 by_ref
= use_pointer_for_field (decl
, NULL
);
1415 if (is_task_ctx (ctx
)
1416 && (global
|| by_ref
|| omp_privatize_by_reference (decl
)))
1418 if (ctx
->allocate_map
1419 && ctx
->allocate_map
->get (decl
))
1420 install_var_field (decl
, by_ref
, 32 | 1, ctx
);
1422 install_var_field (decl
, false, 1, ctx
);
1424 install_var_field (decl
, by_ref
, 2, ctx
);
1427 install_var_field (decl
, by_ref
, 3, ctx
);
1429 install_var_local (decl
, ctx
);
1430 /* For descr arrays on target: firstprivatize data + attach ptr. */
1431 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1432 && is_gimple_omp_offloaded (ctx
->stmt
)
1433 && !is_gimple_omp_oacc (ctx
->stmt
)
1434 && lang_hooks
.decls
.omp_array_data (decl
, true))
1436 install_var_field (decl
, false, 16 | 3, ctx
);
1437 install_var_field (decl
, true, 8 | 3, ctx
);
1441 case OMP_CLAUSE_USE_DEVICE_PTR
:
1442 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1443 decl
= OMP_CLAUSE_DECL (c
);
1445 /* Fortran array descriptors. */
1446 if (lang_hooks
.decls
.omp_array_data (decl
, true))
1447 install_var_field (decl
, false, 19, ctx
);
1448 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
1449 && !omp_privatize_by_reference (decl
)
1450 && !omp_is_allocatable_or_ptr (decl
))
1451 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1452 install_var_field (decl
, true, 11, ctx
);
1454 install_var_field (decl
, false, 11, ctx
);
1455 if (DECL_SIZE (decl
)
1456 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1458 tree decl2
= DECL_VALUE_EXPR (decl
);
1459 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1460 decl2
= TREE_OPERAND (decl2
, 0);
1461 gcc_assert (DECL_P (decl2
));
1462 install_var_local (decl2
, ctx
);
1464 install_var_local (decl
, ctx
);
1467 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
1468 decl
= OMP_CLAUSE_DECL (c
);
1469 while (TREE_CODE (decl
) == INDIRECT_REF
1470 || TREE_CODE (decl
) == ARRAY_REF
)
1471 decl
= TREE_OPERAND (decl
, 0);
1474 case OMP_CLAUSE_IS_DEVICE_PTR
:
1475 decl
= OMP_CLAUSE_DECL (c
);
1478 case OMP_CLAUSE__LOOPTEMP_
:
1479 case OMP_CLAUSE__REDUCTEMP_
:
1480 gcc_assert (is_taskreg_ctx (ctx
));
1481 decl
= OMP_CLAUSE_DECL (c
);
1482 install_var_field (decl
, false, 3, ctx
);
1483 install_var_local (decl
, ctx
);

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_FILTER:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* If requested, make 'decl' addressable.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c))
	    {
	      gcc_checking_assert (DECL_P (decl));

	      bool decl_addressable = TREE_ADDRESSABLE (decl);
	      if (!decl_addressable)
		{
		  if (!make_addressable_vars)
		    make_addressable_vars = BITMAP_ALLOC (NULL);
		  bitmap_set_bit (make_addressable_vars, DECL_UID (decl));
		  TREE_ADDRESSABLE (decl) = 1;
		}

	      if (dump_enabled_p ())
		{
		  location_t loc = OMP_CLAUSE_LOCATION (c);
		  const dump_user_location_t d_u_loc
		    = dump_user_location_t::from_location_t (loc);
		  /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat"
#endif
		  if (!decl_addressable)
		    dump_printf_loc (MSG_NOTE, d_u_loc,
				     "variable %<%T%>"
				     " made addressable\n",
				     decl);
		  else
		    dump_printf_loc (MSG_NOTE, d_u_loc,
				     "variable %<%T%>"
				     " already made addressable\n",
				     decl);
#if __GNUC__ >= 10
# pragma GCC diagnostic pop
#endif
		}

	      /* Done.  */
	      OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c) = 0;
	    }
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt))
	    {
	      /* If this is an offloaded region, an attach operation should
		 only exist when the pointer variable is mapped in a prior
		 clause.  */
	      if (is_gimple_omp_offloaded (ctx->stmt))
		gcc_assert
		  (maybe_lookup_decl (decl, ctx)
		   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
		       && lookup_attribute ("omp declare target",
					    DECL_ATTRIBUTES (decl))));

	      /* By itself, attach/detach is generated as part of pointer
		 variable mapping and should not create new variables in the
		 offloaded region, however sender refs for it must be created
		 for its address to be passed to the runtime.  */
	      tree field
		= build_decl (OMP_CLAUSE_LOCATION (c),
			      FIELD_DECL, NULL_TREE, ptr_type_node);
	      SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
	      insert_field_into_struct (ctx->record_type, field);
	      /* To not clash with a map of the pointer variable itself,
		 attach/detach maps have their field looked up by the *clause*
		 tree expression, not the decl.  */
	      gcc_assert (!splay_tree_lookup (ctx->field_map,
					      (splay_tree_key) c));
	      splay_tree_insert (ctx->field_map, (splay_tree_key) c,
				 (splay_tree_value) field);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !(is_gimple_omp_oacc (ctx->stmt)
			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;
:
1714 ctx
->order_concurrent
= true;
1717 case OMP_CLAUSE_BIND
:
1721 case OMP_CLAUSE_NOWAIT
:
1722 case OMP_CLAUSE_ORDERED
:
1723 case OMP_CLAUSE_COLLAPSE
:
1724 case OMP_CLAUSE_UNTIED
:
1725 case OMP_CLAUSE_MERGEABLE
:
1726 case OMP_CLAUSE_PROC_BIND
:
1727 case OMP_CLAUSE_SAFELEN
:
1728 case OMP_CLAUSE_SIMDLEN
:
1729 case OMP_CLAUSE_THREADS
:
1730 case OMP_CLAUSE_SIMD
:
1731 case OMP_CLAUSE_NOGROUP
:
1732 case OMP_CLAUSE_DEFAULTMAP
:
1733 case OMP_CLAUSE_ASYNC
:
1734 case OMP_CLAUSE_WAIT
:
1735 case OMP_CLAUSE_GANG
:
1736 case OMP_CLAUSE_WORKER
:
1737 case OMP_CLAUSE_VECTOR
:
1738 case OMP_CLAUSE_INDEPENDENT
:
1739 case OMP_CLAUSE_AUTO
:
1740 case OMP_CLAUSE_SEQ
:
1741 case OMP_CLAUSE_TILE
:
1742 case OMP_CLAUSE__SIMT_
:
1743 case OMP_CLAUSE_DEFAULT
:
1744 case OMP_CLAUSE_NONTEMPORAL
:
1745 case OMP_CLAUSE_IF_PRESENT
:
1746 case OMP_CLAUSE_FINALIZE
:
1747 case OMP_CLAUSE_TASK_REDUCTION
:
1748 case OMP_CLAUSE_ALLOCATE
:
1751 case OMP_CLAUSE_ALIGNED
:
1752 decl
= OMP_CLAUSE_DECL (c
);
1753 if (is_global_var (decl
)
1754 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1755 install_var_local (decl
, ctx
);
1758 case OMP_CLAUSE__CONDTEMP_
:
1759 decl
= OMP_CLAUSE_DECL (c
);
1760 if (is_parallel_ctx (ctx
))
1762 install_var_field (decl
, false, 3, ctx
);
1763 install_var_local (decl
, ctx
);
1765 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1766 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
1767 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1768 install_var_local (decl
, ctx
);
1771 case OMP_CLAUSE__CACHE_
:
1772 case OMP_CLAUSE_NOHOST
:

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
	    {
	      while (TREE_CODE (decl) == INDIRECT_REF
		     || TREE_CODE (decl) == ARRAY_REF)
		decl = TREE_OPERAND (decl, 0);
	    }

	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt)
	      && !is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    omp_context *rctx = ctx;
	    if (is_omp_target (ctx->stmt))
	      rctx = ctx->outer;
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
				       task_copy ? "_omp_cpyfn" : "_omp_fn");
}
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable)
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      if (lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
	{
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    DECL_ATTRIBUTES (decl)
	      = remove_attribute ("omp declare target",
				  copy_list (DECL_ATTRIBUTES (decl)));
	  else
	    target_attr = NULL;
	}
      if (target_attr)
	DECL_ATTRIBUTES (decl)
	  = tree_cons (get_identifier (target_attr),
		       NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
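/* For illustration (a sketch, not literal output): for a parallel region in
   'foo', the decl built above corresponds to

     static void foo._omp_fn.0 (void *.omp_data_i);

   where .omp_data_i points to the .omp_data_s record carrying the shared
   and firstprivate data.  A task copy function additionally receives the
   source record as a second ".omp_data_o" argument.  */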
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

static tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE)
	      || (msk == GF_OMP_FOR_KIND_FOR
		  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				      OMP_CLAUSE_LASTPRIVATE)))
	    {
	      tree temp = create_tmp_var (type);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__LOOPTEMP_);
	      insert_decl_map (&outer_ctx->cb, temp, temp);
	      OMP_CLAUSE_DECL (c) = temp;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      if (fd.non_rect
	  && fd.last_nonrect == fd.first_nonrect + 1)
	if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
	      tree type2 = TREE_TYPE (v);
	      count += 3;
	      for (i = 0; i < 3; i++)
		{
		  tree temp = create_tmp_var (type2);
		  tree c = build_omp_clause (UNKNOWN_LOCATION,
					     OMP_CLAUSE__LOOPTEMP_);
		  insert_decl_map (&outer_ctx->cb, temp, temp);
		  OMP_CLAUSE_DECL (c) = temp;
		  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		  gimple_omp_taskreg_set_clauses (stmt, c);
		}
	    }
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
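/* For illustration (a hedged sketch): for a combined

     #pragma omp parallel for
     for (i = 0; i < n; i++) ...

   the two _LOOPTEMP_ temporaries added above give the outlined body a place
   to receive the istart/iend iteration bounds computed by the libgomp loop
   start routines; which GOMP_parallel_loop_* entry point ends up being used
   is decided later, in pass_expand_omp, based on the schedule clause.  */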
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
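/* For illustration (a sketch of the overall scheme, not literal output):

     #pragma omp parallel shared(a) firstprivate(b)
     { body; }

   is scanned into a context whose .omp_data_s record gets one field per
   shared/firstprivate variable; pass_expand_omp later outlines the body
   into foo._omp_fn.N and emits a call to the GOMP_parallel runtime entry
   point with the record's address.  */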
/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have a depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
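/* Note: when the task data record turns out to be empty, the arg_size 0 and
   arg_align 1 set above tell the runtime that no data block needs to be
   allocated or copied for the task.  */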
/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_context returns non-NULL for some
   tree, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
	{
	  if (DECL_HAS_VALUE_EXPR_P (t))
	    t = unshare_expr (DECL_VALUE_EXPR (t));
	  *tp = t;
	}
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any make_addressable_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (make_addressable_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      tree detach_clause
	= omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_DETACH);
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  They are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      if (detach_clause)
	{
	  tree c, field;

	  /* Look for a firstprivate clause with the detach event handle.  */
	  for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	       c; c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
		continue;
	      if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
		  == OMP_CLAUSE_DECL (detach_clause))
		break;
	    }

	  gcc_assert (c);
	  field = lookup_field (OMP_CLAUSE_DECL (c), ctx);

	  /* Move field corresponding to the detach clause first.
	     This is filled by GOMP_task and needs to be in a
	     specific position.  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == field)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = field;
	  if (ctx->srecord_type)
	    {
	      field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == field)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = field;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
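/* For illustration: after the reordering above, a taskloop's data record
   begins roughly as

     struct .omp_data_s { <looptemp1>; <looptemp2>; [<reductemp>;] ... };

   because GOMP_taskloop stores the per-task iteration bounds into the first
   two fields, with the reduction temporary expected right after them when
   present.  The field names here are illustrative only.  */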
/* Find the enclosing offload context.  */

static omp_context *
enclosing_target_ctx (omp_context *ctx)
{
  for (; ctx; ctx = ctx->outer)
    if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
      break;
  return ctx;
}
/* Return whether CTX's parent compute construct is an OpenACC 'kernels'
   construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
ctx_in_oacc_kernels_region (omp_context *ctx)
{
  for (; ctx != NULL; ctx = ctx->outer)
    {
      gimple *stmt = ctx->stmt;
      if (gimple_code (stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
	return true;
    }
  return false;
}
/* Check the parallelism clauses inside an OpenACC 'kernels' region.
   (This doesn't include OpenACC 'kernels' decomposed parts.)
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
  if (!stmt)
    {
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
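/* For illustration, the mask checking above rejects e.g.

     #pragma acc kernels
     {
       #pragma acc loop gang
       for (...)
	 #pragma acc loop gang   // error: inner loop uses same
	 for (...) ...           // OpenACC parallelism as containing loop
     }
   */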
/* Scan a GIMPLE_OMP_FOR.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      if (!(tgt && is_oacc_kernels (tgt)))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    tree c_op0;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		c_op0 = OMP_CLAUSE_GANG_EXPR (c);
		break;

	      case OMP_CLAUSE_WORKER:
		c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
		break;

	      case OMP_CLAUSE_VECTOR:
		c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
		break;

	      default:
		continue;
	      }

	    if (c_op0)
	      {
		/* By construction, this is impossible for OpenACC 'kernels'
		   decomposed parts.  */
		gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));

		error_at (OMP_CLAUSE_LOCATION (c),
			  "argument not permitted on %qs clause",
			  omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		if (tgt)
		  inform (gimple_location (tgt->stmt),
			  "enclosing parent compute construct");
		else if (oacc_get_fn_attrib (current_function_decl))
		  inform (DECL_SOURCE_LOCATION (current_function_decl),
			  "enclosing routine");
		else
		  gcc_unreachable ();
	      }
	  }

      if (tgt && is_oacc_kernels (tgt))
	check_oacc_kernel_gwv (stmt, ctx);

      /* Collect all variables named in reductions on this loop.  Ensure
	 that, if this loop has a reduction on some variable v, and there is
	 a reduction on v somewhere in an outer context, then there is a
	 reduction on v on all intervening loops as well.  */
      tree local_reduction_clauses = NULL;
      for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	    local_reduction_clauses
	      = tree_cons (NULL, c, local_reduction_clauses);
	}
      if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
	ctx->outer_reduction_clauses
	  = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
		     ctx->outer->outer_reduction_clauses);
      tree outer_reduction_clauses = ctx->outer_reduction_clauses;
      tree local_iter = local_reduction_clauses;
      for (; local_iter; local_iter = TREE_CHAIN (local_iter))
	{
	  tree local_clause = TREE_VALUE (local_iter);
	  tree local_var = OMP_CLAUSE_DECL (local_clause);
	  tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
	  bool have_outer_reduction = false;
	  tree ctx_iter = outer_reduction_clauses;
	  for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
	    {
	      tree outer_clause = TREE_VALUE (ctx_iter);
	      tree outer_var = OMP_CLAUSE_DECL (outer_clause);
	      tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
	      if (outer_var == local_var && outer_op != local_op)
		{
		  warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
			      "conflicting reduction operations for %qE",
			      local_var);
		  inform (OMP_CLAUSE_LOCATION (outer_clause),
			  "location of the previous reduction for %qE",
			  local_var);
		}
	      if (outer_var == local_var)
		{
		  have_outer_reduction = true;
		  break;
		}
	    }
	  if (have_outer_reduction)
	    {
	      /* There is a reduction on outer_var both on this loop and on
		 some enclosing loop.  Walk up the context tree until such a
		 loop with a reduction on outer_var is found, and complain
		 about all intervening loops that do not have such a
		 reduction.  */
	      struct omp_context *curr_loop = ctx->outer;
	      bool found = false;
	      while (curr_loop != NULL)
		{
		  tree curr_iter = curr_loop->local_reduction_clauses;
		  for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
		    {
		      tree curr_clause = TREE_VALUE (curr_iter);
		      tree curr_var = OMP_CLAUSE_DECL (curr_clause);
		      if (curr_var == local_var)
			{
			  found = true;
			  break;
			}
		    }
		  if (!found)
		    warning_at (gimple_location (curr_loop->stmt), 0,
				"nested loop in reduction needs "
				"reduction clause for %qE",
				local_var);
		  else
		    break;
		  curr_loop = curr_loop->outer;
		}
	    }
	}
      ctx->local_reduction_clauses = local_reduction_clauses;
      ctx->outer_reduction_clauses
	= chainon (unshare_expr (ctx->local_reduction_clauses),
		   ctx->outer_reduction_clauses);

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);

  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
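/* The bind built above effectively wraps the loop as (a rough sketch):

     cond = .GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the simd loop, with added _simt_ clause>  goto lab3;
     lab2: <original simd loop>
     lab3:

   so later passes can keep whichever version matches the target.  */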
static tree omp_find_scan (gimple_stmt_iterator *, bool *,
			   struct walk_stmt_info *);
static omp_context *maybe_lookup_ctx (gimple *);

/* Duplicate #pragma omp simd, one for the scan input phase loop and one
   for the scan phase loop.  */

static void
scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
		    omp_context *outer_ctx)
{
  /* The only change between inclusive and exclusive scan will be
     within the first simd loop, so just use inclusive in the
     worksharing loop.  */
  outer_ctx->scan_inclusive = true;
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
  OMP_CLAUSE_DECL (c) = integer_zero_node;

  gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
  gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
  gsi_replace (gsi, input_stmt, false);
  gimple_seq input_body = NULL;
  gimple_seq_add_stmt (&input_body, stmt);
  gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);

  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gsi_next (&input1_gsi);
  gimple *scan_stmt1 = gsi_stmt (input1_gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
  c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt1, scan_stmt1);

  gimple_seq input_body1 = gimple_omp_body (input_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);

  gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (scan_body);

  gimple_omp_set_body (input_stmt1, input_body1);
  gimple_omp_set_body (scan_stmt1, NULL);

  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
		       NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi_next (&input2_gsi);
  gimple *scan_stmt2 = gsi_stmt (input2_gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt2, scan_stmt2);

  gimple_omp_set_body (input_stmt2, NULL);

  gimple_omp_set_body (input_stmt, input_body);
  gimple_omp_set_body (scan_stmt, scan_body);

  omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (input_stmt), ctx);

  ctx = new_omp_context (scan_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);

  maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
}
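/* For illustration: for '#pragma omp simd reduction(inscan, +:r)' the loop
   is duplicated into an input phase loop (which feeds the scan) and a scan
   phase loop; only the first simd loop differs between inclusive and
   exclusive scans, which is why the worksharing loop above is always
   treated as inclusive.  */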
/* Scan an OpenMP sections directive.  */

static void
scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;

  ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
}
/* Scan an OpenMP single directive.  */

static void
scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}
/* Scan a GIMPLE_OMP_TARGET.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }

  if (ctx->teams_nested_p && ctx->nonteams_nested_p)
    {
      error_at (gimple_location (stmt),
		"%<target%> construct with nested %<teams%> construct "
		"contains directives outside of the %<teams%> construct");
      gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
    }
}
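/* For illustration (roughly speaking): for an offloaded region such as

     #pragma omp target map(tofrom: a)

   the .omp_data_t record built above gets one field per mapped variable,
   kept in declaration order (hence the nreverse above) so that the fields
   line up with the per-map size and kind information handed to the
   offloading runtime.  */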
/* Scan an OpenMP teams directive.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  if (!gimple_omp_teams_host (stmt))
    {
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  taskreg_contexts.safe_push (ctx);
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Check nesting restrictions.  */

static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
      || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
    /* ..., except for the atomic codes that OpenACC shares with OpenMP.  */
    ;
  else if (!(is_gimple_omp (stmt)
	     && is_gimple_omp_oacc (stmt)))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
	{
	  c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
			       OMP_CLAUSE_DEVICE);
	  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs are not allowed in target region "
			"with %<ancestor%>");
	      return false;
	    }

	  if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
	    ctx->teams_nested_p = true;
	  else
	    ctx->nonteams_nested_p = true;
	}
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
	  && ctx->outer
	  && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	ctx = ctx->outer;
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	  && !ctx->loop_p)
	{
	  c = NULL_TREE;
	  if (ctx->order_concurrent
	      && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs other than %<parallel%>, %<loop%>"
			" or %<simd%> may not be nested inside a region with"
			" the %<order(concurrent)%> clause");
	      return false;
	    }
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<%s simd%> region",
				lang_GNU_Fortran () ? "do" : "for");
		      return false;
		    }
		  return true;
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
		   || gimple_code (stmt) == GIMPLE_OMP_SCAN)
	    return true;
	  else if (gimple_code (stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	    return true;
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than "
		    "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
		    "not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
		   && omp_find_clause (gimple_omp_for_clauses (stmt),
				       OMP_CLAUSE_BIND) == NULL_TREE))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%>, %<parallel%> or %<loop%> "
			"regions are allowed to be strictly nested inside "
			"%<teams%> region");
	      return false;
	    }
	}
      else if (ctx->order_concurrent
	       && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
	       && (gimple_code (stmt) != GIMPLE_OMP_FOR
		   || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
	       && gimple_code (stmt) != GIMPLE_OMP_SCAN)
	{
	  if (ctx->loop_p)
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a %<loop%> region");
	  else
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a region with "
		      "the %<order(concurrent)%> clause");
	  return false;
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      /* For now, hope this will change and loop bind(parallel) will not
	 be allowed in lots of contexts.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	  && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		  case GF_OMP_TARGET_KIND_OACC_SERIAL:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
		    ok = true;
		    break;

		  default:
		    break;
		  }

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "cancel"
	      : "cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (!is_task_ctx (ctx)
		  && (!is_taskloop_ctx (ctx)
		      || ctx->outer == NULL
		      || !is_task_ctx (ctx->outer)))
		bad = "task";
	      else
		{
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			case GIMPLE_OMP_TASK:
			  if (gimple_omp_task_taskloop_p (octx->stmt)
			      && octx->outer
			      && is_taskloop_ctx (octx->outer))
			    {
			      tree clauses
				= gimple_omp_for_clauses (octx->outer->stmt);
			      if (!omp_find_clause (clauses,
						    OMP_CLAUSE_NOGROUP))
				break;
			    }
			  continue;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_MASKED:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<loop%>, %<critical%>, "
			  "%<ordered%>, %<master%>, %<masked%>, explicit "
			  "%<task%> or %<taskloop%> region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
		      "%<master%>, %<masked%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%qs region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%> or "
		      "%<taskloop%> region",
		      gimple_code (stmt) == GIMPLE_OMP_MASTER
		      ? "master" : "masked");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_SCOPE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_MASKED:
	    error_at (gimple_location (stmt),
		      "%<scope%> region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%>, "
		      "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
		      "or %<masked%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    {
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }
	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
	      || kind == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree oclause;
	      /* Look for containing ordered(N) loop.  */
	      if (ctx == NULL
		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || (oclause
		      = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					 OMP_CLAUSE_ORDERED)) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside an %<ordered%> "
			    "loop");
		  return false;
		}
	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside a loop with "
			    "%<ordered%> clause with a parameter");
		  return false;
		}
	    }
	  else
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"invalid depend kind in omp %<ordered%> %<depend%>");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  error_at (gimple_location (stmt),
		    "%<ordered%> %<simd%> must be closely nested inside "
		    "%<simd%> region");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    tree o;
	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED);
	    if (o == NULL)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region without %<depend%> clause may "
			  "not be closely nested inside a loop region with "
			  "an %<ordered%> clause with a parameter");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
	      = dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
	break;
      else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	       || (gimple_omp_target_kind (ctx->stmt)
		   != GF_OMP_TARGET_KIND_REGION))
	{
	  /* Teams construct can appear either strictly nested inside of
	     target construct with no intervening stmts, or can be encountered
	     only by initial task (so must not appear inside any OpenMP
	     construct).  */
	  error_at (gimple_location (stmt),
		    "%<teams%> construct must be closely nested inside of "
		    "%<target%> construct or not nested in any OpenMP "
		    "construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
	      stmt_name = "enter data"; break;
	    case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
	      stmt_name = "exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL:
	      ctx_stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      ctx_stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		{
		  if ((gimple_omp_target_kind (ctx->stmt)
		       == GF_OMP_TARGET_KIND_REGION)
		      && (gimple_omp_target_kind (stmt)
			  == GF_OMP_TARGET_KIND_REGION))
		    {
		      c = omp_find_clause (gimple_omp_target_clauses (stmt),
					   OMP_CLAUSE_DEVICE);
		      if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
			break;
		    }
		  warning_at (gimple_location (stmt), 0,
			      "%qs construct inside of %qs region",
			      stmt_name, ctx_stmt_name);
		}
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
/* Helper function for scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
/* Return true if FNDECL is a setjmp or a longjmp.  */

static bool
setjmp_or_longjmp_p (const_tree fndecl)
{
  if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
      || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
    return true;

  tree declname = DECL_NAME (fndecl);
  if (!declname
      || (DECL_CONTEXT (fndecl) != NULL_TREE
	  && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
      || !TREE_PUBLIC (fndecl))
    return false;

  const char *name = IDENTIFIER_POINTER (declname);
  return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
}
/* Return true if FNDECL is an omp_* runtime API call.  */

static bool
omp_runtime_api_call (const_tree fndecl)
{
  tree declname = DECL_NAME (fndecl);
  if (!declname
      || (DECL_CONTEXT (fndecl) != NULL_TREE
	  && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
      || !TREE_PUBLIC (fndecl))
    return false;

  const char *name = IDENTIFIER_POINTER (declname);
  if (!startswith (name, "omp_"))
    return false;

  static const char *omp_runtime_apis[] =
    {
      /* This array has 3 sections.  First omp_* calls that don't
	 have any suffixes.  */
      "target_associate_ptr",
      "target_disassociate_ptr",
      "target_is_accessible",
      "target_is_present",
      "target_memcpy_async",
      "target_memcpy_rect",
      "target_memcpy_rect_async",
      NULL,
      /* Now omp_* calls that are available as omp_* and omp_*_; however, the
	 DECL_NAME is always omp_* without trailing underscore.  */
      "destroy_allocator",
      "destroy_nest_lock",
      "get_affinity_format",
      "get_default_allocator",
      "get_default_device",
      "get_initial_device",
      "get_max_active_levels",
      "get_max_task_priority",
      "get_partition_num_places",
      "get_supported_active_levels",
      "get_teams_thread_limit",
      "is_initial_device",
      "pause_resource_all",
      "set_affinity_format",
      "set_default_allocator",
      NULL,
      /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
	 as DECL_NAME only omp_* and omp_*_8 appear.  */
      "get_ancestor_thread_num",
      "get_partition_place_nums",
      "get_place_num_procs",
      "get_place_proc_ids",
      "set_default_device",
      "set_max_active_levels",
      "set_teams_thread_limit"
    };

  int mode = 0;
  for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
    {
      if (omp_runtime_apis[i] == NULL)
	{
	  mode++;
	  continue;
	}
      size_t len = strlen (omp_runtime_apis[i]);
      if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
	  && (name[4 + len] == '\0'
	      || (mode > 1 && strcmp (name + 4 + len, "_8") == 0)))
	return true;
    }
  return false;
}
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  if (ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	      && setjmp_or_longjmp_p (fndecl))
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside %<simd%> construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	  if (ctx)
	    {
	      omp_context *octx = ctx;
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
		octx = ctx->outer;
	      if (octx->order_concurrent && omp_runtime_api_call (fndecl))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD in a region with "
			    "%<order(concurrent)%> clause", fndecl);
		}
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && omp_runtime_api_call (fndecl)
		  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
		       != strlen ("omp_get_num_teams"))
		      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
				 "omp_get_num_teams") != 0)
		  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
		       != strlen ("omp_get_team_num"))
		      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
				 "omp_get_team_num") != 0))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD strictly nested in a "
			    "%<teams%> region", fndecl);
		}
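	      /* Illustration: in a region such as

		   #pragma omp teams
		   n = omp_get_thread_num ();   // diagnosed just above

		 only omp_get_num_teams () and omp_get_team_num () may be
		 called strictly nested in the teams region; every other
		 omp_* runtime API call is rejected.  */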
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (ctx->stmt)
		      == GF_OMP_TARGET_KIND_REGION)
		  && omp_runtime_api_call (fndecl))
		{
		  tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
		  tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
		  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
		    error_at (gimple_location (stmt),
			      "OpenMP runtime API call %qD in a region with "
			      "%<device(ancestor)%> clause", fndecl);
		}
	    }
	}
    }
  if (remove)
    {
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && gimple_omp_for_combined_into_p (stmt)
	  && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
	{
	  tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
	  tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
	  if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
	    {
	      scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
	      break;
	    }
	}
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ()
	  && gimple_omp_for_collapse (stmt) == 1)
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCOPE:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_MASKED:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      if (is_gimple_omp_offloaded (stmt))
	{
	  taskreg_nesting_level++;
	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
/* Scan all the statements starting at the current statement.  CTX
   contains context information about the OMP directives and
   clauses found during the scan.  */

static void
scan_omp (gimple_seq *body_p, omp_context *ctx)
{
  location_t saved_location;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  saved_location = input_location;
  walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
  input_location = saved_location;
}
/* Re-gimplification and code generation routines.  */

/* Remove omp_member_access_dummy_var variables from gimple_bind_vars
   of BIND if in a method.  */

static void
maybe_remove_omp_member_access_dummy_vars (gbind *bind)
{
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    {
      tree vars = gimple_bind_vars (bind);
      for (tree *pvar = &vars; *pvar; )
	if (omp_member_access_dummy_var (*pvar))
	  *pvar = DECL_CHAIN (*pvar);
	else
	  pvar = &DECL_CHAIN (*pvar);
      gimple_bind_set_vars (bind, vars);
    }
}
/* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
   block and its subblocks.  */

static void
remove_member_access_dummy_vars (tree block)
{
  for (tree *pvar = &BLOCK_VARS (block); *pvar; )
    if (omp_member_access_dummy_var (*pvar))
      *pvar = DECL_CHAIN (*pvar);
    else
      pvar = &DECL_CHAIN (*pvar);

  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    remove_member_access_dummy_vars (block);
}
/* If a context was created for STMT when it was scanned, return it.  */

static omp_context *
maybe_lookup_ctx (gimple *stmt)
{
  splay_tree_node n;
  n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
  return n ? (omp_context *) n->value : NULL;
}
/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context.  Suppose that we have the
   following parallel nesting (variable UIDs showed for clarity):

	iD.1562 = 0;
	#omp parallel shared(iD.1562)		-> outer parallel
	  iD.1562 = iD.1562 + 1;

	  #omp parallel shared (iD.1562)	-> inner parallel
	     iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

	outer parallel		.omp_data_s.1.i -> iD.1562
	inner parallel		.omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it.  So, in
   this case we would have:

	iD.1562 = 0;
	.omp_data_o.1.i = iD.1562;
	#omp parallel shared(iD.1562)		-> outer parallel
	  .omp_data_i.1 = &.omp_data_o.1
	  .omp_data_i.1->i = .omp_data_i.1->i + 1;

	  .omp_data_o.2.i = iD.1562;		-> **
	  #omp parallel shared(iD.1562)		-> inner parallel
	    .omp_data_i.2 = &.omp_data_o.2
	    .omp_data_i.2->i = .omp_data_i.2->i - 1;

	    ** This is a problem.  The symbol iD.1562 cannot be referenced
	       inside the body of the outer parallel region.  But since we
	       are emitting this copy operation while expanding the inner
	       parallel directive, we need to access the CTX structure of
	       the outer parallel directive to get the correct mapping:

	  .omp_data_o.2.i = .omp_data_i.1->i

   Since there may be other workshare or parallel directives enclosing
   the parallel directive, it may be necessary to walk up the context
   parent chain.  This is not a problem in general because nested
   parallelism happens only rarely.  */

static tree
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  gcc_assert (!ctx->is_nested || t || is_global_var (decl));

  return t ? t : decl;
}
/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
   in outer contexts.  */

static tree
maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t = NULL;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  return t ? t : decl;
}
/* Construct the initialization value for reduction operation OP.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
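/* Examples of the identity values chosen above: reduction(+:x) and
   reduction(-:x) start each private copy from 0, reduction(*:x) from 1,
   reduction(&:x) from ~0 (all bits set), and reduction(max:x) from -Inf
   for floating types honoring infinities, otherwise from the minimum
   value representable in the type.  */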
/* Construct the initialization value for reduction CLAUSE.  */

tree
omp_reduction_init (tree clause, tree type)
{
  return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
				OMP_CLAUSE_REDUCTION_CODE (clause), type);
}
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_modes modes;
  targetm.vectorize.autovectorize_vector_modes (&modes, true);
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	machine_mode alt_vmode;
	for (unsigned int j = 0; j < modes.length (); ++j)
	  if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
	      && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
	    vmode = alt_vmode;

	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type_for_mode (type, vmode);
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
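/* For example, if the widest preferred vector mode found above is 32
   bytes wide, then

     #pragma omp simd aligned (p)

   without an explicit alignment argument lets the compiler assume (at
   least) 32-byte alignment for p; the exact value is target dependent.  */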
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

class omplow_simd_context {
public:
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  tree idx;
  tree lane;
  tree lastlane;
  vec<tree, va_heap> simt_eargs;
  gimple_seq simt_dlist;
  poly_uint64_pod max_vf;
  bool is_simt;
};
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatized variables.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
	{
	  for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
	       c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		continue;
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* UDR reductions are not supported yet for SIMT, disable
		     SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}
	      if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
		  && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
		{
		  /* Doing boolean operations on non-integral types is
		     for conformance only, it's not worth supporting this
		     for SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
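/* Sketch of the effect in the non-SIMT case (illustration only): for a
   privatized variable x with max_vf == 8, an "omp simd array"

     x.arr[8]

   is created; LVAR becomes x.arr[lane] and IVAR becomes x.arr[idx], and
   for a DECL the DECL_VALUE_EXPR of x is redirected to the per-lane
   element so each simd lane works on its own copy.  */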
/* Helper function of lower_rec_input_clauses.  For a reference
   in simd reduction, add an underlying variable it will reference.  */

static void
handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
{
  tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
  if (TREE_CONSTANT (z))
    {
      z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
			      get_name (new_vard));
      gimple_add_tmp_var (z);
      TREE_ADDRESSABLE (z) = 1;
      z = build_fold_addr_expr_loc (loc, z);
      gimplify_assign (new_vard, z, ilist);
    }
}
/* Helper function for lower_rec_input_clauses.  Emit into ilist sequence
   code to emit (type) (tskred_temp[idx]).  */

static tree
task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
		     unsigned idx)
{
  unsigned HOST_WIDE_INT sz
    = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
  tree r = build2 (MEM_REF, pointer_sized_int_node,
		   tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
					       sz * idx));
  tree v = create_tmp_var (pointer_sized_int_node);
  gimple *g = gimple_build_assign (v, r);
  gimple_seq_add_stmt (ilist, g);
  if (!useless_type_conversion_p (type, pointer_sized_int_node))
    {
      v = create_tmp_var (type);
      g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
      gimple_seq_add_stmt (ilist, g);
    }
  return v;
}
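/* Roughly speaking (illustration, not generated verbatim), the sequence
   built above loads one pointer-sized slot of the task reduction
   descriptor and converts it:

     v.1 = MEM <uintptr> [tskred_temp + idx * sizeof (void *)];
     v   = (type) v.1;   // emitted only when a conversion is needed

   and the (possibly converted) temporary is returned to the caller.  */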
/* Lower early initialization of privatized variable NEW_VAR
   if it needs an allocator (has allocate clause).  */

static bool
lower_private_allocate (tree var, tree new_var, tree &allocator,
			tree &allocate_ptr, gimple_seq *ilist,
			omp_context *ctx, bool is_ref, tree size)
{
  if (allocator)
    return false;
  gcc_assert (allocate_ptr == NULL_TREE);
  if (ctx->allocate_map
      && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
    if (tree *allocatorp = ctx->allocate_map->get (var))
      allocator = *allocatorp;
  if (allocator == NULL_TREE)
    return false;
  if (!is_ref && omp_privatize_by_reference (var))
    {
      allocator = NULL_TREE;
      return false;
    }

  unsigned HOST_WIDE_INT ialign = 0;
  if (TREE_CODE (allocator) == TREE_LIST)
    {
      ialign = tree_to_uhwi (TREE_VALUE (allocator));
      allocator = TREE_PURPOSE (allocator);
    }
  if (TREE_CODE (allocator) != INTEGER_CST)
    allocator = build_outer_var_ref (allocator, ctx, OMP_CLAUSE_ALLOCATE);
  allocator = fold_convert (pointer_sized_int_node, allocator);
  if (TREE_CODE (allocator) != INTEGER_CST)
    {
      tree var = create_tmp_var (TREE_TYPE (allocator));
      gimplify_assign (var, allocator, ilist);
      allocator = var;
    }

  tree ptr_type, align, sz = size;
  if (TYPE_P (new_var))
    {
      ptr_type = build_pointer_type (new_var);
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
    }
  else if (is_ref)
    {
      ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
    }
  else
    {
      ptr_type = build_pointer_type (TREE_TYPE (new_var));
      ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
      if (sz == NULL_TREE)
	sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
    }
  align = build_int_cst (size_type_node, ialign);
  if (TREE_CODE (sz) != INTEGER_CST)
    {
      tree szvar = create_tmp_var (size_type_node);
      gimplify_assign (szvar, sz, ilist);
      sz = szvar;
    }
  allocate_ptr = create_tmp_var (ptr_type);
  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
  gimple *g = gimple_build_call (a, 3, align, sz, allocator);
  gimple_call_set_lhs (g, allocate_ptr);
  gimple_seq_add_stmt (ilist, g);
  if (!TYPE_P (new_var))
    {
      tree x = build_simple_mem_ref (allocate_ptr);
      TREE_THIS_NOTRAP (x) = 1;
      SET_DECL_VALUE_EXPR (new_var, x);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
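/* Illustration (simplified): for

     #pragma omp parallel private (x) allocate (allocator (al) : x)

   the code above emits the equivalent of

     ptr = GOMP_alloc (__alignof__ (x), sizeof (x), al);

   and redirects x through *ptr via DECL_VALUE_EXPR; the matching
   GOMP_free call is emitted by the callers into the destructor
   sequence.  */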
/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
   from the receiver (aka child) side and initializers for REFERENCE_TYPE
   private variables.  Initialization statements go in ILIST, while calls
   to destructors go in DLIST.  */

static void
lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
			 omp_context *ctx, struct omp_for_data *fd)
{
  tree c, copyin_seq, x, ptr;
  bool copyin_by_ref = false;
  bool lastprivate_firstprivate = false;
  bool reduction_omp_orig_ref = false;
  int pass;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
  omplow_simd_context sctx = omplow_simd_context ();
  tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
  tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
  gimple_seq llist[4] = { };
  tree nonconst_simd_if = NULL_TREE;

  copyin_seq = NULL;
  sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);

  /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
     with data sharing clauses referencing variable sized vars.  That
     is unnecessarily hard to support and very unlikely to result in
     vectorized code anyway.  */
  if (is_simd)
    for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
	    sctx.max_vf = 1;
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LASTPRIVATE:
	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
	    sctx.max_vf = 1;
	  else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
	      if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
		sctx.max_vf = 1;
	    }
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
	    sctx.max_vf = 1;
	  else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
	      if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
		sctx.max_vf = 1;
	    }
	  break;
	case OMP_CLAUSE_IF:
	  if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
	    sctx.max_vf = 1;
	  else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
	    nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
	  break;
	case OMP_CLAUSE_SIMDLEN:
	  if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
	    sctx.max_vf = 1;
	  break;
	case OMP_CLAUSE__CONDTEMP_:
	  /* FIXME: lastprivate(conditional:) not handled for SIMT yet.  */
	  if (sctx.is_simt)
	    sctx.max_vf = 1;
	  break;
	default:
	  continue;
	}

  /* Add a placeholder for simduid.  */
  if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
    sctx.simt_eargs.safe_push (NULL_TREE);

  unsigned task_reduction_cnt = 0;
  unsigned task_reduction_cntorig = 0;
  unsigned task_reduction_cnt_full = 0;
  unsigned task_reduction_cntorig_full = 0;
  unsigned task_reduction_other_cnt = 0;
  tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
  tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
  /* Do all the fixed sized types in the first pass, and the variable sized
     types in the second pass.  This makes sure that the scalar arguments to
     the variable sized types are processed before we use them in the
     variable sized operations.  For task reductions we use 4 passes, in the
     first two we ignore them, in the third one gather arguments for
     GOMP_task_reduction_remap call and in the last pass actually handle
     the task reductions.  */
  for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
			 ? 4 : 2); ++pass)
    {
      if (pass == 2 && task_reduction_cnt)
	{
	  tskred_atype
	    = build_array_type_nelts (ptr_type_node, task_reduction_cnt
						     + task_reduction_cntorig);
	  tskred_avar = create_tmp_var_raw (tskred_atype);
	  gimple_add_tmp_var (tskred_avar);
	  TREE_ADDRESSABLE (tskred_avar) = 1;
	  task_reduction_cnt_full = task_reduction_cnt;
	  task_reduction_cntorig_full = task_reduction_cntorig;
	}
      else if (pass == 3 && task_reduction_cnt)
	{
	  x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
	  gimple *g
	    = gimple_build_call (x, 3, size_int (task_reduction_cnt),
				 size_int (task_reduction_cntorig),
				 build_fold_addr_expr (tskred_avar));
	  gimple_seq_add_stmt (ilist, g);
	}
      if (pass == 3 && task_reduction_other_cnt)
	{
	  /* For reduction clauses, build
	     tskred_base = (void *) tskred_temp[2]
			   + omp_get_thread_num () * tskred_temp[1]
	     or if tskred_temp[1] is known to be constant, that constant
	     directly.  This is the start of the private reduction copy block
	     for the current thread.  */
	  tree v = create_tmp_var (integer_type_node);
	  x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
	  gimple *g = gimple_build_call (x, 0);
	  gimple_call_set_lhs (g, v);
	  gimple_seq_add_stmt (ilist, g);
	  c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tskred_temp = OMP_CLAUSE_DECL (c);
	  if (is_taskreg_ctx (ctx))
	    tskred_temp = lookup_decl (tskred_temp, ctx);
	  tree v2 = create_tmp_var (sizetype);
	  g = gimple_build_assign (v2, NOP_EXPR, v);
	  gimple_seq_add_stmt (ilist, g);
	  if (ctx->task_reductions[0])
	    v = fold_convert (sizetype, ctx->task_reductions[0]);
	  else
	    v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
	  tree v3 = create_tmp_var (sizetype);
	  g = gimple_build_assign (v3, MULT_EXPR, v2, v);
	  gimple_seq_add_stmt (ilist, g);
	  v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
	  tskred_base = create_tmp_var (ptr_type_node);
	  g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
	  gimple_seq_add_stmt (ilist, g);
	}
      task_reduction_cnt = 0;
      task_reduction_cntorig = 0;
      task_reduction_other_cnt = 0;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	{
	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
	  tree var, new_var;
	  bool by_ref;
	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	  bool task_reduction_p = false;
	  bool task_reduction_needs_orig_p = false;
	  tree cond = NULL_TREE;
	  tree allocator, allocate_ptr;

	  switch (c_kind)
	    {
	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
		continue;
	      break;
	    case OMP_CLAUSE_SHARED:
	      /* Ignore shared directives in teams construct inside
		 of target construct.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && !is_host_teams_ctx (ctx))
		continue;
	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
		{
		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
			      || is_global_var (OMP_CLAUSE_DECL (c)));
		  continue;
		}
	      /* FALLTHRU */
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_COPYIN:
	      break;
	    case OMP_CLAUSE_LINEAR:
	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		lastprivate_firstprivate = true;
	      break;
	    case OMP_CLAUSE_REDUCTION:
	    case OMP_CLAUSE_IN_REDUCTION:
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		  || is_task_ctx (ctx)
		  || OMP_CLAUSE_REDUCTION_TASK (c))
		{
		  task_reduction_p = true;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
		    {
		      task_reduction_other_cnt++;
		      if (pass == 2)
			continue;
		    }
		  else
		    task_reduction_cnt++;
		  if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		    {
		      var = OMP_CLAUSE_DECL (c);
		      /* If var is a global variable that isn't privatized
			 in outer contexts, we don't need to look up the
			 original address, it is always the address of the
			 global variable itself.  */
		      if (!DECL_P (var)
			  || omp_privatize_by_reference (var)
			  || !is_global_var
				(maybe_lookup_decl_in_outer_ctx (var, ctx)))
			{
			  task_reduction_needs_orig_p = true;
			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
			    task_reduction_cntorig++;
			}
		    }
		}
	      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		reduction_omp_orig_ref = true;
	      break;
	    case OMP_CLAUSE__REDUCTEMP_:
	      if (!is_taskreg_ctx (ctx))
		continue;
	      /* FALLTHRU */
	    case OMP_CLAUSE__LOOPTEMP_:
	      /* Handle _looptemp_/_reductemp_ clauses only on
		 parallel/task.  */
	      if (fd)
		continue;
	      break;
	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		{
		  lastprivate_firstprivate = true;
		  if (pass != 0 || is_taskloop_ctx (ctx))
		    continue;
		}
	      /* Even without corresponding firstprivate, if
		 decl is Fortran allocatable, it needs outer var
		 reference.  */
	      else if (pass == 0
		       && lang_hooks.decls.omp_private_outer_ref
							(OMP_CLAUSE_DECL (c)))
		lastprivate_firstprivate = true;
	      break;
	    case OMP_CLAUSE_ALIGNED:
	      if (pass != 1)
		continue;
	      var = OMP_CLAUSE_DECL (c);
	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
		  && !is_global_var (var))
		{
		  new_var = maybe_lookup_decl (var, ctx);
		  if (new_var == NULL_TREE)
		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
		  tree alarg = omp_clause_aligned_alignment (c);
		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
		  gimplify_and_add (x, ilist);
		}
	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		       && is_global_var (var))
		{
		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
		  new_var = lookup_decl (var, ctx);
		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  t = build_fold_addr_expr_loc (clause_loc, t);
		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
		  tree alarg = omp_clause_aligned_alignment (c);
		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
		  t = fold_convert_loc (clause_loc, ptype, t);
		  x = create_tmp_var (ptype);
		  t = build2 (MODIFY_EXPR, ptype, x, t);
		  gimplify_and_add (t, ilist);
		  t = build_simple_mem_ref_loc (clause_loc, x);
		  SET_DECL_VALUE_EXPR (new_var, t);
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      continue;
	    case OMP_CLAUSE__CONDTEMP_:
	      if (is_parallel_ctx (ctx)
		  || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
		break;
	      continue;
	    default:
	      continue;
	    }

	  if (task_reduction_p != (pass >= 2))
	    continue;

	  allocator = NULL_TREE;
	  allocate_ptr = NULL_TREE;
	  new_var = var = OMP_CLAUSE_DECL (c);
	  if ((c_kind == OMP_CLAUSE_REDUCTION
	       || c_kind == OMP_CLAUSE_IN_REDUCTION)
	      && TREE_CODE (var) == MEM_REF)
	    {
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == INDIRECT_REF
		  || TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	    }
	  if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
	    {
	      splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
	      new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
	    }
	  else if (c_kind != OMP_CLAUSE_COPYIN)
	    new_var = lookup_decl (var, ctx);

	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
	    {
	      if (pass != 0)
		continue;
	    }
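	  /* Illustration: the branch below handles array sections in
	     reduction clauses, e.g.

	       #pragma omp parallel for reduction (+ : a[2:n])

	     where OMP_CLAUSE_DECL is a MEM_REF with an offset (the bias)
	     rather than a plain DECL.  */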
	  /* C/C++ array section reductions.  */
	  else if ((c_kind == OMP_CLAUSE_REDUCTION
		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
		   && var != OMP_CLAUSE_DECL (c))
	    {
	      if (pass == 0)
		continue;

	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);

	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (orig_var, 1);
		  if (is_omp_target (ctx->stmt))
		    b = NULL_TREE;
		  else
		    b = maybe_lookup_decl (b, ctx);
		  if (b == NULL)
		    {
		      b = TREE_OPERAND (orig_var, 1);
		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		    }
		  if (integer_zerop (bias))
		    bias = b;
		  else
		    {
		      bias = fold_convert_loc (clause_loc,
					       TREE_TYPE (b), bias);
		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					      TREE_TYPE (b), b, bias);
		    }
		  orig_var = TREE_OPERAND (orig_var, 0);
		}
	      if (pass == 2)
		{
		  tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  if (is_global_var (out)
		      && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
		      && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
			  || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
			      != POINTER_TYPE)))
		    x = var;
		  else if (is_omp_target (ctx->stmt))
		    x = out;
		  else
		    {
		      bool by_ref = use_pointer_for_field (var, NULL);
		      x = build_receiver_ref (var, by_ref, ctx);
		      if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
			  && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
			      == POINTER_TYPE))
			x = build_fold_addr_expr (x);
		    }
		  if (TREE_CODE (orig_var) == INDIRECT_REF)
		    x = build_simple_mem_ref (x);
		  else if (TREE_CODE (orig_var) == ADDR_EXPR)
		    {
		      if (var == TREE_OPERAND (orig_var, 0))
			x = build_fold_addr_expr (x);
		    }
		  bias = fold_convert (sizetype, bias);
		  x = fold_convert (ptr_type_node, x);
		  x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				       TREE_TYPE (x), x, bias);
		  unsigned cnt = task_reduction_cnt - 1;
		  if (!task_reduction_needs_orig_p)
		    cnt += (task_reduction_cntorig_full
			    - task_reduction_cntorig);
		  else
		    cnt = task_reduction_cntorig - 1;
		  tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				   size_int (cnt), NULL_TREE, NULL_TREE);
		  gimplify_assign (r, x, ilist);
		  continue;
		}

	      if (TREE_CODE (orig_var) == INDIRECT_REF
		  || TREE_CODE (orig_var) == ADDR_EXPR)
		orig_var = TREE_OPERAND (orig_var, 0);
	      tree d = OMP_CLAUSE_DECL (c);
	      tree type = TREE_TYPE (d);
	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      tree sz = NULL_TREE;
	      const char *name = get_name (orig_var);
	      if (pass != 3 && !TREE_CONSTANT (v))
		{
		  tree t;
		  if (is_omp_target (ctx->stmt))
		    t = NULL_TREE;
		  else
		    t = maybe_lookup_decl (v, ctx);
		  if (t)
		    v = t;
		  else
		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
				       TREE_TYPE (v), v,
				       build_int_cst (TREE_TYPE (v), 1));
		  sz = fold_build2_loc (clause_loc, MULT_EXPR,
					TREE_TYPE (v), t,
					TYPE_SIZE_UNIT (TREE_TYPE (type)));
		}
	      if (pass == 3)
		{
		  tree xv = create_tmp_var (ptr_type_node);
		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		    {
		      unsigned cnt = task_reduction_cnt - 1;
		      if (!task_reduction_needs_orig_p)
			cnt += (task_reduction_cntorig_full
				- task_reduction_cntorig);
		      else
			cnt = task_reduction_cntorig - 1;
		      x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				  size_int (cnt), NULL_TREE, NULL_TREE);

		      gimple *g = gimple_build_assign (xv, x);
		      gimple_seq_add_stmt (ilist, g);
		    }
		  else
		    {
		      unsigned int idx = *ctx->task_reduction_map->get (c);
		      tree off;
		      if (ctx->task_reductions[1 + idx])
			off = fold_convert (sizetype,
					    ctx->task_reductions[1 + idx]);
		      else
			off = task_reduction_read (ilist, tskred_temp,
						   sizetype,
						   7 + 3 * idx + 1);
		      gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
						       tskred_base, off);
		      gimple_seq_add_stmt (ilist, g);
		    }
		  x = fold_convert (build_pointer_type (boolean_type_node),
				    xv);
		  if (TREE_CONSTANT (v))
		    x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
				     TYPE_SIZE_UNIT (type));
		  else
		    {
		      tree t;
		      if (is_omp_target (ctx->stmt))
			t = NULL_TREE;
		      else
			t = maybe_lookup_decl (v, ctx);
		      if (t)
			v = t;
		      else
			v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		      gimplify_expr (&v, ilist, NULL, is_gimple_val,
				     fb_rvalue);
		      t = fold_build2_loc (clause_loc, PLUS_EXPR,
					   TREE_TYPE (v), v,
					   build_int_cst (TREE_TYPE (v), 1));
		      t = fold_build2_loc (clause_loc, MULT_EXPR,
					   TREE_TYPE (v), t,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		      x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
		    }
		  cond = create_tmp_var (TREE_TYPE (x));
		  gimplify_assign (cond, x, ilist);
		  x = xv;
		}
	      else if (lower_private_allocate (var, type, allocator,
					       allocate_ptr, ilist, ctx,
					       true,
					       TREE_CONSTANT (v)
					       ? TYPE_SIZE_UNIT (type)
					       : sz))
		x = allocate_ptr;
	      else if (TREE_CONSTANT (v))
		{
		  x = create_tmp_var_raw (type, name);
		  gimple_add_tmp_var (x);
		  TREE_ADDRESSABLE (x) = 1;
		  x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else
		{
		  tree atmp
		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
		  x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
		}

	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      x = fold_convert_loc (clause_loc, ptype, x);
	      tree y = create_tmp_var (ptype, name);
	      gimplify_assign (y, x, ilist);
	      x = y;
	      tree yb = y;

	      if (!integer_zerop (bias))
		{
		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
					   bias);
		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
					 x);
		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
					pointer_sized_int_node, yb, bias);
		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
		  yb = create_tmp_var (ptype, name);
		  gimplify_assign (yb, x, ilist);
		  x = yb;
		}

	      d = TREE_OPERAND (d, 0);
	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
		d = TREE_OPERAND (d, 0);
	      if (TREE_CODE (d) == ADDR_EXPR)
		{
		  if (orig_var != var)
		    {
		      gcc_assert (is_variable_sized (orig_var));
		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
					    x);
		      gimplify_assign (new_var, x, ilist);
		      tree new_orig_var = lookup_decl (orig_var, ctx);
		      tree t = build_fold_indirect_ref (new_var);
		      DECL_IGNORED_P (new_var) = 0;
		      TREE_THIS_NOTRAP (t) = 1;
		      SET_DECL_VALUE_EXPR (new_orig_var, t);
		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
		    }
		  else
		    {
		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
				  build_int_cst (ptype, 0));
		      SET_DECL_VALUE_EXPR (new_var, x);
		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		    }
		}
	      else
		{
		  gcc_assert (orig_var == var);
		  if (TREE_CODE (d) == INDIRECT_REF)
		    {
		      x = create_tmp_var (ptype, name);
		      TREE_ADDRESSABLE (x) = 1;
		      gimplify_assign (x, yb, ilist);
		      x = build_fold_addr_expr_loc (clause_loc, x);
		    }
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  gimplify_assign (new_var, x, ilist);
		}
	      /* GOMP_taskgroup_reduction_register memsets the whole
		 array to zero.  If the initializer is zero, we don't
		 need to initialize it again, just mark it as ever
		 used unconditionally, i.e. cond = true.  */
	      if (cond
		  && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
		  && initializer_zerop (omp_reduction_init (c,
							    TREE_TYPE (type))))
		{
		  gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
						   boolean_true_node);
		  gimple_seq_add_stmt (ilist, g);
		  continue;
		}
	      tree end = create_artificial_label (UNKNOWN_LOCATION);
	      if (cond)
		{
		  gimple *g;
		  if (!is_parallel_ctx (ctx))
		    {
		      tree condv = create_tmp_var (boolean_type_node);
		      g = gimple_build_assign (condv,
					       build_simple_mem_ref (cond));
		      gimple_seq_add_stmt (ilist, g);
		      tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
		      g = gimple_build_cond (NE_EXPR, condv,
					     boolean_false_node, end, lab1);
		      gimple_seq_add_stmt (ilist, g);
		      gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
		    }
		  g = gimple_build_assign (build_simple_mem_ref (cond),
					   boolean_true_node);
		  gimple_seq_add_stmt (ilist, g);
		}

	      tree y1 = create_tmp_var (ptype);
	      gimplify_assign (y1, y, ilist);
	      tree i2 = NULL_TREE, y2 = NULL_TREE;
	      tree body2 = NULL_TREE, end2 = NULL_TREE;
	      tree y3 = NULL_TREE, y4 = NULL_TREE;
	      if (task_reduction_needs_orig_p)
		{
		  y3 = create_tmp_var (ptype);
		  tree ref;
		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		    ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				  size_int (task_reduction_cnt_full
					    + task_reduction_cntorig - 1),
				  NULL_TREE, NULL_TREE);
		  else
		    {
		      unsigned int idx = *ctx->task_reduction_map->get (c);
		      ref = task_reduction_read (ilist, tskred_temp, ptype,
						 7 + 3 * idx);
		    }
		  gimplify_assign (y3, ref, ilist);
		}
	      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
		{
		  y2 = create_tmp_var (ptype);
		  gimplify_assign (y2, y, ilist);
		}
	      if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		{
		  tree ref = build_outer_var_ref (var, ctx);
		  /* For ref build_outer_var_ref already performs this.  */
		  if (TREE_CODE (d) == INDIRECT_REF)
		    gcc_assert (omp_privatize_by_reference (var));
		  else if (TREE_CODE (d) == ADDR_EXPR)
		    ref = build_fold_addr_expr (ref);
		  else if (omp_privatize_by_reference (var))
		    ref = build_fold_addr_expr (ref);
		  ref = fold_convert_loc (clause_loc, ptype, ref);
		  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
		      && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		    {
		      y3 = create_tmp_var (ptype);
		      gimplify_assign (y3, unshare_expr (ref), ilist);
		    }
		  if (is_simd)
		    {
		      y4 = create_tmp_var (ptype);
		      gimplify_assign (y4, ref, dlist);
		    }
		}
	      tree i = create_tmp_var (TREE_TYPE (v));
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
	      if (y2)
		{
		  i2 = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0),
				   dlist);
		  body2 = create_artificial_label (UNKNOWN_LOCATION);
		  end2 = create_artificial_label (UNKNOWN_LOCATION);
		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
		}
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  SET_DECL_VALUE_EXPR (decl_placeholder,
				       build_simple_mem_ref (y1));
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  SET_DECL_VALUE_EXPR (placeholder,
				       y3 ? build_simple_mem_ref (y3)
				       : error_mark_node);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  x = lang_hooks.decls.omp_clause_default_ctor
			(c, build_simple_mem_ref (y1),
			 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
		  if (x)
		    gimplify_and_add (x, ilist);
		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		    {
		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (ilist, tseq);
		    }
		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		  if (is_simd)
		    {
		      SET_DECL_VALUE_EXPR (decl_placeholder,
					   build_simple_mem_ref (y2));
		      SET_DECL_VALUE_EXPR (placeholder,
					   build_simple_mem_ref (y4));
		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (dlist, tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    }
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
		  if (y2)
		    {
		      x = lang_hooks.decls.omp_clause_dtor
						(c, build_simple_mem_ref (y2));
		      if (x)
			gimplify_and_add (x, dlist);
		    }
		}
	      else
		{
		  x = omp_reduction_init (c, TREE_TYPE (type));
		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);

		  /* reduction(-:var) sums up the partial results, so it
		     acts identically to reduction(+:var).  */
		  if (code == MINUS_EXPR)
		    code = PLUS_EXPR;

		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
		  if (is_simd)
		    {
		      x = build2 (code, TREE_TYPE (type),
				  build_simple_mem_ref (y4),
				  build_simple_mem_ref (y2));
		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
		    }
		}
	      gimple *g
		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (ilist, g);
	      if (y3)
		{
		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  gimple_seq_add_stmt (ilist, g);
		}
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (ilist, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
	      gimple_seq_add_stmt (ilist, g);
	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
	      if (y2)
		{
		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  gimple_seq_add_stmt (dlist, g);
		  if (y4)
		    {
		      g = gimple_build_assign
					(y4, POINTER_PLUS_EXPR, y4,
					 TYPE_SIZE_UNIT (TREE_TYPE (type)));
		      gimple_seq_add_stmt (dlist, g);
		    }
		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
					   build_int_cst (TREE_TYPE (i2), 1));
		  gimple_seq_add_stmt (dlist, g);
		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
		  gimple_seq_add_stmt (dlist, g);
		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
		}
	      if (allocator)
		{
		  tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
		  g = gimple_build_call (f, 2, allocate_ptr, allocator);
		  gimple_seq_add_stmt (dlist, g);
		}
	      continue;
	    }
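	  /* The initialization emitted above amounts to a per-element loop
	     of the shape (illustration only):

	       i = 0;
	     body:
	       *y1 = <identity value or UDR constructor>;
	       y1 = y1 + <element size>;
	       i = i + 1;
	       if (i <= v) goto body;
	     end:

	     with an analogous loop over y2/y4 appended to DLIST that merges
	     the private copies back after the region.  */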
	  else if (pass == 2)
	    {
	      tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (is_global_var (out))
		x = var;
	      else if (is_omp_target (ctx->stmt))
		x = out;
	      else
		{
		  bool by_ref = use_pointer_for_field (var, ctx);
		  x = build_receiver_ref (var, by_ref, ctx);
		}
	      if (!omp_privatize_by_reference (var))
		x = build_fold_addr_expr (x);
	      x = fold_convert (ptr_type_node, x);
	      unsigned cnt = task_reduction_cnt - 1;
	      if (!task_reduction_needs_orig_p)
		cnt += task_reduction_cntorig_full - task_reduction_cntorig;
	      else
		cnt = task_reduction_cntorig - 1;
	      tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
			       size_int (cnt), NULL_TREE, NULL_TREE);
	      gimplify_assign (r, x, ilist);
	      continue;
	    }
	  else if (pass == 3)
	    {
	      tree type = TREE_TYPE (new_var);
	      if (!omp_privatize_by_reference (var))
		type = build_pointer_type (type);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		{
		  unsigned cnt = task_reduction_cnt - 1;
		  if (!task_reduction_needs_orig_p)
		    cnt += (task_reduction_cntorig_full
			    - task_reduction_cntorig);
		  else
		    cnt = task_reduction_cntorig - 1;
		  x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
			      size_int (cnt), NULL_TREE, NULL_TREE);
		}
	      else
		{
		  unsigned int idx = *ctx->task_reduction_map->get (c);
		  tree off;
		  if (ctx->task_reductions[1 + idx])
		    off = fold_convert (sizetype,
					ctx->task_reductions[1 + idx]);
		  else
		    off = task_reduction_read (ilist, tskred_temp, sizetype,
					       7 + 3 * idx + 1);
		  x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
				   tskred_base, off);
		}
	      x = fold_convert (type, x);
	      tree t;
	      if (omp_privatize_by_reference (var))
		{
		  gimplify_assign (new_var, x, ilist);
		  t = new_var;
		  new_var = build_simple_mem_ref (new_var);
		}
	      else
		{
		  t = create_tmp_var (type);
		  gimplify_assign (t, x, ilist);
		  SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      t = fold_convert (build_pointer_type (boolean_type_node), t);
	      t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
			       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      cond = create_tmp_var (TREE_TYPE (t));
	      gimplify_assign (cond, t, ilist);
	    }
	  else if (is_variable_sized (var))
	    {
	      /* For variable sized types, we need to allocate the
		 actual storage here.  Call alloca and store the
		 result in the pointer decl that we created elsewhere.  */
	      if (pass == 0)
		continue;

	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
		{
		  tree tmp;

		  ptr = DECL_VALUE_EXPR (new_var);
		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
		  ptr = TREE_OPERAND (ptr, 0);
		  gcc_assert (DECL_P (ptr));
		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));

		  if (lower_private_allocate (var, new_var, allocator,
					      allocate_ptr, ilist, ctx,
					      false, x))
		    tmp = allocate_ptr;
		  else
		    {
		      /* void *tmp = __builtin_alloca */
		      tree atmp
			= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		      gcall *stmt
			= gimple_build_call (atmp, 2, x,
					     size_int (DECL_ALIGN (var)));
		      cfun->calls_alloca = 1;
		      tmp = create_tmp_var_raw (ptr_type_node);
		      gimple_add_tmp_var (tmp);
		      gimple_call_set_lhs (stmt, tmp);

		      gimple_seq_add_stmt (ilist, stmt);
		    }

		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
		  gimplify_assign (ptr, x, ilist);
		}
	    }
	  else if (omp_privatize_by_reference (var)
		   && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
		       || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
	    {
	      /* For references that are being privatized for Fortran,
		 allocate new backing storage for the new pointer
		 variable.  This allows us to avoid changing all the
		 code that expects a pointer to something that expects
		 a direct variable.  */
	      if (pass == 0)
		continue;

	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
		{
		  x = build_receiver_ref (var, false, ctx);
		  if (ctx->allocate_map)
		    if (tree *allocatep = ctx->allocate_map->get (var))
		      {
			allocator = *allocatep;
			if (TREE_CODE (allocator) == TREE_LIST)
			  allocator = TREE_PURPOSE (allocator);
			if (TREE_CODE (allocator) != INTEGER_CST)
			  allocator = build_outer_var_ref (allocator, ctx);
			allocator = fold_convert (pointer_sized_int_node,
						  allocator);
			allocate_ptr = unshare_expr (x);
		      }
		  if (allocator == NULL_TREE)
		    x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else if (lower_private_allocate (var, new_var, allocator,
					       allocate_ptr,
					       ilist, ctx, true, x))
		x = allocate_ptr;
	      else if (TREE_CONSTANT (x))
		{
		  /* For reduction in SIMD loop, defer adding the
		     initialization of the reference, because if we decide
		     to use SIMD array for it, the initialization could cause
		     expansion ICE.  Ditto for other privatization clauses.  */
		  if (is_simd)
		    x = NULL_TREE;
		  else
		    {
		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					      get_name (var));
		      gimple_add_tmp_var (x);
		      TREE_ADDRESSABLE (x) = 1;
		      x = build_fold_addr_expr_loc (clause_loc, x);
		    }
		}
	      else
		{
		  tree atmp
		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
		  tree al = size_int (TYPE_ALIGN (rtype));
		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		}

	      if (x)
		{
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  gimplify_assign (new_var, x, ilist);
		}

	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	    }
	  else if ((c_kind == OMP_CLAUSE_REDUCTION
		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      if (pass == 0)
		continue;
	    }
	  else if (pass != 0)
	    continue;
5893 switch (OMP_CLAUSE_CODE (c
))
5895 case OMP_CLAUSE_SHARED
:
5896 /* Ignore shared directives in teams construct inside
5897 target construct. */
5898 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5899 && !is_host_teams_ctx (ctx
))
5901 /* Shared global vars are just accessed directly. */
5902 if (is_global_var (new_var
))
5904 /* For taskloop firstprivate/lastprivate, represented
5905 as firstprivate and shared clause on the task, new_var
5906 is the firstprivate var. */
5907 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5909 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5910 needs to be delayed until after fixup_child_record_type so
5911 that we get the correct type during the dereference. */
5912 by_ref
= use_pointer_for_field (var
, ctx
);
5913 x
= build_receiver_ref (var
, by_ref
, ctx
);
5914 SET_DECL_VALUE_EXPR (new_var
, x
);
5915 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5917 /* ??? If VAR is not passed by reference, and the variable
5918 hasn't been initialized yet, then we'll get a warning for
5919 the store into the omp_data_s structure. Ideally, we'd be
5920 able to notice this and not store anything at all, but
5921 we're generating code too early. Suppress the warning. */
5923 suppress_warning (var
, OPT_Wuninitialized
);
5926 case OMP_CLAUSE__CONDTEMP_
:
5927 if (is_parallel_ctx (ctx
))
5929 x
= build_receiver_ref (var
, false, ctx
);
5930 SET_DECL_VALUE_EXPR (new_var
, x
);
5931 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5933 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
5935 x
= build_zero_cst (TREE_TYPE (var
));
5940 case OMP_CLAUSE_LASTPRIVATE
:
5941 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5945 case OMP_CLAUSE_PRIVATE
:
5946 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
5947 x
= build_outer_var_ref (var
, ctx
);
5948 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5950 if (is_task_ctx (ctx
))
5951 x
= build_receiver_ref (var
, false, ctx
);
5953 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
5961 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
5962 ilist
, ctx
, false, NULL_TREE
);
5963 nx
= unshare_expr (new_var
);
5965 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5966 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
5969 nx
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, nx
, x
);
5971 nx
= lang_hooks
.decls
.omp_clause_default_ctor (c
, nx
, x
);
5974 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5975 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
5976 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5977 && (gimple_omp_for_collapse (ctx
->stmt
) != 1
5978 || (gimple_omp_for_index (ctx
->stmt
, 0)
5980 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
5981 || omp_privatize_by_reference (var
))
5982 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5985 if (omp_privatize_by_reference (var
))
5987 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5988 tree new_vard
= TREE_OPERAND (new_var
, 0);
5989 gcc_assert (DECL_P (new_vard
));
5990 SET_DECL_VALUE_EXPR (new_vard
,
5991 build_fold_addr_expr (lvar
));
5992 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5997 tree iv
= unshare_expr (ivar
);
5999 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
,
6002 x
= lang_hooks
.decls
.omp_clause_default_ctor (c
,
6006 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
6008 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
6009 unshare_expr (ivar
), x
);
6013 gimplify_and_add (x
, &llist
[0]);
6014 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6015 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6020 gcc_assert (TREE_CODE (v
) == MEM_REF
);
6021 v
= TREE_OPERAND (v
, 0);
6022 gcc_assert (DECL_P (v
));
6024 v
= *ctx
->lastprivate_conditional_map
->get (v
);
6025 tree t
= create_tmp_var (TREE_TYPE (v
));
6026 tree z
= build_zero_cst (TREE_TYPE (v
));
6028 = build_outer_var_ref (var
, ctx
,
6029 OMP_CLAUSE_LASTPRIVATE
);
6030 gimple_seq_add_stmt (dlist
,
6031 gimple_build_assign (t
, z
));
6032 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
6033 tree civar
= DECL_VALUE_EXPR (v
);
6034 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
6035 civar
= unshare_expr (civar
);
6036 TREE_OPERAND (civar
, 1) = sctx
.idx
;
6037 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
6038 unshare_expr (civar
));
6039 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
6040 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
6041 orig_v
, unshare_expr (ivar
)));
6042 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
6044 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
6046 gimple_seq tseq
= NULL
;
6047 gimplify_and_add (x
, &tseq
);
6049 lower_omp (&tseq
, ctx
->outer
);
6050 gimple_seq_add_seq (&llist
[1], tseq
);
6052 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6053 && ctx
->for_simd_scan_phase
)
6055 x
= unshare_expr (ivar
);
6057 = build_outer_var_ref (var
, ctx
,
6058 OMP_CLAUSE_LASTPRIVATE
);
6059 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
6061 gimplify_and_add (x
, &llist
[0]);
6065 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6067 gimplify_and_add (y
, &llist
[1]);
6071 if (omp_privatize_by_reference (var
))
6073 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6074 tree new_vard
= TREE_OPERAND (new_var
, 0);
6075 gcc_assert (DECL_P (new_vard
));
6076 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6077 x
= TYPE_SIZE_UNIT (type
);
6078 if (TREE_CONSTANT (x
))
6080 x
= create_tmp_var_raw (type
, get_name (var
));
6081 gimple_add_tmp_var (x
);
6082 TREE_ADDRESSABLE (x
) = 1;
6083 x
= build_fold_addr_expr_loc (clause_loc
, x
);
6084 x
= fold_convert_loc (clause_loc
,
6085 TREE_TYPE (new_vard
), x
);
6086 gimplify_assign (new_vard
, x
, ilist
);
6091 gimplify_and_add (nx
, ilist
);
6092 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6094 && ctx
->for_simd_scan_phase
)
6096 tree orig_v
= build_outer_var_ref (var
, ctx
,
6097 OMP_CLAUSE_LASTPRIVATE
);
6098 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
6100 gimplify_and_add (x
, ilist
);
6105 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
6107 gimplify_and_add (x
, dlist
);
6110 if (!is_gimple_val (allocator
))
6112 tree avar
= create_tmp_var (TREE_TYPE (allocator
));
6113 gimplify_assign (avar
, allocator
, dlist
);
6116 if (!is_gimple_val (allocate_ptr
))
6118 tree apvar
= create_tmp_var (TREE_TYPE (allocate_ptr
));
6119 gimplify_assign (apvar
, allocate_ptr
, dlist
);
6120 allocate_ptr
= apvar
;
6122 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
6124 = gimple_build_call (f
, 2, allocate_ptr
, allocator
);
6125 gimple_seq_add_stmt (dlist
, g
);
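	      /* Illustrative note (a sketch, not part of the original
		 sources): when an allocate clause selected an allocator
		 for this privatized variable, lower_private_allocate
		 above obtained the storage, and the call appended to
		 DLIST here releases it on region exit, roughly

		   GOMP_free (allocate_ptr, allocator);  */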
	case OMP_CLAUSE_LINEAR:
	  if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	    goto do_firstprivate;
	  if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
	    x = build_outer_var_ref (var, ctx);
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (is_task_ctx (ctx))
	      if ((omp_privatize_by_reference (var)
		   && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
		  || is_variable_sized (var))
	      else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
		       || use_pointer_for_field (var, NULL))
		  x = build_receiver_ref (var, false, ctx);
		  if (ctx->allocate_map)
		    if (tree *allocatep = ctx->allocate_map->get (var))
			allocator = *allocatep;
			if (TREE_CODE (allocator) == TREE_LIST)
			  allocator = TREE_PURPOSE (allocator);
			if (TREE_CODE (allocator) != INTEGER_CST)
			  allocator = build_outer_var_ref (allocator, ctx);
			allocator = fold_convert (pointer_sized_int_node,
			allocate_ptr = unshare_expr (x);
			x = build_simple_mem_ref (x);
			TREE_THIS_NOTRAP (x) = 1;
		  SET_DECL_VALUE_EXPR (new_var, x);
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
	      && omp_privatize_by_reference (var))
	      x = build_outer_var_ref (var, ctx);
	      gcc_assert (TREE_CODE (x) == MEM_REF
			  && integer_zerop (TREE_OPERAND (x, 1)));
	      x = TREE_OPERAND (x, 0);
	      x = lang_hooks.decls.omp_clause_copy_ctor
		    (c, unshare_expr (new_var), x);
	      gimplify_and_add (x, ilist);
	  lower_private_allocate (var, new_var, allocator, allocate_ptr,
				  ilist, ctx, false, NULL_TREE);
	  x = build_outer_var_ref (var, ctx);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && gimple_omp_for_combined_into_p (ctx->stmt))
	      tree t = OMP_CLAUSE_LINEAR_STEP (c);
	      tree stept = TREE_TYPE (t);
	      tree ct = omp_find_clause (clauses,
					 OMP_CLAUSE__LOOPTEMP_);
	      tree l = OMP_CLAUSE_DECL (ct);
	      tree n1 = fd->loop.n1;
	      tree step = fd->loop.step;
	      tree itype = TREE_TYPE (l);
	      if (POINTER_TYPE_P (itype))
		itype = signed_type_for (itype);
	      l = fold_build2 (MINUS_EXPR, itype, l, n1);
	      if (TYPE_UNSIGNED (itype)
		  && fd->loop.cond_code == GT_EXPR)
		l = fold_build2 (TRUNC_DIV_EXPR, itype,
				 fold_build1 (NEGATE_EXPR, itype, l),
				 fold_build1 (NEGATE_EXPR,
		l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
	      t = fold_build2 (MULT_EXPR, stept,
			       fold_convert (stept, l), t);
	      if (OMP_CLAUSE_LINEAR_ARRAY (c))
		  if (omp_privatize_by_reference (var))
		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
		      tree new_vard = TREE_OPERAND (new_var, 0);
		      gcc_assert (DECL_P (new_vard));
		      tree type = TREE_TYPE (TREE_TYPE (new_vard));
		      nx = TYPE_SIZE_UNIT (type);
		      if (TREE_CONSTANT (nx))
			  nx = create_tmp_var_raw (type,
			  gimple_add_tmp_var (nx);
			  TREE_ADDRESSABLE (nx) = 1;
			  nx = build_fold_addr_expr_loc (clause_loc,
			  nx = fold_convert_loc (clause_loc,
						 TREE_TYPE (new_vard),
			  gimplify_assign (new_vard, nx, ilist);
		  x = lang_hooks.decls.omp_clause_linear_ctor
		  gimplify_and_add (x, ilist);
	      if (POINTER_TYPE_P (TREE_TYPE (x)))
		x = fold_build2 (POINTER_PLUS_EXPR,
				 TREE_TYPE (x), x, t);
		x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
	  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
	       || TREE_ADDRESSABLE (new_var)
	       || omp_privatize_by_reference (var))
	      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
	      if (omp_privatize_by_reference (var))
		  gcc_assert (TREE_CODE (new_var) == MEM_REF);
		  tree new_vard = TREE_OPERAND (new_var, 0);
		  gcc_assert (DECL_P (new_vard));
		  SET_DECL_VALUE_EXPR (new_vard,
				       build_fold_addr_expr (lvar));
		  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
		  tree iv = create_tmp_var (TREE_TYPE (new_var));
		  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
		  gimplify_and_add (x, ilist);
		  gimple_stmt_iterator gsi
		    = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
		    = gimple_build_assign (unshare_expr (lvar), iv);
		  gsi_insert_before_without_update (&gsi, g,
		  tree t = OMP_CLAUSE_LINEAR_STEP (c);
		  enum tree_code code = PLUS_EXPR;
		  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
		    code = POINTER_PLUS_EXPR;
		  g = gimple_build_assign (iv, code, iv, t);
		  gsi_insert_before_without_update (&gsi, g,
	      x = lang_hooks.decls.omp_clause_copy_ctor
		    (c, unshare_expr (ivar), x);
	      gimplify_and_add (x, &llist[0]);
	      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
		gimplify_and_add (x, &llist[1]);
	  if (omp_privatize_by_reference (var))
	      gcc_assert (TREE_CODE (new_var) == MEM_REF);
	      tree new_vard = TREE_OPERAND (new_var, 0);
	      gcc_assert (DECL_P (new_vard));
	      tree type = TREE_TYPE (TREE_TYPE (new_vard));
	      nx = TYPE_SIZE_UNIT (type);
	      if (TREE_CONSTANT (nx))
		  nx = create_tmp_var_raw (type, get_name (var));
		  gimple_add_tmp_var (nx);
		  TREE_ADDRESSABLE (nx) = 1;
		  nx = build_fold_addr_expr_loc (clause_loc, nx);
		  nx = fold_convert_loc (clause_loc,
					 TREE_TYPE (new_vard), nx);
		  gimplify_assign (new_vard, nx, ilist);
	  x = lang_hooks.decls.omp_clause_copy_ctor
		(c, unshare_expr (new_var), x);
	  gimplify_and_add (x, ilist);
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  x = build_outer_var_ref (var, ctx);
	  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
	  gimplify_and_add (x, ilist);
	case OMP_CLAUSE_COPYIN:
	  by_ref = use_pointer_for_field (var, NULL);
	  x = build_receiver_ref (var, by_ref, ctx);
	  x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
	  append_to_statement_list (x, &copyin_seq);
	  copyin_by_ref |= by_ref;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  /* OpenACC reductions are initialized using the
	     GOACC_REDUCTION internal function.  */
	  if (is_gimple_omp_oacc (ctx->stmt))
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree ptype = TREE_TYPE (placeholder);
	      x = error_mark_node;
	      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
		  && !task_reduction_needs_orig_p)
	      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		  tree pptype = build_pointer_type (ptype);
		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		    x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				size_int (task_reduction_cnt_full
					  + task_reduction_cntorig - 1),
				NULL_TREE, NULL_TREE);
		    = *ctx->task_reduction_map->get (c);
		    x = task_reduction_read (ilist, tskred_temp,
					     pptype, 7 + 3 * idx);
		  x = fold_convert (pptype, x);
		  x = build_simple_mem_ref (x);
	      lower_private_allocate (var, new_var, allocator,
				      allocate_ptr, ilist, ctx, false,
	      x = build_outer_var_ref (var, ctx);
	      if (omp_privatize_by_reference (var)
		  && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
		x = build_fold_addr_expr_loc (clause_loc, x);
	      SET_DECL_VALUE_EXPR (placeholder, x);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      tree new_vard = new_var;
	      if (omp_privatize_by_reference (var))
		  gcc_assert (TREE_CODE (new_var) == MEM_REF);
		  new_vard = TREE_OPERAND (new_var, 0);
		  gcc_assert (DECL_P (new_vard));
	      tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
		  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_INSCAN (c))
		  && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
		  if (new_vard == new_var)
		      gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
		      SET_DECL_VALUE_EXPR (new_var, ivar);
		      SET_DECL_VALUE_EXPR (new_vard,
					   build_fold_addr_expr (ivar));
		      DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
		  x = lang_hooks.decls.omp_clause_default_ctor
			(c, unshare_expr (ivar),
			 build_outer_var_ref (var, ctx));
		  if (rvarp && ctx->for_simd_scan_phase)
		      gimplify_and_add (x, &llist[0]);
		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
			gimplify_and_add (x, &llist[1]);
		      gimplify_and_add (x, &llist[0]);
		      tree ivar2 = unshare_expr (lvar);
		      TREE_OPERAND (ivar2, 1) = sctx.idx;
		      x = lang_hooks.decls.omp_clause_default_ctor
			    (c, ivar2, build_outer_var_ref (var, ctx));
		      gimplify_and_add (x, &llist[0]);
		      x = lang_hooks.decls.omp_clause_default_ctor
			    (c, unshare_expr (rvar2),
			     build_outer_var_ref (var, ctx));
		      gimplify_and_add (x, &llist[0]);
		      /* For types that need construction, add another
			 private var which will be default constructed
			 and optionally initialized with
			 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
			 loop we want to assign this value instead of
			 constructing and destructing it in each ...  */
		      tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
		      gimple_add_tmp_var (nv);
		      ctx->cb.decl_map->put (TREE_OPERAND (rvar2
		      x = lang_hooks.decls.omp_clause_default_ctor
			    (c, nv, build_outer_var_ref (var, ctx));
		      gimplify_and_add (x, ilist);
		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			  x = DECL_VALUE_EXPR (new_vard);
			  if (new_vard != new_var)
			    vexpr = build_fold_addr_expr (nv);
			  SET_DECL_VALUE_EXPR (new_vard, vexpr);
			  lower_omp (&tseq, ctx);
			  SET_DECL_VALUE_EXPR (new_vard, x);
			  gimple_seq_add_seq (ilist, tseq);
			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      x = lang_hooks.decls.omp_clause_dtor (c, nv);
			gimplify_and_add (x, dlist);
		      tree ref = build_outer_var_ref (var, ctx);
		      x = unshare_expr (ivar);
		      x = lang_hooks.decls.omp_clause_assign_op (c, x,
		      gimplify_and_add (x, &llist[0]);
		      ref = build_outer_var_ref (var, ctx);
		      x = lang_hooks.decls.omp_clause_assign_op (c, ref,
		      gimplify_and_add (x, &llist[3]);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		  if (new_vard == new_var)
		    SET_DECL_VALUE_EXPR (new_var, lvar);
		    SET_DECL_VALUE_EXPR (new_vard,
					 build_fold_addr_expr (lvar));
		  x = lang_hooks.decls.omp_clause_dtor (c, ivar);
		    gimplify_and_add (x, &llist[1]);
		      tree ivar2 = unshare_expr (lvar);
		      TREE_OPERAND (ivar2, 1) = sctx.idx;
		      x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
			gimplify_and_add (x, &llist[1]);
		      x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
			gimplify_and_add (x, &llist[1]);
		  gimplify_and_add (x, &llist[0]);
		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (&llist[0], tseq);
		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		  lower_omp (&tseq, ctx);
		  gimple_seq_add_seq (&llist[1], tseq);
		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		  if (new_vard == new_var)
		    SET_DECL_VALUE_EXPR (new_var, lvar);
		    SET_DECL_VALUE_EXPR (new_vard,
					 build_fold_addr_expr (lvar));
		  x = lang_hooks.decls.omp_clause_dtor (c, ivar);
		    gimplify_and_add (x, &llist[1]);
	      /* If this is a reference to constant size reduction var
		 with placeholder, we haven't emitted the initializer
		 for it because it is undesirable if SIMD arrays are used.
		 But if they aren't used, we need to emit the deferred
		 initialization now.  */
	      else if (omp_privatize_by_reference (var) && is_simd)
		handle_simd_reference (clause_loc, new_vard, ilist);
	      tree lab2 = NULL_TREE;
	      if (!is_parallel_ctx (ctx))
		  tree condv = create_tmp_var (boolean_type_node);
		  tree m = build_simple_mem_ref (cond);
		  g = gimple_build_assign (condv, m);
		  gimple_seq_add_stmt (ilist, g);
		    = create_artificial_label (UNKNOWN_LOCATION);
		  lab2 = create_artificial_label (UNKNOWN_LOCATION);
		  g = gimple_build_cond (NE_EXPR, condv,
		  gimple_seq_add_stmt (ilist, g);
		  gimple_seq_add_stmt (ilist,
				       gimple_build_label (lab1));
		  g = gimple_build_assign (build_simple_mem_ref (cond),
		  gimple_seq_add_stmt (ilist, g);
	      x = lang_hooks.decls.omp_clause_default_ctor
		    (c, unshare_expr (new_var),
		     : build_outer_var_ref (var, ctx));
		gimplify_and_add (x, ilist);
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_INSCAN (c))
		  if (ctx->for_simd_scan_phase)
		      && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
		      tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
		      gimple_add_tmp_var (nv);
		      ctx->cb.decl_map->put (new_vard, nv);
		      x = lang_hooks.decls.omp_clause_default_ctor
			    (c, nv, build_outer_var_ref (var, ctx));
			gimplify_and_add (x, ilist);
		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			  if (new_vard != new_var)
			    vexpr = build_fold_addr_expr (nv);
			  SET_DECL_VALUE_EXPR (new_vard, vexpr);
			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
			  lower_omp (&tseq, ctx);
			  SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
			  DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
			  gimple_seq_add_seq (ilist, tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      if (is_simd && ctx->scan_exclusive)
			  = create_tmp_var_raw (TREE_TYPE (new_var));
			  gimple_add_tmp_var (nv2);
			  ctx->cb.decl_map->put (nv, nv2);
			  x = lang_hooks.decls.omp_clause_default_ctor
				(c, nv2, build_outer_var_ref (var, ctx));
			  gimplify_and_add (x, ilist);
			  x = lang_hooks.decls.omp_clause_dtor (c, nv2);
			    gimplify_and_add (x, dlist);
		      x = lang_hooks.decls.omp_clause_dtor (c, nv);
			gimplify_and_add (x, dlist);
		      && ctx->scan_exclusive
		      && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
		      tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
		      gimple_add_tmp_var (nv2);
		      ctx->cb.decl_map->put (new_vard, nv2);
		      x = lang_hooks.decls.omp_clause_dtor (c, nv2);
			gimplify_and_add (x, dlist);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
	      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		  if (c_kind == OMP_CLAUSE_IN_REDUCTION
		      && is_omp_target (ctx->stmt))
		      tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
		      tree oldv = NULL_TREE;
		      if (DECL_HAS_VALUE_EXPR_P (d))
			oldv = DECL_VALUE_EXPR (d);
		      SET_DECL_VALUE_EXPR (d, new_vard);
		      DECL_HAS_VALUE_EXPR_P (d) = 1;
		      lower_omp (&tseq, ctx);
			SET_DECL_VALUE_EXPR (d, oldv);
			  SET_DECL_VALUE_EXPR (d, NULL_TREE);
			  DECL_HAS_VALUE_EXPR_P (d) = 0;
		    lower_omp (&tseq, ctx);
		  gimple_seq_add_seq (ilist, tseq);
	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
	      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
	      lower_omp (&tseq, ctx);
	      gimple_seq_add_seq (dlist, tseq);
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
	      x = omp_reduction_init (c, TREE_TYPE (new_var));
	      gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
	      enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
		  tree lab2 = NULL_TREE;
		  /* GOMP_taskgroup_reduction_register memsets the whole
		     array to zero.  If the initializer is zero, we don't
		     need to initialize it again, just mark it as ever
		     used unconditionally, i.e. cond = true.  */
		  if (initializer_zerop (x))
		      g = gimple_build_assign (build_simple_mem_ref (cond),
		      gimple_seq_add_stmt (ilist, g);
		  /* ... if (!cond) { cond = true; new_var = x; }  */
		  if (!is_parallel_ctx (ctx))
		      tree condv = create_tmp_var (boolean_type_node);
		      tree m = build_simple_mem_ref (cond);
		      g = gimple_build_assign (condv, m);
		      gimple_seq_add_stmt (ilist, g);
			= create_artificial_label (UNKNOWN_LOCATION);
		      lab2 = create_artificial_label (UNKNOWN_LOCATION);
		      g = gimple_build_cond (NE_EXPR, condv,
		      gimple_seq_add_stmt (ilist, g);
		      gimple_seq_add_stmt (ilist,
					   gimple_build_label (lab1));
		  g = gimple_build_assign (build_simple_mem_ref (cond),
		  gimple_seq_add_stmt (ilist, g);
		  gimplify_assign (new_var, x, ilist);
		    gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
	      /* reduction(-:var) sums up the partial results, so it
		 acts identically to reduction(+:var).  */
	      if (code == MINUS_EXPR)
		= (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
	      tree new_vard = new_var;
	      if (is_simd && omp_privatize_by_reference (var))
		  gcc_assert (TREE_CODE (new_var) == MEM_REF);
		  new_vard = TREE_OPERAND (new_var, 0);
		  gcc_assert (DECL_P (new_vard));
	      tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
		  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_INSCAN (c))
		  && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
		  if (new_vard != new_var)
		      SET_DECL_VALUE_EXPR (new_vard,
					   build_fold_addr_expr (lvar));
		      DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
		  tree ref = build_outer_var_ref (var, ctx);
		  if (ctx->for_simd_scan_phase)
		      gimplify_assign (ivar, ref, &llist[0]);
		      ref = build_outer_var_ref (var, ctx);
		      gimplify_assign (ref, rvar, &llist[3]);
		    gimplify_assign (unshare_expr (ivar), x, &llist[0]);
		      simt_lane = create_tmp_var (unsigned_type_node);
		      x = build_call_expr_internal_loc
			(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
			 TREE_TYPE (ivar), 2, ivar, simt_lane);
		      /* Make sure x is evaluated unconditionally.  */
		      tree bfly_var = create_tmp_var (TREE_TYPE (ivar));
		      gimplify_assign (bfly_var, x, &llist[2]);
		      x = build2 (code, TREE_TYPE (ivar), ivar, bfly_var);
		      gimplify_assign (ivar, x, &llist[2]);
		      tree zero = build_zero_cst (TREE_TYPE (ivar));
		      ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
					       boolean_type_node, ivar,
		      ref2 = fold_build2_loc (clause_loc, NE_EXPR,
					      boolean_type_node, ref,
		  x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
		    x = fold_convert (TREE_TYPE (ref), x);
		  ref = build_outer_var_ref (var, ctx);
		  gimplify_assign (ref, x, &llist[1]);
		  lower_private_allocate (var, new_var, allocator,
					  allocate_ptr, ilist, ctx,
		  if (omp_privatize_by_reference (var) && is_simd)
		    handle_simd_reference (clause_loc, new_vard, ilist);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      && OMP_CLAUSE_REDUCTION_INSCAN (c))
		    gimplify_assign (new_var, x, ilist);
		      tree ref = build_outer_var_ref (var, ctx);
		      tree new_var2 = new_var;
			  tree zero = build_zero_cst (TREE_TYPE (new_var));
			    = fold_build2_loc (clause_loc, NE_EXPR,
					       boolean_type_node, new_var,
			  ref2 = fold_build2_loc (clause_loc, NE_EXPR,
						  boolean_type_node, ref,
		      x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
			x = fold_convert (TREE_TYPE (new_var), x);
		      ref = build_outer_var_ref (var, ctx);
		      gimplify_assign (ref, x, dlist);
      tree clobber = build_clobber (TREE_TYPE (tskred_avar));
      gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
  if (known_eq (sctx.max_vf, 1U))
      sctx.is_simt = false;
      if (ctx->lastprivate_conditional_map)
	  if (gimple_omp_for_combined_into_p (ctx->stmt))
	      /* Signal to lower_omp_1 that it should use parent context.  */
	      ctx->combined_into_simd_safelen1 = true;
	      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		    && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
		    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
		    omp_context *outer = ctx->outer;
		    if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
		      outer = outer->outer;
		    tree *v = ctx->lastprivate_conditional_map->get (o);
		    tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
		    tree *pv = outer->lastprivate_conditional_map->get (po);
	      /* When not vectorized, treat lastprivate(conditional:) like
		 normal lastprivate, as there will be just one simd lane
		 writing the privatized variable.  */
	      delete ctx->lastprivate_conditional_map;
	      ctx->lastprivate_conditional_map = NULL;
  if (nonconst_simd_if)
      if (sctx.lane == NULL_TREE)
	  sctx.idx = create_tmp_var (unsigned_type_node);
	  sctx.lane = create_tmp_var (unsigned_type_node);
	  /* FIXME: For now.  */
	  sctx.is_simt = false;
  if (sctx.lane || sctx.is_simt)
      uid = create_tmp_var (ptr_type_node, "simduid");
      /* Don't want uninit warnings on simduid, it is always uninitialized,
	 but we use it not for the value, but for the DECL_UID only.  */
      suppress_warning (uid, OPT_Wuninitialized);
      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
      OMP_CLAUSE__SIMDUID__DECL (c) = uid;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
      gimple_omp_for_set_clauses (ctx->stmt, c);
      /* Emit calls denoting privatized variables and initializing a pointer to
	 structure that holds private variables as fields after ompdevlow pass.  */
	  sctx.simt_eargs[0] = uid;
	    = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER,
					      sctx.simt_eargs);
	  gimple_call_set_lhs (g, uid);
	  gimple_seq_add_stmt (ilist, g);
	  sctx.simt_eargs.release ();
	  simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
	  g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
	  gimple_call_set_lhs (g, simtrec);
	  gimple_seq_add_stmt (ilist, g);
	  gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
						  2 + (nonconst_simd_if != NULL),
						  uid, integer_zero_node,
	  gimple_call_set_lhs (g, sctx.lane);
	  gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
	  gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (sctx.lane, INTEGER_CST,
				   build_int_cst (unsigned_type_node, 0));
	  gimple_seq_add_stmt (ilist, g);
	  g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
	  gimple_call_set_lhs (g, sctx.lastlane);
	  gimple_seq_add_stmt (dlist, g);
	  gimple_seq_add_seq (dlist, llist[3]);
      /* Emit reductions across SIMT lanes in log_2(simt_vf) steps.  */
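      /* Schematically (an illustrative sketch of the statements emitted
	 just below; OP stands for the reduction operation whose combining
	 statements were queued in llist[2]):

	   simt_lane = 1;
	   goto header;
	 body:
	   ;; llist[2]: bfly = GOMP_SIMT_XCHG_BFLY (ivar, simt_lane);
	   ;;           ivar = ivar OP bfly;
	   simt_lane = simt_lane << 1;
	 header:
	   if (simt_lane < simt_vf) goto body; else goto end;
	 end:;  */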
	  tree simt_vf = create_tmp_var (unsigned_type_node);
	  g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
	  gimple_call_set_lhs (g, simt_vf);
	  gimple_seq_add_stmt (dlist, g);

	  tree t = build_int_cst (unsigned_type_node, 1);
	  g = gimple_build_assign (simt_lane, INTEGER_CST, t);
	  gimple_seq_add_stmt (dlist, g);

	  t = build_int_cst (unsigned_type_node, 0);
	  g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
	  gimple_seq_add_stmt (dlist, g);

	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree header = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (dlist, gimple_build_goto (header));
	  gimple_seq_add_stmt (dlist, gimple_build_label (body));

	  gimple_seq_add_seq (dlist, llist[2]);

	  g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane,
				   integer_one_node);
	  gimple_seq_add_stmt (dlist, g);

	  gimple_seq_add_stmt (dlist, gimple_build_label (header));
	  g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
	  gimple_seq_add_stmt (dlist, g);

	  gimple_seq_add_stmt (dlist, gimple_build_label (end));

      for (int i = 0; i < 2; i++)
	  tree vf = create_tmp_var (unsigned_type_node);
	  g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
	  gimple_call_set_lhs (g, vf);
	  gimple_seq *seq = i == 0 ? ilist : dlist;
	  gimple_seq_add_stmt (seq, g);
	  tree t = build_int_cst (unsigned_type_node, 0);
	  g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
	  gimple_seq_add_stmt (seq, g);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree header = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (seq, gimple_build_goto (header));
	  gimple_seq_add_stmt (seq, gimple_build_label (body));
	  gimple_seq_add_seq (seq, llist[i]);
	  t = build_int_cst (unsigned_type_node, 1);
	  g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
	  gimple_seq_add_stmt (seq, g);
	  gimple_seq_add_stmt (seq, gimple_build_label (header));
	  g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
	  gimple_seq_add_stmt (seq, g);
	  gimple_seq_add_stmt (seq, gimple_build_label (end));

      gimple_seq_add_seq (dlist, sctx.simt_dlist);
	= gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
      gimple_seq_add_stmt (dlist, g);

  /* The copyin sequence is not to be executed by the main thread, since
     that would result in self-copies.  Perhaps not visible to scalars,
     but it certainly is to C++ operator=.  */
      x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
      x = build2 (NE_EXPR, boolean_type_node, x,
		  build_int_cst (TREE_TYPE (x), 0));
      x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
      gimplify_and_add (x, ilist);
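      /* The guard built above is equivalent to

	   if (__builtin_omp_get_thread_num () != 0)
	     { copyin_seq }

	 so only the non-master threads copy the master's values in.  */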
  /* If any copyin variable is passed by reference, we must ensure the
     master thread doesn't modify it before it is copied over in all
     threads.  Similarly for variables in both firstprivate and
     lastprivate clauses we need to ensure the lastprivate copying
     happens after firstprivate copying in all threads.  And similarly
     for UDRs if initializer expression refers to omp_orig.  */
  if (copyin_by_ref || lastprivate_firstprivate
      || (reduction_omp_orig_ref
	  && !ctx->scan_inclusive
	  && !ctx->scan_exclusive))
      /* Don't add any barrier for #pragma omp simd or
	 #pragma omp distribute.  */
      if (!is_task_ctx (ctx)
	  && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
	      || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
	gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));

  /* If max_vf is non-zero, then we can use only a vectorization factor
     up to the max_vf we chose.  So stick it into the safelen clause.  */
  if (maybe_ne (sctx.max_vf, 0U))
      tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				OMP_CLAUSE_SAFELEN);
      poly_uint64 safe_len;
	  || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
	      && maybe_gt (safe_len, sctx.max_vf)))
	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
	  gimple_omp_for_set_clauses (ctx->stmt, c);
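  /* Illustrative example (values hypothetical): with a chosen max_vf of 8,
     a pre-existing safelen(16) is, in effect, tightened to safelen(8), so
     the vectorizer never uses more lanes than the privatized SIMD arrays
     provide.  */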
/* Create temporary variables for lastprivate(conditional:) implementation
   in context CTX with CLAUSES.  */
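/* Illustration: for

     #pragma omp for lastprivate (conditional: x)

   a _condtemp_ iteration-counter temporary is created here; every
   conditional store to the privatized X also records the iteration that
   performed it, so lower_lastprivate_clauses can later copy out the value
   stored by the highest recorded iteration.  */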
lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
  tree iter_type = NULL_TREE;
  tree cond_ptr = NULL_TREE;
  tree iter_var = NULL_TREE;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
  tree next = *clauses;
  for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	&& OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	    tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
	    if (iter_type == NULL_TREE)
		iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
		iter_var = create_tmp_var_raw (iter_type);
		DECL_CONTEXT (iter_var) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
		DECL_CHAIN (iter_var) = ctx->block_vars;
		ctx->block_vars = iter_var;
		  = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
		OMP_CLAUSE_DECL (c3) = iter_var;
		OMP_CLAUSE_CHAIN (c3) = *clauses;
		ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	    next = OMP_CLAUSE_CHAIN (cc);
	    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	    tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
	    ctx->lastprivate_conditional_map->put (o, v);
  if (iter_type == NULL)
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
	  struct omp_for_data fd;
	  omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
	  iter_type = unsigned_type_for (fd.iter_type);
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
	iter_type = unsigned_type_node;
      tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
	    = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
	  OMP_CLAUSE_DECL (c2) = cond_ptr;
	  cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
	  DECL_CONTEXT (cond_ptr) = current_function_decl;
	  DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
	  DECL_CHAIN (cond_ptr) = ctx->block_vars;
	  ctx->block_vars = cond_ptr;
	  c2 = build_omp_clause (UNKNOWN_LOCATION,
				 OMP_CLAUSE__CONDTEMP_);
	  OMP_CLAUSE_DECL (c2) = cond_ptr;
	  OMP_CLAUSE_CHAIN (c2) = *clauses;
      iter_var = create_tmp_var_raw (iter_type);
      DECL_CONTEXT (iter_var) = current_function_decl;
      DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
      DECL_CHAIN (iter_var) = ctx->block_vars;
      ctx->block_vars = iter_var;
	= build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
      OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
      OMP_CLAUSE_DECL (c3) = iter_var;
      OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
      OMP_CLAUSE_CHAIN (c2) = c3;
      ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	  tree v = create_tmp_var_raw (iter_type);
	  DECL_CONTEXT (v) = current_function_decl;
	  DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
	  DECL_CHAIN (v) = ctx->block_vars;
	  ctx->block_vars = v;
	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  ctx->lastprivate_conditional_map->put (o, v);

/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  BODY_P is the sequence to insert early initialization
   if needed, STMT_LIST is where the non-conditional lastprivate handling
   goes into and CSTMT_LIST is a sequence that needs to be run in a critical
   ...  */

lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
			   gimple_seq *stmt_list, gimple_seq *cstmt_list,
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
  unsigned HOST_WIDE_INT conditional_off = 0;
  gimple_seq post_stmt_list = NULL;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
  if (clauses == NULL)
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
      if (ctx == NULL || !is_parallel_ctx (ctx))
      clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
  bool maybe_simt = false;
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
      maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
      simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);
      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (g, c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));

  tree cond_ptr = NULL_TREE;
  for (c = clauses; c;)
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);
      gimple_seq *this_stmt_list = stmt_list;
      tree lab2 = NULL_TREE;
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	  && ctx->lastprivate_conditional_map
	  && !ctx->combined_into_simd_safelen1)
	  gcc_assert (body_p);
	  if (cond_ptr == NULL_TREE)
	      cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
	      cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
	  tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  tree v = *ctx->lastprivate_conditional_map->get (o);
	  gimplify_assign (v, build_zero_cst (type), body_p);
	  this_stmt_list = cstmt_list;
	  if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
	      mem = build2 (MEM_REF, type, cond_ptr,
			    build_int_cst (TREE_TYPE (cond_ptr),
	      conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
	    mem = build4 (ARRAY_REF, type, cond_ptr,
			  size_int (conditional_off++), NULL_TREE, NULL_TREE);
	  tree mem2 = copy_node (mem);
	  gimple_seq seq = NULL;
	  mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (this_stmt_list, seq);
	  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
	  lab2 = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
	  gimple_seq_add_stmt (this_stmt_list, g);
	  gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
	  gimplify_assign (mem2, v, this_stmt_list);
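	  /* The sequence built above amounts to (pseudo-code):

	       if (v > *cond_mem)
		 {
		   *cond_mem = v;   ;; a later iteration assigned the var
		   ... lastprivate copy-out appended below ...
		 }

	     with LAB2 closing the guarded region further down.  */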
	       && ctx->combined_into_simd_safelen1
	       && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	       && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	       && ctx->lastprivate_conditional_map)
	this_stmt_list = &post_stmt_list;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx->outer);
	    new_var = lookup_decl (var, ctx);
	  /* Avoid uninitialized warnings for lastprivate and
	     for linear iterators.  */
	      && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
	    suppress_warning (new_var, OPT_Wuninitialized);

	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
		  if (lastlane == NULL)
		      lastlane = create_tmp_var (unsigned_type_node);
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (this_stmt_list, g);
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		  TREE_THIS_NOTRAP (new_var) = 1;
	  else if (maybe_simt)
	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
			  ? DECL_VALUE_EXPR (new_var)
	      if (simtlast == NULL)
		  simtlast = create_tmp_var (unsigned_type_node);
		  gcall *g = gimple_build_call_internal
		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		  gimple_call_set_lhs (g, simtlast);
		  gimple_seq_add_stmt (this_stmt_list, g);
	      x = build_call_expr_internal_loc
		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		 TREE_TYPE (val), 2, val, simtlast);
	      new_var = unshare_expr (new_var);
	      gimplify_assign (new_var, x, this_stmt_list);
	      new_var = unshare_expr (new_var);

	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;

	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
	      && is_taskloop_ctx (ctx))
	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
	      if (is_global_var (ovar))
	  x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
	  if (omp_privatize_by_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, this_stmt_list);
	    gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));

      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);

    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
  gimple_seq_add_seq (stmt_list, post_stmt_list);

/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
   (which might be a placeholder).  INNER is true if this is an inner
   axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
   join markers.  Generate the before-loop forking sequence in
   FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
   general form of these sequences is

     GOACC_REDUCTION_SETUP
     GOACC_REDUCTION_INIT
     GOACC_REDUCTION_FINI
     GOACC_REDUCTION_TEARDOWN.  */
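/* A sketch of the shape generated below for one reduction variable
   (pseudo-GIMPLE):

     v1 = GOACC_REDUCTION (SETUP, ref_to_res, incoming, level, op, offset);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, op, offset);
       ... loop body ...
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, op, offset);
     GOACC_JOIN
     outgoing = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, op,
				 offset);  */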
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *private_marker, gcall *join,
		       gimple_seq *fork_seq, gimple_seq *join_seq,
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	/* No 'reduction' clauses on OpenACC 'kernels'.  */
	gcc_checking_assert (!is_oacc_kernels (ctx));
	/* Likewise, on OpenACC 'kernels' decomposed parts.  */
	gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	  incoming = outgoing = var;

	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
		enum gimple_code type = gimple_code (probe->stmt);

		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);

		  case GIMPLE_OMP_TARGET:
		    /* No 'reduction' clauses inside OpenACC 'kernels'
		       ...  */
		    gcc_checking_assert (!is_oacc_kernels (probe));

		    if (!is_gimple_omp_offloaded (probe->stmt))
		    cls = gimple_omp_target_clauses (probe->stmt);

		for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))

	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_privatize_by_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		incoming = omp_reduction_init_op (loc, rcode, type);

		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		omp_context *c = ctx->outer;
		    t = maybe_lookup_decl (orig, c);
		incoming = outgoing = (t ? t : orig);

	  has_outer_reduction:;

	  ref_to_res = integer_zero_node;

	if (omp_privatize_by_reference (orig))
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	      tree x = create_tmp_var (TREE_TYPE (type), id);
	      gimplify_assign (var, build_fold_addr_expr (x), fork_seq);

	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);

	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);

	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  incoming, level, op, off);
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  v1, level, op, off);
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  v2, level, op, off);
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, v3, level, op, off);

	gimplify_assign (v1, setup_call, &before_fork);
	gimplify_assign (v2, init_call, &after_fork);
	gimplify_assign (v3, fini_call, &before_join);
	gimplify_assign (outgoing, teardown_call, &after_join);

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
    gimple_seq_add_stmt (fork_seq, private_marker);
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);

/* Generate code to implement the REDUCTION clauses, append it
   to STMT_SEQP.  CLIST if non-NULL is a pointer to a sequence
   that should be emitted also inside of the critical section,
   in that case clear *CLIST afterwards, otherwise leave it as is
   and let the caller emit it itself.  */

lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
			 gimple_seq *clist, omp_context *ctx)
  gimple_seq sub_seq = NULL;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)

  /* inscan reductions are handled elsewhere.  */
  if (ctx->scan_inclusive || ctx->scan_exclusive)

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
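  /* For example (illustrative): a single scalar

       #pragma omp parallel reduction (+: s)

     can be merged with one relaxed atomic update, roughly

       #pragma omp atomic update
       s_orig = s_orig + s_priv;

     whereas two or more clauses, array sections, or UDRs are merged
     inside a GOMP_atomic_start ()/GOMP_atomic_end () protected region
     instead (see the end of this function).  */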
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	      /* If this is a pointer or referenced based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
      if (is_variable_sized (var))
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	  var = DECL_VALUE_EXPR (var);
	  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	  var = TREE_OPERAND (var, 0);
	  gcc_assert (DECL_P (var));

      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c)
	  && omp_privatize_by_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)

      bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);
	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  tree new_var2 = new_var;
	      tree zero = build_zero_cst (TREE_TYPE (new_var));
	      new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, new_var, zero);
	      ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
	    x = fold_convert (TREE_TYPE (new_var), x);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
	      if (integer_zerop (bias))
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
	      d = TREE_OPERAND (d, 0);
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_privatize_by_reference (var)
			  && var == orig_var);
	  else if (TREE_CODE (d) == ADDR_EXPR)
	      if (orig_var == var)
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
	      gcc_assert (orig_var == var);
	      if (omp_privatize_by_reference (var))
		ref = build_fold_addr_expr (ref);
	      tree t = maybe_lookup_decl (v, ctx);
	      v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	  if (!integer_zerop (bias))
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
		  tree zero = build_zero_cst (TREE_TYPE (out));
		  out2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, out, zero);
		  priv2 = fold_build2_loc (clause_loc, NE_EXPR,
					   boolean_type_node, priv, zero);
	      x = build2 (code, TREE_TYPE (out2), out2, priv2);
		x = fold_convert (TREE_TYPE (out), x);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7935 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7937 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7939 if (omp_privatize_by_reference (var
)
7940 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7942 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
7943 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7944 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7945 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7946 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7947 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7948 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7952 tree new_var2
= new_var
;
7956 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7957 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7958 boolean_type_node
, new_var
, zero
);
7959 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, boolean_type_node
,
7962 x
= build2 (code
, TREE_TYPE (ref
), ref2
, new_var2
);
7964 x
= fold_convert (TREE_TYPE (new_var
), x
);
7965 ref
= build_outer_var_ref (var
, ctx
);
7966 gimplify_assign (ref
, x
, &sub_seq
);
7970 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
7972 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7974 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
7978 gimple_seq_add_seq (stmt_seqp
, *clist
);
7982 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
7984 gimple_seq_add_stmt (stmt_seqp
, stmt
);
/* Generate code to implement the COPYPRIVATE clauses.  */

static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
			   omp_context *ctx)
{
  tree c;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
	{
	  x = build_fold_addr_expr_loc (clause_loc, new_var);
	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
	}
      gimplify_assign (ref, x, slist);

      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
	{
	  ref = fold_convert_loc (clause_loc,
				  build_pointer_type (TREE_TYPE (new_var)),
				  ref);
	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
	}
      if (omp_privatize_by_reference (var))
	{
	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
	  ref = build_simple_mem_ref_loc (clause_loc, ref);
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	}
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}
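/* Illustrative sketch only (names schematic, not emitted literally): for

     #pragma omp single copyprivate (a)

   the sender side (SLIST), run by the thread that executed the single,
   stores the chosen value into the copy-out record, and the receiver
   side (RLIST), run by every other thread, reads it back:

     .omp_copy_o.a = a;	    // or = &a when use_pointer_for_field
     a = .omp_copy_i->a;    // via lang_hooks.decls.omp_clause_assign_op

   The .omp_copy_o/.omp_copy_i temporaries are created by
   lower_omp_single_copy below.  */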
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    suppress_warning (var);
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_privatize_by_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_privatize_by_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
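/* Schematic example of the sender side built above (record and field
   names are illustrative): for

     int x = 1;
     #pragma omp task firstprivate (x)

   ILIST receives roughly

     .omp_data_o.x = x;	    // or = &x if use_pointer_for_field

   and for clauses that set DO_OUT (e.g. a scalar lastprivate) OLIST
   receives the matching read-back

     x = .omp_data_o.x;  */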
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar
	  || !DECL_HAS_VALUE_EXPR_P (nvar)
	  || (ctx->allocate_map
	      && ctx->allocate_map->get (ovar)))
	continue;

      /* If CTX is a nested parallel directive, find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  x = build_sender_ref (ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
		 or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
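/* Rough picture of the result (record name illustrative): for an
   implicitly shared variable

     int i;
     #pragma omp parallel shared (i)

   the parent either copies the value or passes the address into the
   communication record before entering the region,

     .omp_data_o.i = i;	    // by value
     .omp_data_o.i = &i;    // when use_pointer_for_field

   and in the by-value case OLIST copies it back afterwards.  */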
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */

static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels = MAX (levels, 1);
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels = MAX (levels, 2);
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels = MAX (levels, 3);
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  tag |= OLF_REDUCTION;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    ;
  else if (is_oacc_kernels (tgt))
    /* Not using this loops handling inside OpenACC 'kernels' regions.  */
    gcc_unreachable ();
  else if (is_oacc_kernels_decomposed_part (tgt))
    ;
  else
    gcc_unreachable ();

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    tag |= OLF_INDEPENDENT;

  /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
     have an explicit 'seq' or 'independent' clause, and no 'auto' clause.  */
  if (tgt && is_oacc_kernels_decomposed_part (tgt))
    {
      gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
      gcc_assert (!(tag & OLF_AUTO));
    }

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning.  */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
/* Emit an OpenACC loop head or tail marker to SEQ.  LEVEL is the
   partitioning level of the enclosed region.  */

static void
lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
			tree tofollow, gimple_seq *seq)
{
  int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
		     : IFN_UNIQUE_OACC_TAIL_MARK);
  tree marker = build_int_cst (integer_type_node, marker_kind);
  int nargs = 2 + (tofollow != NULL_TREE);
  gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
					    marker, ddvar, tofollow);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);
}
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  */

static void
lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);

  if (private_marker)
    {
      gimple_set_location (private_marker, loc);
      gimple_call_set_lhs (private_marker, ddvar);
      gimple_call_set_arg (private_marker, 1, ddvar);
    }

  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, (count == 1) ? private_marker : NULL,
			     join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
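/* Shape of the generated sequences, as a sketch only (the exact
   operands are defined by lower_oacc_head_mark and
   lower_oacc_loop_marker above): for a loop partitioned over two
   levels, HEAD is roughly

     ddvar = .UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag);
     ddvar = .UNIQUE (OACC_FORK, ddvar, -1);
     ddvar = .UNIQUE (OACC_HEAD_MARK, ddvar, ...);
     ddvar = .UNIQUE (OACC_FORK, ddvar, -1);
     ddvar = .UNIQUE (OACC_HEAD_MARK, ddvar);

   and TAIL is the mirror image built from OACC_TAIL_MARK and
   OACC_JOIN.  The markers are consumed and deleted later by the
   OpenACC device lowering pass.  */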
/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
   catch handler and return it.  This prevents programs from violating the
   structured block semantics with throws.  */

static gimple_seq
maybe_catch_exception (gimple_seq body)
{
  gimple *g;
  tree decl;

  if (!flag_exceptions)
    return body;

  if (lang_hooks.eh_protect_cleanup_actions != NULL)
    decl = lang_hooks.eh_protect_cleanup_actions ();
  else
    decl = builtin_decl_explicit (BUILT_IN_TRAP);

  g = gimple_build_eh_must_not_throw (decl);
  g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
			GIMPLE_TRY_CATCH);

  return gimple_seq_alloc_with_stmt (g);
}
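/* Schematically, with -fexceptions the body

     { BODY }

   becomes

     try { BODY } catch <eh_must_not_throw (terminate-or-trap decl)>

   i.e. a GIMPLE_TRY_CATCH whose handler is a GIMPLE_EH_MUST_NOT_THROW,
   so an exception escaping the structured block aborts the program
   rather than unwinding across the OMP runtime.  (Sketch only.)  */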
/* Routines to lower OMP directives into OMP-GIMPLE.  */

/* If ctx is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
   and conditional branch to parallel's cancel_label to handle
   cancellation in the implicit barrier.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
	     && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
      return;
}
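/* The control flow added for a cancellable worksharing region is
   roughly (sketch only; the cancellable barrier call itself is
   materialized later, by pass_expand_omp):

     lhs = <implicit cancellable barrier>;  // lhs on GIMPLE_OMP_RETURN
     if (lhs != false) goto cancel_label;
     fallthru_label:  */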
/* Find the first task_reduction or reduction clause or return NULL
   if there are none.  */

static inline tree
omp_task_reductions_find_first (tree clauses, enum tree_code code,
				enum omp_clause_code ccode)
{
  while (1)
    {
      clauses = omp_find_clause (clauses, ccode);
      if (clauses == NULL_TREE)
	return NULL_TREE;
      if (ccode != OMP_CLAUSE_REDUCTION
	  || code == OMP_TASKLOOP
	  || OMP_CLAUSE_REDUCTION_TASK (clauses))
	return clauses;
      clauses = OMP_CLAUSE_CHAIN (clauses);
    }
}

static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
				       gimple_seq *, gimple_seq *);
/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
  lower_lastprivate_conditional_clauses (clauses_ptr, ctx);

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  control = create_tmp_var (unsigned_type_node, ".section");
  gimple_omp_sections_set_control (stmt, control);

  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &ilist, &l, &clist, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
			   &clist, ctx);
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
      gimple_seq_add_seq (&olist, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
    }

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  t = gimple_build_omp_continue (control, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
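/* Statement order produced by this function, as a sketch:

     <ilist: firstprivate/reduction setup>
     GIMPLE_OMP_SECTIONS <control = .section>
     GIMPLE_OMP_SECTIONS_SWITCH
     bind { SECTION1 ... GIMPLE_OMP_RETURN;
	    SECTION2 ... lastprivates; GIMPLE_OMP_RETURN; }
     GIMPLE_OMP_CONTINUE (.section, .section)
     <olist: reductions>  <dlist: destructors>
     GIMPLE_OMP_RETURN (nowait?)

   The GOMP_sections_start/GOMP_sections_next runtime calls are only
   introduced later, by pass_expand_omp.  */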
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, without a copyprivate clause:

	if (GOMP_single_start ())
	  BODY;
	[ GOMP_barrier (); ]	-> unless 'nowait' is present.

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  for a synchronization analysis pass.  */

static void
lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
{
  location_t loc = gimple_location (single_stmt);
  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *call, *cond;
  tree lhs, decl;

  decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
  lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
  call = gimple_build_call (decl, 0);
  gimple_call_set_lhs (call, lhs);
  gimple_seq_add_stmt (pre_p, call);

  cond = gimple_build_cond (EQ_EXPR, lhs,
			    fold_convert_loc (loc, TREE_TYPE (lhs),
					      boolean_true_node),
			    tlabel, flabel);
  gimple_seq_add_stmt (pre_p, cond);
  gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
  gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
}
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

	{
	  if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	    {
	      BODY;
	      copyout.a = a;
	      copyout.b = b;
	      copyout.c = c;
	      GOMP_single_copy_end (&copyout);
	    }
	  else
	    {
	      a = copyout_p->a;
	      b = copyout_p->b;
	      c = copyout_p->c;
	    }
	  GOMP_barrier ();
	}

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  for a synchronization analysis pass.  */

static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			     &copyin_seq, ctx);

  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
/* Expand code for an OpenMP single directive.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
/* Lower code for an OMP scope directive.  */

static void
lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *scope_stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;
  gimple_seq tred_dlist = NULL;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;

  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
				      OMP_SCOPE, OMP_CLAUSE_REDUCTION);
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
      gimple_omp_scope_set_clauses (scope_stmt, c);
      lower_omp_task_reductions (ctx, OMP_SCOPE,
				 gimple_omp_scope_clauses (scope_stmt),
				 &bind_body, &tred_dlist);
      rclauses = c;
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
      gimple *stmt = gimple_build_call (fndecl, 1, temp);
      gimple_seq_add_stmt (&bind_body, stmt);
    }

  lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, scope_stmt);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));

  gimple_omp_set_body (scope_stmt, NULL);

  gimple_seq clist = NULL;
  lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
			   &bind_body, &clist, ctx);
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&bind_body, g);
      gimple_seq_add_seq (&bind_body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&bind_body, g);
    }

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  gimple_seq_add_seq (&bind_body_tail, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);

  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
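/* Sketch of the lowered form:

     GOMP_scope_start (<reduction temp>);  // only with reductions
     <firstprivate/private setup>
     GIMPLE_OMP_SCOPE
     BODY
     <reductions, destructors>
     GIMPLE_OMP_RETURN (nowait?)

   Statement order only; the implicit barrier, when required, is
   added during pass_expand_omp.  */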
/* Expand code for an OpenMP master or masked directive.  */

static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, lab = NULL, x, bfn_decl;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tseq;
  tree filter = integer_zero_node;

  push_gimplify_context ();

  if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
    {
      filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
				OMP_CLAUSE_FILTER);
      if (filter)
	filter = fold_convert (integer_type_node,
			       OMP_CLAUSE_FILTER_EXPR (filter));
      else
	filter = integer_zero_node;
    }
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  x = build_call_expr_loc (loc, bfn_decl, 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, filter);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  tseq = NULL;
  gimplify_and_add (x, &tseq);
  gimple_bind_add_seq (bind, tseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_label (lab));

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
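/* Both master and masked reduce to a plain thread-number test
   (FILTER is 0 for master), schematically:

     if (omp_get_thread_num () == filter)
       BODY
     lab:
     GIMPLE_OMP_RETURN (nowait);

   with no barrier of its own.  */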
/* Helper function for lower_omp_task_reductions.  For a specific PASS,
   find the next clause that should be processed, or return false
   if all have been processed already.  */

static inline bool
omp_task_reduction_iterate (int pass, enum tree_code code,
			    enum omp_clause_code ccode, tree *c, tree *decl,
			    tree *type, tree *next)
{
  for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
    {
      if (ccode == OMP_CLAUSE_REDUCTION
	  && code != OMP_TASKLOOP
	  && !OMP_CLAUSE_REDUCTION_TASK (*c))
	continue;
      *decl = OMP_CLAUSE_DECL (*c);
      *type = TREE_TYPE (*decl);
      if (TREE_CODE (*decl) == MEM_REF)
	{
	  if (pass != 1)
	    continue;
	}
      else
	{
	  if (omp_privatize_by_reference (*decl))
	    *type = TREE_TYPE (*type);
	  if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
	    continue;
	}
      *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
      return true;
    }
  *decl = NULL_TREE;
  *type = NULL_TREE;
  *next = NULL_TREE;
  return false;
}
/* Lower task_reduction and reduction clauses (the latter unless CODE is
   OMP_TASKGROUP, only with task modifier).  Register mapping of those in
   the START sequence, and reduce and unregister them in the END
   sequence.  */

static void
lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
			   gimple_seq *start, gimple_seq *end)
{
  enum omp_clause_code ccode
    = (code == OMP_TASKGROUP
       ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
  tree cancellable = NULL_TREE;
  clauses = omp_task_reductions_find_first (clauses, code, ccode);
  if (clauses == NULL_TREE)
    return;
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
	if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	    && outer->cancellable)
	  {
	    cancellable = error_mark_node;
	    break;
	  }
	else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
		 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
	  break;
    }
  tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree *last = &TYPE_FIELDS (record_type);
  unsigned cnt = 0;
  if (cancellable)
    {
      tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			       ptr_type_node);
      tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
				integer_type_node);
      *last = field;
      DECL_CHAIN (field) = ifield;
      last = &DECL_CHAIN (ifield);
      DECL_CONTEXT (field) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
      DECL_CONTEXT (ifield) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
    }
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  ++cnt;
	  tree new_type = type;
	  if (ctx->outer)
	    new_type = remap_type (type, &ctx->outer->cb);
	  tree field
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
			  new_type);
	  if (DECL_P (decl) && type == TREE_TYPE (decl))
	    {
	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	    }
	  else
	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
	  DECL_CONTEXT (field) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
	  *last = field;
	  last = &DECL_CHAIN (field);
	  tree bfield
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
			  boolean_type_node);
	  DECL_CONTEXT (bfield) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
	  *last = bfield;
	  last = &DECL_CHAIN (bfield);
	}
    }
  *last = NULL_TREE;
  layout_type (record_type);

  /* Build up an array which registers with the runtime all the reductions
     and deregisters them at the end.  Format documented in libgomp/task.c.  */
  tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
  tree avar = create_tmp_var_raw (atype);
  gimple_add_tmp_var (avar);
  TREE_ADDRESSABLE (avar) = 1;
  tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
		   NULL_TREE, NULL_TREE);
  tree t = build_int_cst (pointer_sized_int_node, cnt);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  gimple_seq seq = NULL;
  tree sz = fold_convert (pointer_sized_int_node,
			  TYPE_SIZE_UNIT (record_type));
  int cachesz = 64;
  sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, cachesz - 1));
  sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
  ctx->task_reductions.create (1 + cnt);
  ctx->task_reduction_map = new hash_map<tree, unsigned>;
  ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
				   ? sz : NULL_TREE);
  sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
  gimple_seq_add_seq (start, seq);
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
	      NULL_TREE, NULL_TREE);
  gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node,
		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, -1);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, 0);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

  /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
     and for each task reduction checks a bool right after the private variable
     within that thread's chunk; if the bool is clear, it hasn't been
     initialized and thus isn't going to be reduced nor destructed, otherwise
     reduce and destruct it.  */
  tree idx = create_tmp_var (size_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
  tree num_thr_sz = create_tmp_var (size_type_node);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = NULL_TREE, lab7 = NULL_TREE;
  gimple *g;
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      /* For worksharing constructs or scope, only perform it in the master
	 thread, with the exception of cancelled implicit barriers - then only
	 handle the current thread.  */
      tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
      tree thr_num = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, thr_num);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	{
	  tree c;
	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  if (code == OMP_FOR)
	    c = gimple_omp_for_clauses (ctx->stmt);
	  else if (code == OMP_SECTIONS)
	    c = gimple_omp_sections_clauses (ctx->stmt);
	  else /* if (code == OMP_SCOPE) */
	    c = gimple_omp_scope_clauses (ctx->stmt);
	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
	  cancellable = c;
	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
				 lab5, lab6);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab5));
	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
	  gimple_seq_add_stmt (end, g);
	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
				   build_one_cst (TREE_TYPE (idx)));
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_goto (lab3));
	  gimple_seq_add_stmt (end, gimple_build_label (lab6));
	}
      g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
      gimple_seq_add_stmt (end, g);
      gimple_seq_add_stmt (end, gimple_build_label (lab4));
    }
  if (code != OMP_PARALLEL)
    {
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
      tree num_thr = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, num_thr);
      gimple_seq_add_stmt (end, g);
      g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	gimple_seq_add_stmt (end, gimple_build_label (lab3));
    }
  else
    {
      tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
      t = fold_convert (size_type_node, t);
      gimplify_assign (num_thr_sz, t, end);
    }
  t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  tree data = create_tmp_var (pointer_sized_int_node);
  gimple_seq_add_stmt (end, gimple_build_assign (data, t));
  if (code == OMP_TASKLOOP)
    {
      lab7 = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, data,
			     build_zero_cst (pointer_sized_int_node),
			     lab1, lab7);
      gimple_seq_add_stmt (end, g);
    }
  gimple_seq_add_stmt (end, gimple_build_label (lab1));
  tree ptr;
  if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
    ptr = create_tmp_var (build_pointer_type (record_type));
  else
    ptr = create_tmp_var (ptr_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));

  tree field = TYPE_FIELDS (record_type);
  cnt = 0;
  if (cancellable)
    field = DECL_CHAIN (DECL_CHAIN (field));
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  tree var = decl, ref;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      tree v = var;
	      if (TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      else if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	      tree orig_var = var;
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (orig_var != var)
		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
	      else if (TREE_CODE (v) == ADDR_EXPR)
		t = build_fold_addr_expr (t);
	      else if (TREE_CODE (v) == INDIRECT_REF)
		t = build_fold_indirect_ref (t);
	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
		}
	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
				 fold_convert (size_type_node,
					       TREE_OPERAND (decl, 1)));
	    }
	  else
	    {
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (!omp_privatize_by_reference (decl))
		t = build_fold_addr_expr (t);
	    }
	  t = fold_convert (pointer_sized_int_node, t);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
	  t = unshare_expr (byte_position (field));
	  t = fold_convert (pointer_sized_int_node, t);
	  ctx->task_reduction_map->put (c, cnt);
	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
					   ? t : NULL_TREE);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

	  tree bfield = DECL_CHAIN (field);
	  tree cond;
	  if (code == OMP_PARALLEL
	      || code == OMP_FOR
	      || code == OMP_SECTIONS
	      || code == OMP_SCOPE)
	    /* In parallel, worksharing or scope all threads unconditionally
	       initialize all their task reduction private variables.  */
	    cond = boolean_true_node;
	  else if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
			     unshare_expr (byte_position (bfield)));
	      seq = NULL;
	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
			     build_int_cst (pbool, 0));
	    }
	  else
	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
	  tree condv = create_tmp_var (boolean_type_node);
	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
				 lab3, lab4);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab3));
	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
	    {
	      /* If this reduction doesn't need destruction and parallel
		 has been cancelled, there is nothing to do for this
		 reduction, so jump around the merge operation.  */
	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	      g = gimple_build_cond (NE_EXPR, cancellable,
				     build_zero_cst (TREE_TYPE (cancellable)),
				     lab4, lab5);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (lab5));
	    }

	  tree new_var;
	  if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
				unshare_expr (byte_position (field)));
	      seq = NULL;
	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (field));
	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
				build_int_cst (pbool, 0));
	    }
	  else
	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
			      build_simple_mem_ref (ptr), field, NULL_TREE);

	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	  if (TREE_CODE (decl) != MEM_REF
	      && omp_privatize_by_reference (decl))
	    ref = build_simple_mem_ref (ref);
	  /* reduction(-:var) sums up the partial results, so it acts
	     identically to reduction(+:var).  */
	  if (rcode == MINUS_EXPR)
	    rcode = PLUS_EXPR;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree type = TREE_TYPE (new_var);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      tree i = create_tmp_var (TREE_TYPE (v));
	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      if (DECL_P (v))
		{
		  v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  tree vv = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (vv, v, start);
		  v = vv;
		}
	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	      new_var = build_fold_addr_expr (new_var);
	      new_var = fold_convert (ptype, new_var);
	      ref = fold_convert (ptype, ref);
	      tree m = create_tmp_var (ptype);
	      gimplify_assign (m, new_var, end);
	      new_var = m;
	      m = create_tmp_var (ptype);
	      gimplify_assign (m, ref, end);
	      ref = m;
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (end, gimple_build_label (body));
	      tree priv = build_simple_mem_ref (new_var);
	      tree out = build_simple_mem_ref (ref);
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  tree lab6 = NULL_TREE;
		  if (cancellable)
		    {
		      /* If this reduction needs destruction and parallel
			 has been cancelled, jump around the merge operation
			 to the destruction.  */
		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
					     lab6, lab5);
		      gimple_seq_add_stmt (end, g);
		      gimple_seq_add_stmt (end, gimple_build_label (lab5));
		    }
		  SET_DECL_VALUE_EXPR (placeholder, out);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
		  gimple_seq_add_seq (end,
				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    {
		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
		    }
		  if (cancellable)
		    gimple_seq_add_stmt (end, gimple_build_label (lab6));
		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
		  if (x)
		    {
		      gimple_seq tseq = NULL;
		      gimplify_stmt (&x, &tseq);
		      gimple_seq_add_seq (end, tseq);
		    }
		}
	      else
		{
		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
		  out = unshare_expr (out);
		  gimplify_assign (out, x, end);
		}
	      gimple *g
		= gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (endl));
	    }
	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree oldv = NULL_TREE;
	      tree lab6 = NULL_TREE;
	      if (cancellable)
		{
		  /* If this reduction needs destruction and parallel
		     has been cancelled, jump around the merge operation
		     to the destruction.  */
		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
					 lab6, lab5);
		  gimple_seq_add_stmt (end, g);
		  gimple_seq_add_stmt (end, gimple_build_label (lab5));
		}
	      if (omp_privatize_by_reference (decl)
		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
						 TREE_TYPE (ref)))
		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      tree refv = create_tmp_var (TREE_TYPE (ref));
	      gimplify_assign (refv, ref, end);
	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
	      SET_DECL_VALUE_EXPR (placeholder, ref);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      tree d = maybe_lookup_decl (decl, ctx);
	      gcc_assert (d);
	      if (DECL_HAS_VALUE_EXPR_P (d))
		oldv = DECL_VALUE_EXPR (d);
	      if (omp_privatize_by_reference (var))
		{
		  tree v = fold_convert (TREE_TYPE (d),
					 build_fold_addr_expr (new_var));
		  SET_DECL_VALUE_EXPR (d, v);
		}
	      else
		SET_DECL_VALUE_EXPR (d, new_var);
	      DECL_HAS_VALUE_EXPR_P (d) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      if (oldv)
		SET_DECL_VALUE_EXPR (d, oldv);
	      else
		{
		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
		  DECL_HAS_VALUE_EXPR_P (d) = 0;
		}
	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      if (cancellable)
		gimple_seq_add_stmt (end, gimple_build_label (lab6));
	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;
		  gimplify_stmt (&x, &tseq);
		  gimple_seq_add_seq (end, tseq);
		}
	    }
	  else
	    {
	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
	      ref = unshare_expr (ref);
	      gimplify_assign (ref, x, end);
	    }
	  gimple_seq_add_stmt (end, gimple_build_label (lab4));
	  ++cnt;
	  field = DECL_CHAIN (bfield);
	}
    }

  if (code == OMP_TASKGROUP)
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
      gimple_seq_add_stmt (start, g);
    }
  else
    {
      tree c;
      if (code == OMP_FOR)
	c = gimple_omp_for_clauses (ctx->stmt);
      else if (code == OMP_SECTIONS)
	c = gimple_omp_sections_clauses (ctx->stmt);
      else if (code == OMP_SCOPE)
	c = gimple_omp_scope_clauses (ctx->stmt);
      else
	c = gimple_omp_taskreg_clauses (ctx->stmt);
      c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
			build_fold_addr_expr (avar));
      gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
    }

  gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
  gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
						 size_one_node));
  g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
  gimple_seq_add_stmt (end, g);
  gimple_seq_add_stmt (end, gimple_build_label (lab2));
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      enum built_in_function bfn
	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
      t = builtin_decl_explicit (bfn);
      tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
      tree arg;
      if (cancellable)
	{
	  arg = create_tmp_var (c_bool_type);
	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
							 cancellable));
	}
      else
	arg = build_int_cst (c_bool_type, 0);
      g = gimple_build_call (t, 1, arg);
    }
  else
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
    }
  gimple_seq_add_stmt (end, g);
  if (lab7)
    gimple_seq_add_stmt (end, gimple_build_label (lab7));
  t = build_constructor (atype, NULL);
  TREE_THIS_VOLATILE (t) = 1;
  gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
}
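/* Sketch of the AVAR registration array built above; libgomp/task.c
   holds the authoritative description of this format:

     [0]  number of reduction entries (CNT)
     [1]  cache-line aligned size of one per-thread block
     [2]  alignment on registration; the runtime overwrites this slot
	  with the address of the allocated data
     [3]  -1, [4] 0; the remaining header slots are runtime-owned
     [7 + 3*i]	   address of the original (shared) variable
     [7 + 3*i + 1] byte offset of reduction I within the block
     [7 + 3*i + 2] reserved for the runtime.  */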
/* Expand code for an OpenMP taskgroup directive.  */

static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  gcall *x;
  gbind *bind;
  gimple_seq dseq = NULL;
  tree block = make_node (BLOCK);

  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  push_gimplify_context ();

  x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
			 0);
  gimple_bind_add_stmt (bind, x);

  lower_omp_task_reductions (ctx, OMP_TASKGROUP,
			     gimple_omp_taskgroup_clauses (stmt),
			     gimple_bind_body_ptr (bind), &dseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
  gimple_bind_add_seq (bind, dseq);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
9721 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
                           omp_context *ctx)
{
  struct omp_for_data fd;
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
         #pragma omp ordered depend(sink:...) constructs
         into one #pragma omp ordered depend(sink:...), so that
         we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          if (is_gimple_debug (stmt)
              || gimple_code (stmt) == GIMPLE_NOP)
            {
              gsi_next (&gsi);
              continue;
            }
          if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
            break;
          gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
          c = gimple_omp_ordered_clauses (ord_stmt2);
          if (c == NULL_TREE
              || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
              || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
            break;
          while (*list_p)
            list_p = &OMP_CLAUSE_CHAIN (*list_p);
          *list_p = c;
          gsi_remove (&gsi, true);
        }
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

        #pragma omp for ordered(2)
        for (i=0; i < N; ++i)
          for (j=0; j < M; ++j)
            {
              #pragma omp ordered \
                depend(sink:i-8,j-2) \
                depend(sink:i,j-1) \    // Completely ignored because i+0.
                depend(sink:i-4,j-3) \
                depend(sink:i-6,j-4)
              #pragma omp ordered depend(source)
            }

     Folded clause is:

        depend(sink:-gcd(8,4,6),-min(2,3,4))  */
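/* Concretely, for the example above: gcd (8, 4, 6) == 2 and
   min (2, 3, 4) == 2, so the three surviving clauses fold into the
   single clause depend(sink: i-2, j-2).  */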
  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
        goto next_ordered_clause;

      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
           vec && TREE_CODE (vec) == TREE_LIST;
           vec = TREE_CHAIN (vec), ++i)
        {
          gcc_assert (i < len);

          /* omp_extract_for_data has canonicalized the condition.  */
          gcc_assert (fd.loops[i].cond_code == LT_EXPR
                      || fd.loops[i].cond_code == GT_EXPR);
          bool forward = fd.loops[i].cond_code == LT_EXPR;
          bool maybe_lexically_later = true;

          /* While the committee makes up its mind, bail if we have any
             non-constant steps.  */
          if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
            goto lower_omp_ordered_ret;

          tree itype = TREE_TYPE (TREE_VALUE (vec));
          if (POINTER_TYPE_P (itype))
            itype = sizetype;
          wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
                                            TYPE_PRECISION (itype),
                                            TYPE_SIGN (itype));

          /* Ignore invalid offsets that are not multiples of the step.  */
          if (!wi::multiple_of_p (wi::abs (offset),
                                  wi::abs (wi::to_wide (fd.loops[i].step)),
                                  UNSIGNED))
            {
              warning_at (OMP_CLAUSE_LOCATION (c), 0,
                          "ignoring sink clause with offset that is not "
                          "a multiple of the loop step");
              remove = true;
              goto next_ordered_clause;
            }

          /* Calculate the first dimension.  The first dimension of
             the folded dependency vector is the GCD of the first
             elements, while ignoring any first elements whose offset
             is 0.  */
          if (i == 0)
            {
              /* Ignore dependence vectors whose first dimension is 0.  */
              if (offset == 0)
                {
                  remove = true;
                  goto next_ordered_clause;
                }
              else
                {
                  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
                    {
                      error_at (OMP_CLAUSE_LOCATION (c),
                                "first offset must be in opposite direction "
                                "of loop iterations");
                      goto lower_omp_ordered_ret;
                    }
                  if (forward)
                    offset = -offset;
                  neg_offset_p = forward;
                  /* Initialize the first time around.  */
                  if (folded_dep == NULL_TREE)
                    {
                      folded_dep = c;
                      folded_deps[0] = offset;
                    }
                  else
                    folded_deps[0] = wi::gcd (folded_deps[0],
                                              offset, UNSIGNED);
                }
            }
          /* Calculate minimum for the remaining dimensions.  */
          else
            {
              folded_deps[len + i - 1] = offset;
              if (folded_dep == c)
                folded_deps[i] = offset;
              else if (maybe_lexically_later
                       && !wi::eq_p (folded_deps[i], offset))
                {
                  if (forward ^ wi::gts_p (folded_deps[i], offset))
                    {
                      unsigned int j;
                      folded_dep = c;
                      for (j = 1; j <= i; j++)
                        folded_deps[j] = folded_deps[len + j - 1];
                    }
                  else
                    maybe_lexically_later = false;
                }
            }
        }
      gcc_assert (i == len);

      remove = true;

    next_ordered_clause:
      if (remove)
        *list_p = OMP_CLAUSE_CHAIN (c);
      else
        list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      if (neg_offset_p)
        folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
        itype = sizetype;

      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
        = wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp ordered threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
/* Expand code for an OpenMP ordered directive.  */
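/* In the default (non-simd, non-depend) case the lowering below amounts
   to, roughly:

     GOMP_ordered_start ();
     body
     GOMP_ordered_end ();

   For simd the bracketing uses the IFN_GOMP_SIMD_ORDERED_{START,END}
   internal functions instead, and under SIMT an additional per-lane
   loop is wrapped around the body.  */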
static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
                               OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
                                  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
                       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
         conditions only testable on cfg with dominators computed, and also
         all the depend clauses to be merged still might need to be available
         for the runtime checks.  */
      if (0)
        lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
                                      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
                           0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }

  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
                                    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
                           0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
/* Expand code for an OpenMP scan directive and the structured block
   before the scan directive.  */
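/* Reminder of the source-level semantics (illustrative example):

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
         r += a[i];
         #pragma omp scan inclusive(r)
         b[i] = r;
       }

   With inclusive(r) the value read after the scan directive includes the
   current iteration's contribution; with exclusive(r) it does not.  */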
static void
lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  bool has_clauses
    = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
  tree lane = NULL_TREE;
  gimple_seq before = NULL;
  omp_context *octx = ctx->outer;

  if (octx->scan_exclusive && !has_clauses)
    {
      gimple_stmt_iterator gsi2 = *gsi_p;
      gsi_next (&gsi2);
      gimple *stmt2 = gsi_stmt (gsi2);
      /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
         with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
         the one with exclusive clause(s), comes first.  */
      if (stmt2
          && gimple_code (stmt2) == GIMPLE_OMP_SCAN
          && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
        {
          gsi_remove (gsi_p, false);
          gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
          ctx = maybe_lookup_ctx (stmt2);
          gcc_assert (ctx);
          lower_omp_scan (gsi_p, ctx);
          return;
        }
    }

  bool input_phase = has_clauses ^ octx->scan_inclusive;
  bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
                  && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
  bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
                 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
                 && !gimple_omp_for_combined_p (octx->stmt));
  bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
  if (is_for_simd && octx->for_simd_scan_phase)
    is_simd = false;
  if (is_simd)
    if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
                                  OMP_CLAUSE__SIMDUID_))
      {
        tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
        lane = create_tmp_var (unsigned_type_node);
        tree t = build_int_cst (integer_type_node,
                                input_phase ? 1
                                : octx->scan_inclusive ? 2 : 3);
        gimple *g
          = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
        gimple_call_set_lhs (g, lane);
        gimple_seq_add_stmt (&before, g);
      }

  if (is_simd || is_for)
    for (tree c = gimple_omp_for_clauses (octx->stmt);
         c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
          && OMP_CLAUSE_REDUCTION_INSCAN (c))
        {
          location_t clause_loc = OMP_CLAUSE_LOCATION (c);
          tree var = OMP_CLAUSE_DECL (c);
          tree new_var = lookup_decl (var, octx);
          tree val = new_var;
          tree var2 = NULL_TREE;
          tree var3 = NULL_TREE;
          tree var4 = NULL_TREE;
          tree lane0 = NULL_TREE;
          tree new_vard = new_var;
          if (omp_privatize_by_reference (var))
            {
              new_var = build_simple_mem_ref_loc (clause_loc, new_var);
              val = new_var;
            }
          if (DECL_HAS_VALUE_EXPR_P (new_vard))
            {
              val = DECL_VALUE_EXPR (new_vard);
              if (new_vard != new_var)
                {
                  gcc_assert (TREE_CODE (val) == ADDR_EXPR);
                  val = TREE_OPERAND (val, 0);
                }
              if (TREE_CODE (val) == ARRAY_REF
                  && VAR_P (TREE_OPERAND (val, 0)))
                {
                  tree v = TREE_OPERAND (val, 0);
                  if (lookup_attribute ("omp simd array",
                                        DECL_ATTRIBUTES (v)))
                    {
                      val = unshare_expr (val);
                      lane0 = TREE_OPERAND (val, 1);
                      TREE_OPERAND (val, 1) = lane;
                      var2 = lookup_decl (v, octx);
                      if (octx->scan_exclusive)
                        var4 = lookup_decl (var2, octx);
                      if (input_phase
                          && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
                        var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
                      if (!input_phase)
                        {
                          var2 = build4 (ARRAY_REF, TREE_TYPE (val),
                                         var2, lane, NULL_TREE, NULL_TREE);
                          TREE_THIS_NOTRAP (var2) = 1;
                          if (octx->scan_exclusive)
                            {
                              var4 = build4 (ARRAY_REF, TREE_TYPE (val),
                                             var4, lane, NULL_TREE,
                                             NULL_TREE);
                              TREE_THIS_NOTRAP (var4) = 1;
                            }
                        }
                      else
                        var2 = val;
                    }
                }
            }
          else
            {
              var2 = build_outer_var_ref (var, octx);
              if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
                {
                  var3 = maybe_lookup_decl (new_vard, octx);
                  if (var3 == new_vard || var3 == NULL_TREE)
                    var3 = NULL_TREE;
                  else if (is_simd && octx->scan_exclusive && !input_phase)
                    {
                      var4 = maybe_lookup_decl (var3, octx);
                      if (var4 == var3 || var4 == NULL_TREE)
                        {
                          if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
                            {
                              var4 = var3;
                              var3 = NULL_TREE;
                            }
                          else
                            var4 = NULL_TREE;
                        }
                    }
                }
              if (is_simd
                  && octx->scan_exclusive
                  && !input_phase
                  && var4 == NULL_TREE)
                var4 = create_tmp_var (TREE_TYPE (val));
            }
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            {
              tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
              if (input_phase)
                {
                  if (var3)
                    {
                      /* If we've added a separate identity element
                         variable, copy it over into val.  */
                      tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
                                                                      var3);
                      gimplify_and_add (x, &before);
                    }
                  else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
                    {
                      /* Otherwise, assign to it the identity element.  */
                      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
                      tseq = copy_gimple_seq_and_replace_locals (tseq);
                      tree ref = build_outer_var_ref (var, octx);
                      tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
                                ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
                      if (x)
                        {
                          if (new_vard != new_var)
                            val = build_fold_addr_expr_loc (clause_loc, val);
                          SET_DECL_VALUE_EXPR (new_vard, val);
                        }
                      SET_DECL_VALUE_EXPR (placeholder, ref);
                      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
                      lower_omp (&tseq, octx);
                      if (x)
                        SET_DECL_VALUE_EXPR (new_vard, x);
                      SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
                      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
                      gimple_seq_add_seq (&before, tseq);
                      if (is_simd)
                        OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
                    }
                }
              else if (is_simd)
                {
                  tree x;
                  if (octx->scan_exclusive)
                    {
                      tree v4 = unshare_expr (var4);
                      tree v2 = unshare_expr (var2);
                      x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
                      gimplify_and_add (x, &before);
                    }
                  gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
                  x = (DECL_HAS_VALUE_EXPR_P (new_vard)
                       ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
                  tree vexpr = val;
                  if (x && new_vard != new_var)
                    vexpr = build_fold_addr_expr_loc (clause_loc, val);
                  if (x)
                    SET_DECL_VALUE_EXPR (new_vard, vexpr);
                  SET_DECL_VALUE_EXPR (placeholder, var2);
                  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
                  lower_omp (&tseq, octx);
                  gimple_seq_add_seq (&before, tseq);
                  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
                  if (x)
                    SET_DECL_VALUE_EXPR (new_vard, x);
                  SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
                  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
                  if (octx->scan_inclusive)
                    {
                      x = lang_hooks.decls.omp_clause_assign_op (c, val,
                                                                 var2);
                      gimplify_and_add (x, &before);
                    }
                  else if (lane0 == NULL_TREE)
                    {
                      x = lang_hooks.decls.omp_clause_assign_op (c, val,
                                                                 var4);
                      gimplify_and_add (x, &before);
                    }
                }
            }
          else
            {
              if (input_phase)
                {
                  /* input phase.  Set val to initializer before
                     the body.  */
                  tree x = omp_reduction_init (c, TREE_TYPE (new_var));
                  gimplify_assign (val, x, &before);
                }
              else if (is_simd)
                {
                  /* scan phase.  */
                  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
                  if (code == MINUS_EXPR)
                    code = PLUS_EXPR;

                  tree x = build2 (code, TREE_TYPE (var2),
                                   unshare_expr (var2), unshare_expr (val));
                  if (octx->scan_inclusive)
                    {
                      gimplify_assign (unshare_expr (var2), x, &before);
                      gimplify_assign (val, var2, &before);
                    }
                  else
                    {
                      gimplify_assign (unshare_expr (var4),
                                       unshare_expr (var2), &before);
                      gimplify_assign (var2, x, &before);
                      if (lane0 == NULL_TREE)
                        gimplify_assign (val, var4, &before);
                    }
                }
            }
          if (octx->scan_exclusive && !input_phase && lane0)
            {
              tree vexpr = unshare_expr (var4);
              TREE_OPERAND (vexpr, 1) = lane0;
              if (new_vard != new_var)
                vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
              SET_DECL_VALUE_EXPR (new_vard, vexpr);
            }
        }

  if (is_simd && !is_for_simd)
    {
      gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
      gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }
  lower_omp (gimple_omp_body_ptr (stmt), octx);
  if (before)
    {
      gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (stmt));
      gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
    }
}
/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case, it
   requires that the languages coordinate on a symbol name.  It is therefore
   best put here in common code.  */
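/* For example (name illustrative): every translation unit containing
   '#pragma omp critical (foo)' agrees on the single symbol
   '.gomp_critical_user_foo' built below, so all of them serialize on
   the same mutex at run time.  */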
static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      if (!critical_name_mutexes)
        critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
        {
          char *new_str;

          decl = create_tmp_var_raw (ptr_type_node);

          new_str = ACONCAT ((".gomp_critical_user_",
                              IDENTIFIER_POINTER (name), NULL));
          DECL_NAME (decl) = get_identifier (new_str);
          TREE_PUBLIC (decl) = 1;
          TREE_STATIC (decl) = 1;
          DECL_COMMON (decl) = 1;
          DECL_ARTIFICIAL (decl) = 1;
          DECL_IGNORED_P (decl) = 1;

          varpool_node::finalize_decl (decl);

          critical_name_mutexes->put (name, decl);
        }
      else
        decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
         inside function marked as offloadable, the symbol must be
         marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
        varpool_node::get_create (decl)->offloadable = 1;
      else
        for (octx = ctx->outer; octx; octx = octx->outer)
          if (is_gimple_omp_offloaded (octx->stmt))
            {
              varpool_node::get_create (decl)->offloadable = 1;
              break;
            }

      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
                                  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
                                    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  *CLIST is for lastprivate(conditional:) code that needs
   to be emitted in a critical section.  */
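/* E.g. (illustrative) for 'for (i = 0; i < n; i++)' the lastprivate
   copy-out is gated on i >= n, and when the step is +/-1 this is
   strengthened below to i == n so that value-range propagation can often
   eliminate the copy.  */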
static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
                           gimple_seq *dlist, gimple_seq *clist,
                           struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
        cond_code = EQ_EXPR;
    }

  tree n2 = fd->loop.n2;
  if (fd->collapse > 1
      && TREE_CODE (n2) != INTEGER_CST
      && gimple_omp_for_combined_into_p (fd->for_stmt))
    {
      struct omp_context *taskreg_ctx = NULL;
      if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
        {
          gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
          if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
              || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
            {
              if (gimple_omp_for_combined_into_p (gfor))
                {
                  gcc_assert (ctx->outer->outer
                              && is_parallel_ctx (ctx->outer->outer));
                  taskreg_ctx = ctx->outer->outer;
                }
              else
                {
                  struct omp_for_data outer_fd;
                  omp_extract_for_data (gfor, &outer_fd, NULL);
                  n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
                }
            }
          else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
            taskreg_ctx = ctx->outer->outer;
        }
      else if (is_taskreg_ctx (ctx->outer))
        taskreg_ctx = ctx->outer;
      if (taskreg_ctx)
        {
          int i;
          tree taskreg_clauses
            = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
          tree innerc = omp_find_clause (taskreg_clauses,
                                         OMP_CLAUSE__LOOPTEMP_);
          gcc_assert (innerc);
          int count = fd->collapse;
          if (fd->non_rect
              && fd->last_nonrect == fd->first_nonrect + 1)
            if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
              if (!TYPE_UNSIGNED (TREE_TYPE (v)))
                count += 4;
          for (i = 0; i < count; i++)
            {
              innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
                                        OMP_CLAUSE__LOOPTEMP_);
              gcc_assert (innerc);
            }
          innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
                                    OMP_CLAUSE__LOOPTEMP_);
          if (innerc)
            n2 = fold_convert (TREE_TYPE (n2),
                               lookup_decl (OMP_CLAUSE_DECL (innerc),
                                            taskreg_ctx));
        }
    }
  cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
          && tree_fits_shwi_p (fd->loop.n2)
          && ! integer_zerop (fd->loop.n2))
        vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
        vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
         any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
/* OpenACC privatization.

   Or, in other words, *sharing* at the respective OpenACC level of
   parallelism.

   From a correctness perspective, a non-addressable variable can't be accessed
   outside the current thread, so it can go in a (faster than shared memory)
   register -- though that register may need to be broadcast in some
   circumstances.  A variable can only meaningfully be "shared" across workers
   or vector lanes if its address is taken, e.g. by a call to an atomic
   builtin.

   From an optimisation perspective, the answer might be fuzzier: maybe
   sometimes, using shared memory directly would be faster than broadcast?  */
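/* Illustrative example (not taken from this pass): given

     int w = 0;
     #pragma acc parallel loop worker private(w)

   a non-addressable 'w' can simply live in a register of each worker,
   whereas code that takes '&w', e.g. for an atomic builtin, forces it
   into addressable storage visible to the other lanes.  */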
static void
oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
                                       const location_t loc, const tree c,
                                       const tree decl)
{
  const dump_user_location_t d_u_loc
    = dump_user_location_t::from_location_t (loc);
  /* PR100695 "Format decoder, quoting in 'dump_printf' etc."  */
#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat"
#endif
  dump_printf_loc (l_dump_flags, d_u_loc,
                   "variable %<%T%> ", decl);
#if __GNUC__ >= 10
# pragma GCC diagnostic pop
#endif
  if (c)
    dump_printf (l_dump_flags,
                 "in %qs clause ",
                 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
  else
    dump_printf (l_dump_flags,
                 "declared in block ");
}
static bool
oacc_privatization_candidate_p (const location_t loc, const tree c,
                                const tree decl)
{
  dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();

  /* There is some differentiation depending on block vs. clause.  */
  const bool block = !c;

  bool res = true;

  if (res && !VAR_P (decl))
    {
      /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
         privatized into a new VAR_DECL.  */
      gcc_checking_assert (TREE_CODE (decl) != PARM_DECL);

      res = false;

      if (dump_enabled_p ())
        {
          oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
          dump_printf (l_dump_flags,
                       "potentially has improper OpenACC privatization level: %qs\n",
                       get_tree_code_name (TREE_CODE (decl)));
        }
    }

  if (res && block && TREE_STATIC (decl))
    {
      res = false;

      if (dump_enabled_p ())
        {
          oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
          dump_printf (l_dump_flags,
                       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
                       "static");
        }
    }

  if (res && block && DECL_EXTERNAL (decl))
    {
      res = false;

      if (dump_enabled_p ())
        {
          oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
          dump_printf (l_dump_flags,
                       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
                       "external");
        }
    }

  if (res && !TREE_ADDRESSABLE (decl))
    {
      res = false;

      if (dump_enabled_p ())
        {
          oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
          dump_printf (l_dump_flags,
                       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
                       "not addressable");
        }
    }

  if (res)
    {
      if (dump_enabled_p ())
        {
          oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
          dump_printf (l_dump_flags,
                       "is candidate for adjusting OpenACC privatization level\n");
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      print_generic_decl (dump_file, decl, dump_flags);
      fprintf (dump_file, "\n");
    }

  return res;
}
/* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
   CTX.  */

static void
oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
{
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
      {
        tree decl = OMP_CLAUSE_DECL (c);

        tree new_decl = lookup_decl (decl, ctx);

        if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c,
                                             new_decl))
          continue;

        gcc_checking_assert
          (!ctx->oacc_privatization_candidates.contains (new_decl));
        ctx->oacc_privatization_candidates.safe_push (new_decl);
      }
}
/* Scan DECLS for candidates for adjusting OpenACC privatization level in
   CTX.  */

static void
oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
{
  for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
    {
      tree new_decl = lookup_decl (decl, ctx);
      gcc_checking_assert (new_decl == decl);

      if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL,
                                           new_decl))
        continue;

      gcc_checking_assert
        (!ctx->oacc_privatization_candidates.contains (new_decl));
      ctx->oacc_privatization_candidates.safe_push (new_decl);
    }
}
/* Callback for walk_gimple_seq.  Find #pragma omp scan statement.  */

static tree
omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
               struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
          && gimple_omp_for_combined_into_p (stmt))
        *handled_ops_p = false;
      break;

    case GIMPLE_OMP_SCAN:
      *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
      return integer_zero_node;
    default:
      break;
    }
  return NULL;
}
/* Helper function for lower_omp_for, add transformations for a worksharing
   loop with scan directives inside of it.
   For worksharing loop not combined with simd, transform:
   #pragma omp for reduction(inscan,+:r) private(i)
   for (i = 0; i < n; i = i + 1)
     {
       {
         update (r);
       }
       #pragma omp scan inclusive(r)
       {
         use (r);
       }
     }

   into two worksharing loops + code to merge results:

   num_threads = omp_get_num_threads ();
   thread_num = omp_get_thread_num ();
   if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
   <D.2099>:
   var2 = r;
   goto <D.2101>;
   <D.2100>:
   // For UDRs this is UDR init, or if ctors are needed, copy from
   // var3 that has been constructed to contain the neutral element.
   var2 = 0;
   <D.2101>:
   ivar = 0;
   // The _scantemp_ clauses will arrange for rpriva to be initialized to
   // a shared array with num_threads elements and rprivb to a local array
   // number of elements equal to the number of (contiguous) iterations the
   // current thread will perform.  controlb and controlp variables are
   // temporaries to handle deallocation of rprivb at the end of second
   // GOMP_FOR.
   #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
     _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
   for (i = 0; i < n; i = i + 1)
     {
       {
         // For UDRs this is UDR init or copy from var3.
         r = 0;
         // This is the input phase from user code.
         update (r);
         // For UDRs this is UDR merge.
         var2 = var2 + r;
         // Rather than handing it over to the user, save to local thread's
         // private array.
         rprivb[ivar] = var2;
         // For exclusive scan, the above two statements are swapped.
         ivar++;
       }
     }
   // And remember the final value from this thread's into the shared
   // rpriva array.
   rpriva[(sizetype) thread_num] = var2;
   // If more than one thread, compute using Work-Efficient prefix sum
   // the inclusive parallel scan of the rpriva array.
   if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
   <D.2102>:
   GOMP_barrier ();
   down = 0;
   k = 1;
   num_threadsu = (unsigned int) num_threads;
   thread_numup1 = (unsigned int) thread_num + 1;
   <D.2108>:
   twok = k << 1;
   if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
   <D.2110>:
   down = 4294967295;
   k = k >> 1;
   if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
   <D.2112>:
   k = k >> 1;
   <D.2111>:
   twok = k << 1;
   cplx = .MUL_OVERFLOW (thread_nump1, twok);
   mul = REALPART_EXPR <cplx>;
   ovf = IMAGPART_EXPR <cplx>;
   if (ovf == 0) goto <D.2116>; else goto <D.2117>;
   <D.2116>:
   andv = k & down;
   andvm1 = andv + 4294967295;
   l = mul + andvm1;
   if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
   <D.2120>:
   // For UDRs this is UDR merge, performed using var2 variable as temporary,
   // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
   rpriva[l] = rpriva[l - k] + rpriva[l];
   <D.2117>:
   if (down == 0) goto <D.2121>; else goto <D.2122>;
   <D.2121>:
   k = k << 1;
   goto <D.2123>;
   <D.2122>:
   k = k >> 1;
   <D.2123>:
   GOMP_barrier ();
   if (k != 0) goto <D.2108>; else goto <D.2103>;
   <D.2103>:
   if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
   <D.2124>:
   // For UDRs this is UDR init or copy from var3.
   var2 = 0;
   goto <D.2126>;
   <D.2125>:
   var2 = rpriva[thread_num - 1];
   <D.2126>:
   ivar = 0;
   #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
     reduction(inscan,+:r) private(i)
   for (i = 0; i < n; i = i + 1)
     {
       {
         // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
         r = var2 + rprivb[ivar];
       }
       #pragma omp scan exclusive(r)
       {
         // This is the scan phase from user code.
         use (r);
         // Plus a bump of the iterator.
         ivar++;
       }
     }  */
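/* Cost note (follows from the control flow above): the prefix-sum phase
   performs on the order of 2 * log2 (num_threads) rounds of the
   k-doubling/halving loop, each ending in a GOMP_barrier (); everything
   else is just the two worksharing loops.  */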
static void
lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
                    struct omp_for_data *fd, omp_context *ctx)
{
  bool is_for_simd = gimple_omp_for_combined_p (stmt);
  gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);

  gimple_seq body = gimple_omp_body (stmt);
  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gimple_stmt_iterator gsi = input1_gsi;
  gsi_next (&gsi);
  gimple_stmt_iterator scan1_gsi = gsi;
  gimple *scan_stmt1 = gsi_stmt (gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);

  gimple_seq input_body = gimple_omp_body (input_stmt1);
  gimple_seq scan_body = gimple_omp_body (scan_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);
  gimple_omp_set_body (scan_stmt1, NULL);
  gimple_omp_set_body (stmt, NULL);

  gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
  gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
  gimple_omp_set_body (stmt, body);
  gimple_omp_set_body (input_stmt1, input_body);

  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi = input2_gsi;
  gsi_next (&gsi);
  gimple_stmt_iterator scan2_gsi = gsi;
  gimple *scan_stmt2 = gsi_stmt (gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  gimple_omp_set_body (scan_stmt2, scan_body);

  gimple_stmt_iterator input3_gsi = gsi_none ();
  gimple_stmt_iterator scan3_gsi = gsi_none ();
  gimple_stmt_iterator input4_gsi = gsi_none ();
  gimple_stmt_iterator scan4_gsi = gsi_none ();
  gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
  gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
  omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
  if (is_for_simd)
    {
      memset (&wi, 0, sizeof (wi));
      wi.val_only = true;
      wi.info = (void *) &input3_gsi;
      walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
      gcc_assert (!gsi_end_p (input3_gsi));

      input_stmt3 = gsi_stmt (input3_gsi);
      gsi = input3_gsi;
      gsi_next (&gsi);
      scan3_gsi = gsi;
      scan_stmt3 = gsi_stmt (gsi);
      gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);

      memset (&wi, 0, sizeof (wi));
      wi.val_only = true;
      wi.info = (void *) &input4_gsi;
      walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
      gcc_assert (!gsi_end_p (input4_gsi));

      input_stmt4 = gsi_stmt (input4_gsi);
      gsi = input4_gsi;
      gsi_next (&gsi);
      scan4_gsi = gsi;
      scan_stmt4 = gsi_stmt (gsi);
      gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);

      input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
      scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
    }

  tree num_threads = create_tmp_var (integer_type_node);
  tree thread_num = create_tmp_var (integer_type_node);
  tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
  tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  gimple *g = gimple_build_call (nthreads_decl, 0);
  gimple_call_set_lhs (g, num_threads);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_call (threadnum_decl, 0);
  gimple_call_set_lhs (g, thread_num);
  gimple_seq_add_stmt (body_p, g);

  tree ivar = create_tmp_var (sizetype);
  tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
  tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
  tree k = create_tmp_var (unsigned_type_node);
  tree l = create_tmp_var (unsigned_type_node);

  gimple_seq clist = NULL, mdlist = NULL;
  gimple_seq thr01_list = NULL, thrn1_list = NULL;
  gimple_seq thr02_list = NULL, thrn2_list = NULL;
  gimple_seq scan1_list = NULL, input2_list = NULL;
  gimple_seq last_list = NULL, reduc_list = NULL;
  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
        && OMP_CLAUSE_REDUCTION_INSCAN (c))
      {
        location_t clause_loc = OMP_CLAUSE_LOCATION (c);
        tree var = OMP_CLAUSE_DECL (c);
        tree new_var = lookup_decl (var, ctx);
        tree var3 = NULL_TREE;
        tree new_vard = new_var;
        if (omp_privatize_by_reference (var))
          new_var = build_simple_mem_ref_loc (clause_loc, new_var);
        if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            var3 = maybe_lookup_decl (new_vard, ctx);
            if (var3 == new_vard)
              var3 = NULL_TREE;
          }

        tree ptype = build_pointer_type (TREE_TYPE (new_var));
        tree rpriva = create_tmp_var (ptype);
        tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
        OMP_CLAUSE_DECL (nc) = rpriva;
        *cp1 = nc;
        cp1 = &OMP_CLAUSE_CHAIN (nc);

        tree rprivb = create_tmp_var (ptype);
        nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
        OMP_CLAUSE_DECL (nc) = rprivb;
        OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
        *cp1 = nc;
        cp1 = &OMP_CLAUSE_CHAIN (nc);

        tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
        if (new_vard != new_var)
          TREE_ADDRESSABLE (var2) = 1;
        gimple_add_tmp_var (var2);

        tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
        x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
                             TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
        x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
        tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);

        x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
                             thread_num, integer_minus_one_node);
        x = fold_convert_loc (clause_loc, sizetype, x);
        x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
                             TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
        x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
        tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);

        x = fold_convert_loc (clause_loc, sizetype, l);
        x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
                             TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
        x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
        tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);

        x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
        x = fold_convert_loc (clause_loc, sizetype, x);
        x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
                             TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
        x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
        tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);

        x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
                             TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
        x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
        tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);

        tree var4 = is_for_simd ? new_var : var2;
        tree var5 = NULL_TREE, var6 = NULL_TREE;
        if (is_for_simd)
          {
            var5 = lookup_decl (var, input_simd_ctx);
            var6 = lookup_decl (var, scan_simd_ctx);
            if (new_vard != new_var)
              {
                var5 = build_simple_mem_ref_loc (clause_loc, var5);
                var6 = build_simple_mem_ref_loc (clause_loc, var6);
              }
          }
        if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
            tree val;

            x = lang_hooks.decls.omp_clause_default_ctor
                  (c, var2, build_outer_var_ref (var, ctx));
            if (x)
              gimplify_and_add (x, &clist);

            x = build_outer_var_ref (var, ctx);
            x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
                                                       x);
            gimplify_and_add (x, &thr01_list);

            tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
                      ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
            if (var3)
              {
                x = unshare_expr (var4);
                x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
                gimplify_and_add (x, &thrn1_list);
                x = unshare_expr (var4);
                x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
                gimplify_and_add (x, &thr02_list);
              }
            else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
              {
                /* Otherwise, assign to it the identity element.  */
                gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
                tseq = copy_gimple_seq_and_replace_locals (tseq);
                val = unshare_expr (var4);
                if (new_vard != new_var)
                  val = build_fold_addr_expr_loc (clause_loc, val);
                SET_DECL_VALUE_EXPR (new_vard, val);
                DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
                SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
                DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
                lower_omp (&tseq, ctx);
                gimple_seq_add_seq (&thrn1_list, tseq);
                tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
                lower_omp (&tseq, ctx);
                gimple_seq_add_seq (&thr02_list, tseq);
                SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
                DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
                OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
                if (y)
                  SET_DECL_VALUE_EXPR (new_vard, y);
                else
                  {
                    DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
                    SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
                  }
              }

            x = unshare_expr (var4);
            x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
            gimplify_and_add (x, &thrn2_list);

            if (is_for_simd)
              {
                x = unshare_expr (rprivb_ref);
                x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
                gimplify_and_add (x, &scan1_list);
              }
            else
              {
                if (ctx->scan_exclusive)
                  {
                    x = unshare_expr (rprivb_ref);
                    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
                    gimplify_and_add (x, &scan1_list);
                  }

                gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
                tseq = copy_gimple_seq_and_replace_locals (tseq);
                SET_DECL_VALUE_EXPR (placeholder, var2);
                DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
                lower_omp (&tseq, ctx);
                gimple_seq_add_seq (&scan1_list, tseq);

                if (ctx->scan_inclusive)
                  {
                    x = unshare_expr (rprivb_ref);
                    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
                    gimplify_and_add (x, &scan1_list);
                  }
              }

            x = unshare_expr (rpriva_ref);
            x = lang_hooks.decls.omp_clause_assign_op (c, x,
                                                       unshare_expr (var4));
            gimplify_and_add (x, &mdlist);

            x = unshare_expr (is_for_simd ? var6 : new_var);
            x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
            gimplify_and_add (x, &input2_list);

            val = rprivb_ref;
            if (new_vard != new_var)
              val = build_fold_addr_expr_loc (clause_loc, val);

            gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
            tseq = copy_gimple_seq_and_replace_locals (tseq);
            SET_DECL_VALUE_EXPR (new_vard, val);
            DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
            if (is_for_simd)
              {
                SET_DECL_VALUE_EXPR (placeholder, var6);
                DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
              }
            else
              DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
            lower_omp (&tseq, ctx);
            if (y)
              SET_DECL_VALUE_EXPR (new_vard, y);
            else
              {
                DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
                SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
              }
            if (!is_for_simd)
              {
                SET_DECL_VALUE_EXPR (placeholder, new_var);
                DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
                lower_omp (&tseq, ctx);
              }
            gimple_seq_add_seq (&input2_list, tseq);

            x = build_outer_var_ref (var, ctx);
            x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
            gimplify_and_add (x, &last_list);

            x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
            gimplify_and_add (x, &reduc_list);
            tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
            tseq = copy_gimple_seq_and_replace_locals (tseq);
            val = rprival_ref;
            if (new_vard != new_var)
              val = build_fold_addr_expr_loc (clause_loc, val);
            SET_DECL_VALUE_EXPR (new_vard, val);
            DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
            SET_DECL_VALUE_EXPR (placeholder, var2);
            lower_omp (&tseq, ctx);
            OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
            SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
            DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
            if (y)
              SET_DECL_VALUE_EXPR (new_vard, y);
            else
              {
                DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
                SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
              }
            gimple_seq_add_seq (&reduc_list, tseq);
            x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
            gimplify_and_add (x, &reduc_list);

            x = lang_hooks.decls.omp_clause_dtor (c, var2);
            if (x)
              gimplify_and_add (x, dlist);
          }
        else
          {
            x = build_outer_var_ref (var, ctx);
            gimplify_assign (unshare_expr (var4), x, &thr01_list);

            x = omp_reduction_init (c, TREE_TYPE (new_var));
            gimplify_assign (unshare_expr (var4), unshare_expr (x),
                             &thrn1_list);
            gimplify_assign (unshare_expr (var4), x, &thr02_list);

            gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);

            enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
            if (code == MINUS_EXPR)
              code = PLUS_EXPR;

            if (is_for_simd)
              gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
            else
              {
                if (ctx->scan_exclusive)
                  gimplify_assign (unshare_expr (rprivb_ref), var2,
                                   &scan1_list);
                x = build2 (code, TREE_TYPE (new_var), var2, new_var);
                gimplify_assign (var2, x, &scan1_list);
                if (ctx->scan_inclusive)
                  gimplify_assign (unshare_expr (rprivb_ref), var2,
                                   &scan1_list);
              }

            gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
                             &mdlist);

            x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
            gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);

            gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
                             &last_list);

            x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
                        unshare_expr (rprival_ref));
            gimplify_assign (rprival_ref, x, &reduc_list);
          }
      }

  g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
  gimple_seq_add_stmt (&scan1_list, g);
  g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
  gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
                                            ? scan_stmt4 : scan_stmt2), g);

  tree controlb = create_tmp_var (boolean_type_node);
  tree controlp = create_tmp_var (ptr_type_node);
  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlb;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp1 = nc;
  cp1 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlp;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp1 = nc;
  cp1 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlb;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp2 = nc;
  cp2 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlp;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp2 = nc;
  cp2 = &OMP_CLAUSE_CHAIN (nc);

  *cp1 = gimple_omp_for_clauses (stmt);
  gimple_omp_for_set_clauses (stmt, new_clauses1);
  *cp2 = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, new_clauses2);

  if (is_for_simd)
    {
      gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
      gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);

      gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
                            GSI_SAME_STMT);
      gsi_remove (&input3_gsi, true);
      gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
                            GSI_SAME_STMT);
      gsi_remove (&scan3_gsi, true);
      gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
                            GSI_SAME_STMT);
      gsi_remove (&input4_gsi, true);
      gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
                            GSI_SAME_STMT);
      gsi_remove (&scan4_gsi, true);
    }
  else
    {
      gimple_omp_set_body (scan_stmt1, scan1_list);
      gimple_omp_set_body (input_stmt2, input2_list);
    }

  gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
                        GSI_SAME_STMT);
  gsi_remove (&input1_gsi, true);
  gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
                        GSI_SAME_STMT);
  gsi_remove (&scan1_gsi, true);
  gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
                        GSI_SAME_STMT);
  gsi_remove (&input2_gsi, true);
  gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
                        GSI_SAME_STMT);
  gsi_remove (&scan2_gsi, true);

  gimple_seq_add_seq (body_p, clist);

  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thr01_list);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thrn1_list);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (ivar, size_zero_node);
  gimple_seq_add_stmt (body_p, g);

  gimple_seq_add_stmt (body_p, stmt);
  gimple_seq_add_seq (body_p, body);
  gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
                                                          fd->loop.v));

  g = gimple_build_omp_return (true);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, mdlist);

  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);

  g = omp_build_barrier (NULL);
  gimple_seq_add_stmt (body_p, g);

  tree down = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  tree num_threadsu = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
  gimple_seq_add_stmt (body_p, g);

  tree thread_numu = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
  gimple_seq_add_stmt (body_p, g);

  tree thread_nump1 = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
                           build_int_cst (unsigned_type_node, 1));
  gimple_seq_add_stmt (body_p, g);

  lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  tree twok = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab4);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab6);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_label (lab5);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
  g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
  gimple_call_set_lhs (g, cplx);
  gimple_seq_add_stmt (body_p, g);
  tree mul = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (mul, REALPART_EXPR,
                           build1 (REALPART_EXPR, unsigned_type_node, cplx));
  gimple_seq_add_stmt (body_p, g);
  tree ovf = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (ovf, IMAGPART_EXPR,
                           build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
  gimple_seq_add_stmt (body_p, g);

  tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
                         lab7, lab8);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab7);
  gimple_seq_add_stmt (body_p, g);

  tree andv = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
  gimple_seq_add_stmt (body_p, g);
  tree andvm1 = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
                           build_minus_one_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
  gimple_seq_add_stmt (body_p, g);

  tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab9);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, reduc_list);
  g = gimple_build_label (lab8);
  gimple_seq_add_stmt (body_p, g);

  tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
                         lab10, lab11);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab10);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_goto (lab12);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab11);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab12);
  gimple_seq_add_stmt (body_p, g);

  g = omp_build_barrier (NULL);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
                         lab3, lab2);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);

  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thr02_list);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thrn2_list);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (ivar, size_zero_node);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_stmt (body_p, new_stmt);
  gimple_seq_add_seq (body_p, new_body);

  gimple_seq new_dlist = NULL;
  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree num_threadsm1 = create_tmp_var (integer_type_node);
  g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
                           integer_minus_one_node);
  gimple_seq_add_stmt (&new_dlist, g);
  g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
  gimple_seq_add_stmt (&new_dlist, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&new_dlist, g);
  gimple_seq_add_seq (&new_dlist, last_list);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&new_dlist, g);
  gimple_seq_add_seq (&new_dlist, *dlist);
  *dlist = new_dlist;
}
/* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
   the addresses of variables to be made private at the surrounding
   parallelism level.  Such functions appear in the gimple code stream in two
   forms, e.g. for a partitioned loop:

      .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
      .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
      .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
      .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);

   or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
   not as part of a HEAD_MARK sequence:

      .UNIQUE (OACC_PRIVATE, 0, 0, &w);

   For such stand-alone appearances, the 3rd argument is always 0, denoting
   gang partitioning.  */
11606 lower_oacc_private_marker (omp_context
*ctx
)
11608 if (ctx
->oacc_privatization_candidates
.length () == 0)
11611 auto_vec
<tree
, 5> args
;
11613 args
.quick_push (build_int_cst (integer_type_node
, IFN_UNIQUE_OACC_PRIVATE
));
11614 args
.quick_push (integer_zero_node
);
11615 args
.quick_push (integer_minus_one_node
);
11619 FOR_EACH_VEC_ELT (ctx
->oacc_privatization_candidates
, i
, decl
)
11621 gcc_checking_assert (TREE_ADDRESSABLE (decl
));
11622 tree addr
= build_fold_addr_expr (decl
);
11623 args
.safe_push (addr
);
11626 return gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
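/* For illustration (a sketch, not taken from any particular testcase): an
   OpenACC construct such as

     #pragma acc parallel loop private(w)
     for (i = 0; i < n; i++) ...

   records 'w' as a privatization candidate, and the marker built above then
   carries '&w' so that the later OpenACC device lowering can localize 'w'
   at the appropriate parallelism level.  */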
/* Lower code for an OMP loop directive.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  if (is_gimple_omp_oacc (ctx->stmt))
    oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      if (is_gimple_omp_oacc (ctx->stmt))
	oacc_privatization_scan_decl_chain (ctx, vars);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and its block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      size_t count2 = 0;
      tree type2 = NULL_TREE;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      if (fd.collapse > 1
	  && fd.non_rect
	  && fd.last_nonrect == fd.first_nonrect + 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      v = gimple_omp_for_index (stmt, fd.first_nonrect);
	      type2 = TREE_TYPE (v);
	      count2 = 3;
	    }
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count + count2; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (i >= count ? type2 : type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
					 ctx);

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  gcall *private_marker = NULL;
  if (is_gimple_omp_oacc (ctx->stmt)
      && !gimple_seq_empty_p (omp_for_body))
    private_marker = lower_oacc_private_marker (ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
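  /* For instance (a sketch): if VAL2 is a variable 'n' shared from the
     enclosing parallel, it is not gimple-invariant here, so it is loaded
     through the receiver object into a formal temporary, roughly

	D.1234 = .omp_data_i->n;

     and D.1234 then replaces 'n' in the loop header (the temporary name
     is illustrative only).  */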
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt), private_marker,
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);

  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  if ((ctx->scan_inclusive || ctx->scan_exclusive)
      && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
  else
    {
      gimple_seq_add_stmt (&body, stmt);
      gimple_seq_add_seq (&body, gimple_omp_body (stmt));
    }

  gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							 fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);

  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
      gimple_seq_add_seq (&body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
    }

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  if (rclauses)
    {
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  /* Region exit marker goes at the end of the loop body.  */
  gimple *g = gimple_build_omp_return (fd.have_nowait);
  gimple_seq_add_stmt (&body, g);

  gimple_seq_add_seq (&body, tred_dlist);

  maybe_add_implicit_barrier_cancel (ctx, g, &body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
/* Callback for walk_stmts.  Check if the current statement only contains
   GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */

static tree
check_combined_parallel (gimple_stmt_iterator *gsi_p,
			 bool *handled_ops_p,
			 struct walk_stmt_info *wi)
{
  int *info = (int *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_DEBUG:
      break;
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
      *info = *info == 0 ? 1 : -1;
      break;
    default:
      *info = -1;
      break;
    }

  return NULL;
}
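/* Usage note (a sketch): lower_omp_taskreg walks a parallel body with this
   callback and an int counter in WI->info; the counter ends up 1 exactly
   when the body contains a single workshare region, which is what allows
   the enclosing parallel to be marked as combined.  */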
struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.cc (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  omp_context *ctx;
};

static tree
task_copyfn_copy_decl (tree var, copy_body_data *cb)
{
  struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;

  if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
    return create_tmp_var (TREE_TYPE (var));
  else
    return var;
}
static tree
task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
{
  tree name, new_fields = NULL, type, f;

  type = lang_hooks.types.make_type (RECORD_TYPE);
  name = DECL_NAME (TYPE_NAME (orig_type));
  name = build_decl (gimple_location (tcctx->ctx->stmt),
		     TYPE_DECL, name, type);
  TYPE_NAME (type) = name;

  for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
    {
      tree new_f = copy_node (f);
      DECL_CONTEXT (new_f) = type;
      TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
      TREE_CHAIN (new_f) = new_fields;
      walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		 &tcctx->cb, NULL);
      new_fields = new_f;
      tcctx->cb.decl_map->put (f, new_f);
    }
  TYPE_FIELDS (type) = nreverse (new_fields);
  layout_type (type);
  return type;
}

/* Create task copyfn.  */
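/* In outline (a sketch only; the exact statements depend on the clauses),
   for 'firstprivate (x) shared (s)' the function generated here behaves
   roughly like

     void __task_copyfn (struct .omp_data_s *dst, struct .omp_data_s1 *src)
     {
       dst->s = src->s;   // forward the shared var pointer
       dst->x = src->x;   // or a copy constructor call for C++ types
     }

   (the names are illustrative); GOMP_task invokes it to fill in the
   task's data block.  */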
static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  task_cpyfns.safe_push (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    tcctx.cb.decl_map = NULL;

  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	decl = OMP_CLAUSE_DECL (c);
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
	if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL)
		|| omp_privatize_by_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  {
	    if (ctx->allocate_map)
	      if (tree *allocatorp = ctx->allocate_map->get (decl))
		{
		  tree allocator = *allocatorp;
		  HOST_WIDE_INT ialign = 0;
		  if (TREE_CODE (allocator) == TREE_LIST)
		    {
		      ialign = tree_to_uhwi (TREE_VALUE (allocator));
		      allocator = TREE_PURPOSE (allocator);
		    }
		  if (TREE_CODE (allocator) != INTEGER_CST)
		    {
		      n = splay_tree_lookup (ctx->sfield_map,
					     (splay_tree_key) allocator);
		      allocator = (tree) n->value;
		      if (tcctx.cb.decl_map)
			allocator = *tcctx.cb.decl_map->get (allocator);
		      tree a = build_simple_mem_ref_loc (loc, sarg);
		      allocator = omp_build_component_ref (a, allocator);
		    }
		  allocator = fold_convert (pointer_sized_int_node, allocator);
		  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
		  tree align = build_int_cst (size_type_node,
					      MAX (ialign,
						   DECL_ALIGN_UNIT (decl)));
		  tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
		  tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
						  allocator);
		  ptr = fold_convert (TREE_TYPE (dst), ptr);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
		  append_to_statement_list (t, &list);
		  dst = build_simple_mem_ref_loc (loc, dst);
		}
	    t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  }
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  size_t cnt[5] = { 0, 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_INOUTSET:
	  cnt[4]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	default:
	  gcc_unreachable ();
	}
  if (cnt[1] || cnt[3] || cnt[4])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3] + cnt[4];
  size_t inoutidx = total + idx;
  tree type = build_array_type_nelts (ptr_type_node, total + idx + 2 * cnt[4]);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  for (i = 0; i < 5; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_INOUTSET:
		if (i != 4)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    if (i == 4)
	      {
		t = build4 (ARRAY_REF, ptr_type_node, array,
			    size_int (inoutidx), NULL_TREE, NULL_TREE);
		t = build_fold_addr_expr (t);
		inoutidx += 2;
	      }
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  if (cnt[4])
    for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	  && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_INOUTSET)
	{
	  tree t = OMP_CLAUSE_DECL (c);
	  t = fold_convert (ptr_type_node, t);
	  gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	  r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
		      NULL_TREE, NULL_TREE);
	  g = gimple_build_assign (r, t);
	  gimple_seq_add_stmt (iseq, g);
	  t = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
	  r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
		      NULL_TREE, NULL_TREE);
	  g = gimple_build_assign (r, t);
	  gimple_seq_add_stmt (iseq, g);
	}

  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  tree clobber = build_clobber (type);
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  */
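/* For orientation (a sketch): for '#pragma omp parallel shared(a)', the
   sender side built below stores a's address (or value) into a local
   '.omp_data_o' object, the GIMPLE_OMP_PARALLEL keeps that object as its
   data_arg, and inside the future child function the '.omp_data_i'
   receiver pointer is initialized from it, roughly

     .omp_data_o.1.a = &a;
     #pragma omp parallel [child fn: ... .omp_data_i->a ...]

   pass_expand_omp later outlines the body into the child function.  */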
static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (dep_bind, dep_ilist);
	  gimple_bind_add_stmt (dep_bind, stmt);
	  gimple_bind_add_seq (dep_bind, dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			      OMP_CLAUSE__REDUCTEMP_)))
    {
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
							: OMP_PARALLEL,
				 gimple_omp_taskreg_clauses (ctx->stmt),
				 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) != GIMPLE_OMP_TASK)
    lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
  gimple_omp_set_body (stmt, new_body);

  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
/* Lower the GIMPLE_OMP_TARGET in the current statement
   in GSI_P.  CTX holds context information for the directive.  */
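/* For orientation (a sketch): for an offloaded region the code below builds
   three parallel arrays describing the mapped objects, e.g.

     .omp_data_arr.1   = { &a, &b };       // host addresses
     .omp_data_sizes.2 = { 400, 8 };       // sizes in bytes
     .omp_data_kinds.3 = { 0x22, 0x11 };   // map kind + alignment bits

   which become the data_arg TREE_VEC handed to GOMP_target_ext and friends
   at expansion time.  The values shown are illustrative only.  */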
12650 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12653 tree child_fn
, t
, c
;
12654 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
12655 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
12656 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
12657 location_t loc
= gimple_location (stmt
);
12658 bool offloaded
, data_region
;
12659 unsigned int map_cnt
= 0;
12660 tree in_reduction_clauses
= NULL_TREE
;
12662 offloaded
= is_gimple_omp_offloaded (stmt
);
12663 switch (gimple_omp_target_kind (stmt
))
12665 case GF_OMP_TARGET_KIND_REGION
:
12667 q
= &in_reduction_clauses
;
12668 for (p
= gimple_omp_target_clauses_ptr (stmt
); *p
; )
12669 if (OMP_CLAUSE_CODE (*p
) == OMP_CLAUSE_IN_REDUCTION
)
12672 q
= &OMP_CLAUSE_CHAIN (*q
);
12673 *p
= OMP_CLAUSE_CHAIN (*p
);
12676 p
= &OMP_CLAUSE_CHAIN (*p
);
12678 *p
= in_reduction_clauses
;
12680 case GF_OMP_TARGET_KIND_UPDATE
:
12681 case GF_OMP_TARGET_KIND_ENTER_DATA
:
12682 case GF_OMP_TARGET_KIND_EXIT_DATA
:
12683 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
12684 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
12685 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
12686 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
12687 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
12688 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
12689 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
12690 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
12691 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
12692 data_region
= false;
12694 case GF_OMP_TARGET_KIND_DATA
:
12695 case GF_OMP_TARGET_KIND_OACC_DATA
:
12696 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
12697 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
12698 data_region
= true;
12701 gcc_unreachable ();
12704 clauses
= gimple_omp_target_clauses (stmt
);
12706 gimple_seq dep_ilist
= NULL
;
12707 gimple_seq dep_olist
= NULL
;
12708 bool has_depend
= omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
) != NULL_TREE
;
12709 if (has_depend
|| in_reduction_clauses
)
12711 push_gimplify_context ();
12712 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12714 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
12715 &dep_ilist
, &dep_olist
);
12716 if (in_reduction_clauses
)
12717 lower_rec_input_clauses (in_reduction_clauses
, &dep_ilist
, &dep_olist
,
12725 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
12726 tgt_body
= gimple_bind_body (tgt_bind
);
12728 else if (data_region
)
12729 tgt_body
= gimple_omp_body (stmt
);
12730 child_fn
= ctx
->cb
.dst_fn
;
12732 push_gimplify_context ();
12735 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12736 switch (OMP_CLAUSE_CODE (c
))
12742 case OMP_CLAUSE_MAP
:
12744 /* First check what we're prepared to handle in the following. */
12745 switch (OMP_CLAUSE_MAP_KIND (c
))
12747 case GOMP_MAP_ALLOC
:
12749 case GOMP_MAP_FROM
:
12750 case GOMP_MAP_TOFROM
:
12751 case GOMP_MAP_POINTER
:
12752 case GOMP_MAP_TO_PSET
:
12753 case GOMP_MAP_DELETE
:
12754 case GOMP_MAP_RELEASE
:
12755 case GOMP_MAP_ALWAYS_TO
:
12756 case GOMP_MAP_ALWAYS_FROM
:
12757 case GOMP_MAP_ALWAYS_TOFROM
:
12758 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
12759 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
12760 case GOMP_MAP_STRUCT
:
12761 case GOMP_MAP_ALWAYS_POINTER
:
12762 case GOMP_MAP_ATTACH
:
12763 case GOMP_MAP_DETACH
:
12764 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
12765 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
12767 case GOMP_MAP_IF_PRESENT
:
12768 case GOMP_MAP_FORCE_ALLOC
:
12769 case GOMP_MAP_FORCE_TO
:
12770 case GOMP_MAP_FORCE_FROM
:
12771 case GOMP_MAP_FORCE_TOFROM
:
12772 case GOMP_MAP_FORCE_PRESENT
:
12773 case GOMP_MAP_FORCE_DEVICEPTR
:
12774 case GOMP_MAP_DEVICE_RESIDENT
:
12775 case GOMP_MAP_LINK
:
12776 case GOMP_MAP_FORCE_DETACH
:
12777 gcc_assert (is_gimple_omp_oacc (stmt
));
12780 gcc_unreachable ();
12784 case OMP_CLAUSE_TO
:
12785 case OMP_CLAUSE_FROM
:
12787 var
= OMP_CLAUSE_DECL (c
);
12790 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
12791 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12792 && (OMP_CLAUSE_MAP_KIND (c
)
12793 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
12798 if (DECL_SIZE (var
)
12799 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12801 tree var2
= DECL_VALUE_EXPR (var
);
12802 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12803 var2
= TREE_OPERAND (var2
, 0);
12804 gcc_assert (DECL_P (var2
));
12809 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12810 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12811 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
12813 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12815 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
12816 && varpool_node::get_create (var
)->offloadable
)
12819 tree type
= build_pointer_type (TREE_TYPE (var
));
12820 tree new_var
= lookup_decl (var
, ctx
);
12821 x
= create_tmp_var_raw (type
, get_name (new_var
));
12822 gimple_add_tmp_var (x
);
12823 x
= build_simple_mem_ref (x
);
12824 SET_DECL_VALUE_EXPR (new_var
, x
);
12825 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12830 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12831 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12832 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
12833 && is_omp_target (stmt
))
12835 gcc_assert (maybe_lookup_field (c
, ctx
));
12840 if (!maybe_lookup_field (var
, ctx
))
12843 /* Don't remap compute constructs' reduction variables, because the
12844 intermediate result must be local to each gang. */
12845 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12846 && is_gimple_omp_oacc (ctx
->stmt
)
12847 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
12849 x
= build_receiver_ref (var
, true, ctx
);
12850 tree new_var
= lookup_decl (var
, ctx
);
12852 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12853 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
12854 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12855 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12856 x
= build_simple_mem_ref (x
);
12857 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12859 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
12860 if (omp_privatize_by_reference (new_var
)
12861 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
12862 || DECL_BY_REFERENCE (var
)))
12864 /* Create a local object to hold the instance
12866 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
12867 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
12868 tree inst
= create_tmp_var (type
, id
);
12869 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
12870 x
= build_fold_addr_expr (inst
);
12872 gimplify_assign (new_var
, x
, &fplist
);
12874 else if (DECL_P (new_var
))
12876 SET_DECL_VALUE_EXPR (new_var
, x
);
12877 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12880 gcc_unreachable ();
12885 case OMP_CLAUSE_FIRSTPRIVATE
:
12886 omp_firstprivate_recv
:
12887 gcc_checking_assert (offloaded
);
12888 if (is_gimple_omp_oacc (ctx
->stmt
))
12890 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12891 gcc_checking_assert (!is_oacc_kernels (ctx
));
12892 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12893 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12895 goto oacc_firstprivate
;
12898 var
= OMP_CLAUSE_DECL (c
);
12899 if (!omp_privatize_by_reference (var
)
12900 && !is_gimple_reg_type (TREE_TYPE (var
)))
12902 tree new_var
= lookup_decl (var
, ctx
);
12903 if (is_variable_sized (var
))
12905 tree pvar
= DECL_VALUE_EXPR (var
);
12906 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12907 pvar
= TREE_OPERAND (pvar
, 0);
12908 gcc_assert (DECL_P (pvar
));
12909 tree new_pvar
= lookup_decl (pvar
, ctx
);
12910 x
= build_fold_indirect_ref (new_pvar
);
12911 TREE_THIS_NOTRAP (x
) = 1;
12914 x
= build_receiver_ref (var
, true, ctx
);
12915 SET_DECL_VALUE_EXPR (new_var
, x
);
12916 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12918 /* Fortran array descriptors: firstprivate of data + attach. */
12919 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
12920 && lang_hooks
.decls
.omp_array_data (var
, true))
12924 case OMP_CLAUSE_PRIVATE
:
12925 gcc_checking_assert (offloaded
);
12926 if (is_gimple_omp_oacc (ctx
->stmt
))
12928 /* No 'private' clauses on OpenACC 'kernels'. */
12929 gcc_checking_assert (!is_oacc_kernels (ctx
));
12930 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12931 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12935 var
= OMP_CLAUSE_DECL (c
);
12936 if (is_variable_sized (var
))
12938 tree new_var
= lookup_decl (var
, ctx
);
12939 tree pvar
= DECL_VALUE_EXPR (var
);
12940 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12941 pvar
= TREE_OPERAND (pvar
, 0);
12942 gcc_assert (DECL_P (pvar
));
12943 tree new_pvar
= lookup_decl (pvar
, ctx
);
12944 x
= build_fold_indirect_ref (new_pvar
);
12945 TREE_THIS_NOTRAP (x
) = 1;
12946 SET_DECL_VALUE_EXPR (new_var
, x
);
12947 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12951 case OMP_CLAUSE_USE_DEVICE_PTR
:
12952 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12953 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
12954 case OMP_CLAUSE_IS_DEVICE_PTR
:
12955 var
= OMP_CLAUSE_DECL (c
);
12956 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
12958 while (TREE_CODE (var
) == INDIRECT_REF
12959 || TREE_CODE (var
) == ARRAY_REF
)
12960 var
= TREE_OPERAND (var
, 0);
12961 if (lang_hooks
.decls
.omp_array_data (var
, true))
12962 goto omp_firstprivate_recv
;
12965 if (is_variable_sized (var
))
12967 tree new_var
= lookup_decl (var
, ctx
);
12968 tree pvar
= DECL_VALUE_EXPR (var
);
12969 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12970 pvar
= TREE_OPERAND (pvar
, 0);
12971 gcc_assert (DECL_P (pvar
));
12972 tree new_pvar
= lookup_decl (pvar
, ctx
);
12973 x
= build_fold_indirect_ref (new_pvar
);
12974 TREE_THIS_NOTRAP (x
) = 1;
12975 SET_DECL_VALUE_EXPR (new_var
, x
);
12976 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12978 else if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12979 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
12980 && !omp_privatize_by_reference (var
)
12981 && !omp_is_allocatable_or_ptr (var
)
12982 && !lang_hooks
.decls
.omp_array_data (var
, true))
12983 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12985 tree new_var
= lookup_decl (var
, ctx
);
12986 tree type
= build_pointer_type (TREE_TYPE (var
));
12987 x
= create_tmp_var_raw (type
, get_name (new_var
));
12988 gimple_add_tmp_var (x
);
12989 x
= build_simple_mem_ref (x
);
12990 SET_DECL_VALUE_EXPR (new_var
, x
);
12991 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12995 tree new_var
= lookup_decl (var
, ctx
);
12996 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
12997 gimple_add_tmp_var (x
);
12998 SET_DECL_VALUE_EXPR (new_var
, x
);
12999 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
13006 target_nesting_level
++;
13007 lower_omp (&tgt_body
, ctx
);
13008 target_nesting_level
--;
13010 else if (data_region
)
13011 lower_omp (&tgt_body
, ctx
);
13015 /* Declare all the variables created by mapping and the variables
13016 declared in the scope of the target body. */
13017 record_vars_into (ctx
->block_vars
, child_fn
);
13018 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
13019 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
13024 if (ctx
->record_type
)
13027 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
13028 DECL_NAMELESS (ctx
->sender_decl
) = 1;
13029 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
13030 t
= make_tree_vec (3);
13031 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
13032 TREE_VEC_ELT (t
, 1)
13033 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
13034 ".omp_data_sizes");
13035 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
13036 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
13037 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
13038 tree tkind_type
= short_unsigned_type_node
;
13039 int talign_shift
= 8;
13040 TREE_VEC_ELT (t
, 2)
13041 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
13042 ".omp_data_kinds");
13043 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
13044 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
13045 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
13046 gimple_omp_target_set_data_arg (stmt
, t
);
13048 vec
<constructor_elt
, va_gc
> *vsize
;
13049 vec
<constructor_elt
, va_gc
> *vkind
;
13050 vec_alloc (vsize
, map_cnt
);
13051 vec_alloc (vkind
, map_cnt
);
13052 unsigned int map_idx
= 0;
13054 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13055 switch (OMP_CLAUSE_CODE (c
))
13057 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
13058 unsigned int talign
;
13063 case OMP_CLAUSE_MAP
:
13064 case OMP_CLAUSE_TO
:
13065 case OMP_CLAUSE_FROM
:
13066 oacc_firstprivate_map
:
13068 ovar
= OMP_CLAUSE_DECL (c
);
13069 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13070 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13071 || (OMP_CLAUSE_MAP_KIND (c
)
13072 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
13074 if (!DECL_P (ovar
))
13076 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13077 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
13079 nc
= OMP_CLAUSE_CHAIN (c
);
13080 gcc_checking_assert (OMP_CLAUSE_DECL (nc
)
13081 == get_base_address (ovar
));
13082 ovar
= OMP_CLAUSE_DECL (nc
);
13086 tree x
= build_sender_ref (ovar
, ctx
);
13088 if (in_reduction_clauses
13089 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13090 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
13092 v
= unshare_expr (v
);
13094 while (handled_component_p (*p
)
13095 || TREE_CODE (*p
) == INDIRECT_REF
13096 || TREE_CODE (*p
) == ADDR_EXPR
13097 || TREE_CODE (*p
) == MEM_REF
13098 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
13099 p
= &TREE_OPERAND (*p
, 0);
13101 if (is_variable_sized (d
))
13103 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
13104 d
= DECL_VALUE_EXPR (d
);
13105 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
13106 d
= TREE_OPERAND (d
, 0);
13107 gcc_assert (DECL_P (d
));
13110 = (splay_tree_key
) &DECL_CONTEXT (d
);
13111 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
13116 *p
= build_fold_indirect_ref (nd
);
13118 v
= build_fold_addr_expr_with_type (v
, ptr_type_node
);
13119 gimplify_assign (x
, v
, &ilist
);
13125 if (DECL_SIZE (ovar
)
13126 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
13128 tree ovar2
= DECL_VALUE_EXPR (ovar
);
13129 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
13130 ovar2
= TREE_OPERAND (ovar2
, 0);
13131 gcc_assert (DECL_P (ovar2
));
13134 if (!maybe_lookup_field (ovar
, ctx
)
13135 && !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13136 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13137 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)))
13141 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
13142 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
13143 talign
= DECL_ALIGN_UNIT (ovar
);
13148 if (in_reduction_clauses
13149 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13150 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
13153 if (is_variable_sized (d
))
13155 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
13156 d
= DECL_VALUE_EXPR (d
);
13157 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
13158 d
= TREE_OPERAND (d
, 0);
13159 gcc_assert (DECL_P (d
));
13162 = (splay_tree_key
) &DECL_CONTEXT (d
);
13163 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
13168 var
= build_fold_indirect_ref (nd
);
13171 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13174 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13175 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13176 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
13177 && is_omp_target (stmt
))
13179 x
= build_sender_ref (c
, ctx
);
13180 gimplify_assign (x
, build_fold_addr_expr (var
), &ilist
);
13184 x
= build_sender_ref (ovar
, ctx
);
13186 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13187 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
13188 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
13189 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
13191 gcc_assert (offloaded
);
13193 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
13194 mark_addressable (avar
);
13195 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
13196 talign
= DECL_ALIGN_UNIT (avar
);
13197 avar
= build_fold_addr_expr (avar
);
13198 gimplify_assign (x
, avar
, &ilist
);
13200 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13202 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
13203 if (!omp_privatize_by_reference (var
))
13205 if (is_gimple_reg (var
)
13206 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13207 suppress_warning (var
);
13208 var
= build_fold_addr_expr (var
);
13211 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13212 gimplify_assign (x
, var
, &ilist
);
13214 else if (is_gimple_reg (var
))
13216 gcc_assert (offloaded
);
13217 tree avar
= create_tmp_var (TREE_TYPE (var
));
13218 mark_addressable (avar
);
13219 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
13220 if (GOMP_MAP_COPY_TO_P (map_kind
)
13221 || map_kind
== GOMP_MAP_POINTER
13222 || map_kind
== GOMP_MAP_TO_PSET
13223 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13225 /* If we need to initialize a temporary
13226 with VAR because it is not addressable, and
13227 the variable hasn't been initialized yet, then
13228 we'll get a warning for the store to avar.
13229 Don't warn in that case, the mapping might
13231 suppress_warning (var
, OPT_Wuninitialized
);
13232 gimplify_assign (avar
, var
, &ilist
);
13234 avar
= build_fold_addr_expr (avar
);
13235 gimplify_assign (x
, avar
, &ilist
);
13236 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
13237 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13238 && !TYPE_READONLY (TREE_TYPE (var
)))
13240 x
= unshare_expr (x
);
13241 x
= build_simple_mem_ref (x
);
13242 gimplify_assign (var
, x
, &olist
);
13247 /* While MAP is handled explicitly by the FE,
13248 for 'target update', only the identified is passed. */
13249 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
13250 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
13251 && (omp_is_allocatable_or_ptr (var
)
13252 && omp_check_optional_argument (var
, false)))
13253 var
= build_fold_indirect_ref (var
);
13254 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
13255 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
13256 || (!omp_is_allocatable_or_ptr (var
)
13257 && !omp_check_optional_argument (var
, false)))
13258 var
= build_fold_addr_expr (var
);
13259 gimplify_assign (x
, var
, &ilist
);
13263 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13265 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13266 s
= TREE_TYPE (ovar
);
13267 if (TREE_CODE (s
) == REFERENCE_TYPE
13268 || omp_check_optional_argument (ovar
, false))
13270 s
= TYPE_SIZE_UNIT (s
);
13273 s
= OMP_CLAUSE_SIZE (c
);
13274 if (s
== NULL_TREE
)
13275 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13276 s
= fold_convert (size_type_node
, s
);
13277 purpose
= size_int (map_idx
++);
13278 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13279 if (TREE_CODE (s
) != INTEGER_CST
)
13280 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13282 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
13283 switch (OMP_CLAUSE_CODE (c
))
13285 case OMP_CLAUSE_MAP
:
13286 tkind
= OMP_CLAUSE_MAP_KIND (c
);
13287 tkind_zero
= tkind
;
13288 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
13291 case GOMP_MAP_ALLOC
:
13292 case GOMP_MAP_IF_PRESENT
:
13294 case GOMP_MAP_FROM
:
13295 case GOMP_MAP_TOFROM
:
13296 case GOMP_MAP_ALWAYS_TO
:
13297 case GOMP_MAP_ALWAYS_FROM
:
13298 case GOMP_MAP_ALWAYS_TOFROM
:
13299 case GOMP_MAP_RELEASE
:
13300 case GOMP_MAP_FORCE_TO
:
13301 case GOMP_MAP_FORCE_FROM
:
13302 case GOMP_MAP_FORCE_TOFROM
:
13303 case GOMP_MAP_FORCE_PRESENT
:
13304 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
13306 case GOMP_MAP_DELETE
:
13307 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
13311 if (tkind_zero
!= tkind
)
13313 if (integer_zerop (s
))
13314 tkind
= tkind_zero
;
13315 else if (integer_nonzerop (s
))
13316 tkind_zero
= tkind
;
13318 if (tkind_zero
== tkind
13319 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c
)
13320 && (((tkind
& GOMP_MAP_FLAG_SPECIAL_BITS
)
13321 & ~GOMP_MAP_IMPLICIT
)
13324 /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
13325 bits are not interfered by other special bit encodings,
13326 then turn the GOMP_IMPLICIT_BIT flag on for the runtime
13328 tkind
|= GOMP_MAP_IMPLICIT
;
13329 tkind_zero
= tkind
;
13332 case OMP_CLAUSE_FIRSTPRIVATE
:
13333 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13334 tkind
= GOMP_MAP_TO
;
13335 tkind_zero
= tkind
;
13337 case OMP_CLAUSE_TO
:
13338 tkind
= GOMP_MAP_TO
;
13339 tkind_zero
= tkind
;
13341 case OMP_CLAUSE_FROM
:
13342 tkind
= GOMP_MAP_FROM
;
13343 tkind_zero
= tkind
;
13346 gcc_unreachable ();
13348 gcc_checking_assert (tkind
13349 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13350 gcc_checking_assert (tkind_zero
13351 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13352 talign
= ceil_log2 (talign
);
13353 tkind
|= talign
<< talign_shift
;
13354 tkind_zero
|= talign
<< talign_shift
;
13355 gcc_checking_assert (tkind
13356 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13357 gcc_checking_assert (tkind_zero
13358 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13359 if (tkind
== tkind_zero
)
13360 x
= build_int_cstu (tkind_type
, tkind
);
13363 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
13364 x
= build3 (COND_EXPR
, tkind_type
,
13365 fold_build2 (EQ_EXPR
, boolean_type_node
,
13366 unshare_expr (s
), size_zero_node
),
13367 build_int_cstu (tkind_type
, tkind_zero
),
13368 build_int_cstu (tkind_type
, tkind
));
13370 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
	case OMP_CLAUSE_FIRSTPRIVATE:
	omp_has_device_addr_descr:
	  if (is_gimple_omp_oacc (ctx->stmt))
	    goto oacc_firstprivate_map;
	  ovar = OMP_CLAUSE_DECL (c);
	  if (omp_privatize_by_reference (ovar))
	    talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
	  else
	    talign = DECL_ALIGN_UNIT (ovar);
	  var = lookup_decl_in_outer_ctx (ovar, ctx);
	  x = build_sender_ref (ovar, ctx);
	  tkind = GOMP_MAP_FIRSTPRIVATE;
	  type = TREE_TYPE (ovar);
	  if (omp_privatize_by_reference (ovar))
	    type = TREE_TYPE (type);
	  if ((INTEGRAL_TYPE_P (type)
	       && TYPE_PRECISION (type) <= POINTER_SIZE)
	      || TREE_CODE (type) == POINTER_TYPE)
	    {
	      tkind = GOMP_MAP_FIRSTPRIVATE_INT;
	      tree t = var;
	      if (omp_privatize_by_reference (var))
		t = build_simple_mem_ref (var);
	      else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
		suppress_warning (var);
	      if (TREE_CODE (type) != POINTER_TYPE)
		t = fold_convert (pointer_sized_int_node, t);
	      t = fold_convert (TREE_TYPE (x), t);
	      gimplify_assign (x, t, &ilist);
	    }
	  else if (omp_privatize_by_reference (var))
	    gimplify_assign (x, var, &ilist);
	  else if (is_gimple_reg (var))
	    {
	      tree avar = create_tmp_var (TREE_TYPE (var));
	      mark_addressable (avar);
	      if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
		suppress_warning (var);
	      gimplify_assign (avar, var, &ilist);
	      avar = build_fold_addr_expr (avar);
	      gimplify_assign (x, avar, &ilist);
	    }
	  else
	    {
	      var = build_fold_addr_expr (var);
	      gimplify_assign (x, var, &ilist);
	    }
	  if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
	    s = size_int (0);
	  else if (omp_privatize_by_reference (ovar))
	    s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
	  else
	    s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
	  s = fold_convert (size_type_node, s);
	  purpose = size_int (map_idx++);
	  CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	  if (TREE_CODE (s) != INTEGER_CST)
	    TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
	  gcc_checking_assert (tkind
			       < (HOST_WIDE_INT_C (1U) << talign_shift));
	  talign = ceil_log2 (talign);
	  tkind |= talign << talign_shift;
	  gcc_checking_assert (tkind
			       <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	  CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				  build_int_cstu (tkind_type, tkind));
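	  /* Summary of the firstprivate cases above: a scalar that fits in
	     a pointer (small integral or pointer type) is passed by value
	     via GOMP_MAP_FIRSTPRIVATE_INT with size 0, while any other
	     object is passed by address together with its size, so the
	     runtime copies the bytes over to the device.  */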
	  /* Fortran array descriptors: firstprivate of data + attach.  */
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
	      && lang_hooks.decls.omp_array_data (ovar, true))
	    {
	      tree not_null_lb, null_lb, after_lb;
	      tree var1, var2, size1, size2;
	      tree present = omp_check_optional_argument (ovar, true);
	      if (present)
		{
		  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		  not_null_lb = create_artificial_label (clause_loc);
		  null_lb = create_artificial_label (clause_loc);
		  after_lb = create_artificial_label (clause_loc);
		  gimple_seq seq = NULL;
		  present = force_gimple_operand (present, &seq, true,
						  NULL_TREE);
		  gimple_seq_add_seq (&ilist, seq);
		  gimple_seq_add_stmt (&ilist,
				       gimple_build_cond_from_tree (present,
								    not_null_lb,
								    null_lb));
		  gimple_seq_add_stmt (&ilist,
				       gimple_build_label (not_null_lb));
		}
	      var1 = lang_hooks.decls.omp_array_data (var, false);
	      size1 = lang_hooks.decls.omp_array_size (var, &ilist);
	      var2 = build_fold_addr_expr (x);
	      if (!POINTER_TYPE_P (TREE_TYPE (var)))
		var = build_fold_addr_expr (var);
	      size2 = fold_build2 (POINTER_DIFF_EXPR, ssizetype,
				   build_fold_addr_expr (var1), var);
	      size2 = fold_convert (sizetype, size2);
	      if (present)
		{
		  tree tmp = create_tmp_var (TREE_TYPE (var1));
		  gimplify_assign (tmp, var1, &ilist);
		  var1 = tmp;
		  tmp = create_tmp_var (TREE_TYPE (var2));
		  gimplify_assign (tmp, var2, &ilist);
		  var2 = tmp;
		  tmp = create_tmp_var (TREE_TYPE (size1));
		  gimplify_assign (tmp, size1, &ilist);
		  size1 = tmp;
		  tmp = create_tmp_var (TREE_TYPE (size2));
		  gimplify_assign (tmp, size2, &ilist);
		  size2 = tmp;
		  gimple_seq_add_stmt (&ilist, gimple_build_goto (after_lb));
		  gimple_seq_add_stmt (&ilist, gimple_build_label (null_lb));
		  gimplify_assign (var1, null_pointer_node, &ilist);
		  gimplify_assign (var2, null_pointer_node, &ilist);
		  gimplify_assign (size1, size_zero_node, &ilist);
		  gimplify_assign (size2, size_zero_node, &ilist);
		  gimple_seq_add_stmt (&ilist, gimple_build_label (after_lb));
		}
	      x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
	      gimplify_assign (x, var1, &ilist);
	      tkind = GOMP_MAP_FIRSTPRIVATE;
	      talign = DECL_ALIGN_UNIT (ovar);
	      talign = ceil_log2 (talign);
	      tkind |= talign << talign_shift;
	      gcc_checking_assert (tkind
				   <= tree_to_uhwi (
					TYPE_MAX_VALUE (tkind_type)));
	      purpose = size_int (map_idx++);
	      CONSTRUCTOR_APPEND_ELT (vsize, purpose, size1);
	      if (TREE_CODE (size1) != INTEGER_CST)
		TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
	      CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				      build_int_cstu (tkind_type, tkind));
	      x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
	      gimplify_assign (x, var2, &ilist);
	      tkind = GOMP_MAP_ATTACH;
	      purpose = size_int (map_idx++);
	      CONSTRUCTOR_APPEND_ELT (vsize, purpose, size2);
	      CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				      build_int_cstu (tkind_type, tkind));
	    }
	  break;
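	  /* So a Fortran array descriptor contributes two consecutive map
	     entries: a GOMP_MAP_FIRSTPRIVATE of the array data (keyed on
	     &DECL_NAME) followed by a GOMP_MAP_ATTACH (keyed on &DECL_UID)
	     whose "size" (SIZE2) is the offset of the data pointer inside
	     the descriptor, letting the runtime re-point it at the device
	     copy.  */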
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  ovar = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
	    {
	      if (lang_hooks.decls.omp_array_data (ovar, true))
		goto omp_has_device_addr_descr;
	      while (TREE_CODE (ovar) == INDIRECT_REF
		     || TREE_CODE (ovar) == ARRAY_REF)
		ovar = TREE_OPERAND (ovar, 0);
	    }
	  var = lookup_decl_in_outer_ctx (ovar, ctx);

	  if (lang_hooks.decls.omp_array_data (ovar, true))
	    {
	      tkind = ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
			&& OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
		       ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
	      x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
	    }
	  else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
		   && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
	    {
	      tkind = GOMP_MAP_USE_DEVICE_PTR;
	      x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
	    }
	  else
	    {
	      tkind = GOMP_MAP_FIRSTPRIVATE_INT;
	      x = build_sender_ref (ovar, ctx);
	    }
	  if (is_gimple_omp_oacc (ctx->stmt))
	    {
	      gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
	      if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
		tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
	    }
	  type = TREE_TYPE (ovar);
	  if (lang_hooks.decls.omp_array_data (ovar, true))
	    var = lang_hooks.decls.omp_array_data (var, false);
	  else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		    && !omp_privatize_by_reference (ovar)
		    && !omp_is_allocatable_or_ptr (ovar))
		   || TREE_CODE (type) == ARRAY_TYPE)
	    var = build_fold_addr_expr (var);
	  else
	    {
	      if (omp_privatize_by_reference (ovar)
		  || omp_check_optional_argument (ovar, false)
		  || omp_is_allocatable_or_ptr (ovar))
		{
		  type = TREE_TYPE (type);
		  if (POINTER_TYPE_P (type)
		      && TREE_CODE (type) != ARRAY_TYPE
		      && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
			   && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
			   && !omp_is_allocatable_or_ptr (ovar))
			  || (omp_privatize_by_reference (ovar)
			      && omp_is_allocatable_or_ptr (ovar))))
		    var = build_simple_mem_ref (var);
		  var = fold_convert (TREE_TYPE (x), var);
		}
	    }
	  tree present;
	  present = omp_check_optional_argument (ovar, true);
	  if (present)
	    {
	      tree null_label = create_artificial_label (UNKNOWN_LOCATION);
	      tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
	      tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
	      tree new_x = unshare_expr (x);
	      gimplify_expr (&present, &ilist, NULL, is_gimple_val,
			     fb_rvalue);
	      gcond *cond = gimple_build_cond_from_tree (present,
							 notnull_label,
							 null_label);
	      gimple_seq_add_stmt (&ilist, cond);
	      gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
	      gimplify_assign (new_x, null_pointer_node, &ilist);
	      gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
	      gimple_seq_add_stmt (&ilist,
				   gimple_build_label (notnull_label));
	      gimplify_assign (x, var, &ilist);
	      gimple_seq_add_stmt (&ilist,
				   gimple_build_label (opt_arg_label));
	    }
	  else
	    gimplify_assign (x, var, &ilist);
	  s = size_int (0);
	  purpose = size_int (map_idx++);
	  CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	  gcc_checking_assert (tkind
			       < (HOST_WIDE_INT_C (1U) << talign_shift));
	  gcc_checking_assert (tkind
			       <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	  CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				  build_int_cstu (tkind_type, tkind));
	  break;
	}
      gcc_assert (map_idx == map_cnt);

      DECL_INITIAL (TREE_VEC_ELT (t, 1))
	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
      DECL_INITIAL (TREE_VEC_ELT (t, 2))
	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
      for (int i = 1; i <= 2; i++)
	if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
	  {
	    gimple_seq initlist = NULL;
	    force_gimple_operand (build1 (DECL_EXPR, void_type_node,
					  TREE_VEC_ELT (t, i)),
				  &initlist, true, NULL_TREE);
	    gimple_seq_add_seq (&ilist, initlist);

	    tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
	    gimple_seq_add_stmt (&olist,
				 gimple_build_assign (TREE_VEC_ELT (t, i),
						      clobber));
	  }
	else if (omp_maybe_offloaded_ctx (ctx->outer))
	  {
	    tree id = get_identifier ("omp declare target");
	    tree decl = TREE_VEC_ELT (t, i);
	    DECL_ATTRIBUTES (decl)
	      = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
	    varpool_node *node = varpool_node::get (decl);
	    if (node)
	      {
		node->offloadable = 1;
		if (ENABLE_OFFLOADING)
		  {
		    g->have_offload = true;
		    vec_safe_push (offload_vars, t);
		  }
	      }
	  }

      tree clobber = build_clobber (ctx->record_type);
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }
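  /* The clobber emitted into OLIST above marks the end of the sender
     structure's lifetime right after the region, which lets later passes
     reuse its stack slot.  */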
  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  new_body = NULL;

  if (offloaded
      && ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }
  gimple_seq_add_seq (&new_body, fplist);
  if (offloaded || data_region)
    {
      tree prev = NULL_TREE;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var;
	  default:
	    break;
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  omp_firstprivatize_data_region:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_privatize_by_reference (var)
		|| is_gimple_reg_type (TREE_TYPE (var)))
	      {
		tree new_var = lookup_decl (var, ctx);
		tree type;
		type = TREE_TYPE (var);
		if (omp_privatize_by_reference (var))
		  type = TREE_TYPE (type);
		if ((INTEGRAL_TYPE_P (type)
		     && TYPE_PRECISION (type) <= POINTER_SIZE)
		    || TREE_CODE (type) == POINTER_TYPE)
		  {
		    x = build_receiver_ref (var, false, ctx);
		    if (TREE_CODE (type) != POINTER_TYPE)
		      x = fold_convert (pointer_sized_int_node, x);
		    x = fold_convert (type, x);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    if (omp_privatize_by_reference (var))
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
		      }
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
		else
		  {
		    bool by_ref = !omp_privatize_by_reference (var);
		    x = build_receiver_ref (var, by_ref, ctx);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
	      }
	    else if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_var = lookup_decl (pvar, ctx);
		x = build_receiver_ref (var, false, ctx);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_privatize_by_reference (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  {
		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					    get_name (var));
		    gimple_add_tmp_var (x);
		    TREE_ADDRESSABLE (x) = 1;
		    x = build_fold_addr_expr_loc (clause_loc, x);
		  }
		else
		  break;

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_USE_DEVICE_PTR:
	  case OMP_CLAUSE_USE_DEVICE_ADDR:
	  case OMP_CLAUSE_HAS_DEVICE_ADDR:
	  case OMP_CLAUSE_IS_DEVICE_PTR:
	    tree new_var;
	    gimple_seq assign_body;
	    bool is_array_data;
	    bool do_optional_check;
	    assign_body = NULL;
	    do_optional_check = false;
	    var = OMP_CLAUSE_DECL (c);
	    is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR && is_array_data)
	      goto omp_firstprivatize_data_region;

	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
		&& OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
	      x = build_sender_ref (is_array_data
				    ? (splay_tree_key) &DECL_NAME (var)
				    : (splay_tree_key) &DECL_UID (var), ctx);
	    else
	      {
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		  {
		    while (TREE_CODE (var) == INDIRECT_REF
			   || TREE_CODE (var) == ARRAY_REF)
		      var = TREE_OPERAND (var, 0);
		  }
		x = build_receiver_ref (var, false, ctx);
	      }

	    if (is_array_data)
	      {
		bool is_ref = omp_privatize_by_reference (var);
		do_optional_check = true;
		/* First, we copy the descriptor data from the host; then
		   we update its data to point to the target address.  */
		new_var = lookup_decl (var, ctx);
		new_var = DECL_VALUE_EXPR (new_var);
		tree v = new_var;
		tree v2 = var;
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
		    || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR)
		  v2 = maybe_lookup_decl_in_outer_ctx (var, ctx);

		if (is_ref)
		  {
		    v2 = build_fold_indirect_ref (v2);
		    v = create_tmp_var_raw (TREE_TYPE (v2), get_name (var));
		    gimple_add_tmp_var (v);
		    TREE_ADDRESSABLE (v) = 1;
		    gimplify_assign (v, v2, &assign_body);
		    tree rhs = build_fold_addr_expr (v);
		    gimple_seq_add_stmt (&assign_body,
					 gimple_build_assign (new_var, rhs));
		  }
		else
		  gimplify_assign (new_var, v2, &assign_body);

		v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);

		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&assign_body,
				     gimple_build_assign (v2, x));
	      }
	    else if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		new_var = lookup_decl (pvar, ctx);
		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&assign_body,
				     gimple_build_assign (new_var, x));
	      }
	    else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		      && !omp_privatize_by_reference (var)
		      && !omp_is_allocatable_or_ptr (var))
		     || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      {
		new_var = lookup_decl (var, ctx);
		new_var = DECL_VALUE_EXPR (new_var);
		gcc_assert (TREE_CODE (new_var) == MEM_REF);
		new_var = TREE_OPERAND (new_var, 0);
		gcc_assert (DECL_P (new_var));
		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&assign_body,
				     gimple_build_assign (new_var, x));
	      }
	    else
	      {
		tree type = TREE_TYPE (var);
		new_var = lookup_decl (var, ctx);
		if (omp_privatize_by_reference (var))
		  {
		    type = TREE_TYPE (type);
		    if (POINTER_TYPE_P (type)
			&& TREE_CODE (type) != ARRAY_TYPE
			&& ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
			     && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
			    || (omp_privatize_by_reference (var)
				&& omp_is_allocatable_or_ptr (var))))
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			x = fold_convert (type, x);
			gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
				       fb_rvalue);
			gimple_seq_add_stmt (&assign_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
			do_optional_check = true;
		      }
		  }
		new_var = DECL_VALUE_EXPR (new_var);
		x = fold_convert (TREE_TYPE (new_var), x);
		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&assign_body,
				     gimple_build_assign (new_var, x));
	      }
	    tree present;
	    present = ((do_optional_check
			&& OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
		       ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
		       : NULL_TREE);
	    if (present)
	      {
		tree null_label = create_artificial_label (UNKNOWN_LOCATION);
		tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
		tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
		glabel *null_glabel = gimple_build_label (null_label);
		glabel *notnull_glabel = gimple_build_label (notnull_label);
		ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val,
			       fb_rvalue);
		gimplify_expr (&present, &new_body, NULL, is_gimple_val,
			       fb_rvalue);
		gcond *cond = gimple_build_cond_from_tree (present,
							   notnull_label,
							   null_label);
		gimple_seq_add_stmt (&new_body, cond);
		gimple_seq_add_stmt (&new_body, null_glabel);
		gimplify_assign (new_var, null_pointer_node, &new_body);
		gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
		gimple_seq_add_stmt (&new_body, notnull_glabel);
		gimple_seq_add_seq (&new_body, assign_body);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_label (opt_arg_label));
	      }
	    else
	      gimple_seq_add_seq (&new_body, assign_body);
	    break;
	  }
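      /* For an optional argument, the receiver side built above is a
	 diamond: test PRESENT, assign a null pointer on the absent path,
	 run ASSIGN_BODY on the present path, and rejoin at
	 OPT_ARG_LABEL.  */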
      /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
	 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
	 are already handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs
	 or references to VLAs.  */
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var;
	  default:
	    break;
	  case OMP_CLAUSE_MAP:
	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		poly_int64 offset = 0;
		gcc_assert (prev);
		var = OMP_CLAUSE_DECL (c);
		if (DECL_P (var)
		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
								      ctx))
		    && varpool_node::get_create (var)->offloadable)
		  break;
		if (TREE_CODE (var) == INDIRECT_REF
		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
		  var = TREE_OPERAND (var, 0);
		if (TREE_CODE (var) == COMPONENT_REF)
		  {
		    var = get_addr_base_and_unit_offset (var, &offset);
		    gcc_assert (var != NULL_TREE && DECL_P (var));
		  }
		else if (DECL_SIZE (var)
			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
		  {
		    tree var2 = DECL_VALUE_EXPR (var);
		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
		    var2 = TREE_OPERAND (var2, 0);
		    gcc_assert (DECL_P (var2));
		    var = var2;
		  }
		tree new_var = lookup_decl (var, ctx), x;
		tree type = TREE_TYPE (new_var);
		bool is_ref;
		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			== COMPONENT_REF))
		  {
		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
		    is_ref = true;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
		  {
		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else
		  is_ref = omp_privatize_by_reference (var);
		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		  is_ref = false;
		bool ref_to_array = false;
		if (is_ref)
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) == ARRAY_TYPE)
		      {
			type = build_pointer_type (type);
			ref_to_array = true;
		      }
		  }
		else if (TREE_CODE (type) == ARRAY_TYPE)
		  {
		    tree decl2 = DECL_VALUE_EXPR (new_var);
		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
		    decl2 = TREE_OPERAND (decl2, 0);
		    gcc_assert (DECL_P (decl2));
		    new_var = decl2;
		    type = TREE_TYPE (new_var);
		  }
		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
		x = fold_convert_loc (clause_loc, type, x);
		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
		  {
		    tree bias = OMP_CLAUSE_SIZE (c);
		    if (DECL_P (bias))
		      bias = lookup_decl (bias, ctx);
		    bias = fold_convert_loc (clause_loc, sizetype, bias);
		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
					    bias);
		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (x), x, bias);
		  }
		if (ref_to_array)
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		if (is_ref && !ref_to_array)
		  {
		    tree t = create_tmp_var_raw (type, get_name (var));
		    gimple_add_tmp_var (t);
		    TREE_ADDRESSABLE (t) = 1;
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (t, x));
		    x = build_fold_addr_expr_loc (clause_loc, t);
		  }
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    else if (OMP_CLAUSE_CHAIN (c)
		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
			== OMP_CLAUSE_MAP
		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			 == GOMP_MAP_FIRSTPRIVATE_POINTER
			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	      prev = c;
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    var = OMP_CLAUSE_DECL (c);
	    if (is_variable_sized (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_pvar = lookup_decl (pvar, ctx);
		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		tree al = size_int (DECL_ALIGN (var));
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_pvar, x));
	      }
	    else if (omp_privatize_by_reference (var)
		     && !is_gimple_omp_oacc (ctx->stmt))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  break;
		else
		  {
		    tree atmp
		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
		    tree al = size_int (TYPE_ALIGN (rtype));
		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		  }

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  }
    }
  gimple_seq fork_seq = NULL;
  gimple_seq join_seq = NULL;

  if (offloaded && is_gimple_omp_oacc (ctx->stmt))
    {
      /* If there are reductions on the offloaded region itself, treat
	 them as a dummy GANG loop.  */
      tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);

      gcall *private_marker = lower_oacc_private_marker (ctx);

      if (private_marker)
	gimple_call_set_arg (private_marker, 2, level);

      lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
			     false, NULL, private_marker, NULL, &fork_seq,
			     &join_seq, ctx);
    }

  gimple_seq_add_seq (&new_body, fork_seq);
  gimple_seq_add_seq (&new_body, tgt_body);
  gimple_seq_add_seq (&new_body, join_seq);

  if (offloaded)
    {
      new_body = maybe_catch_exception (new_body);
      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
    }
  gimple_omp_set_body (stmt, new_body);

  bind = gimple_build_bind (NULL, NULL,
			    tgt_bind ? gimple_bind_block (tgt_bind)
				     : NULL_TREE);
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  tree num_teams_lower = NULL_TREE;
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
      if (num_teams_lower)
	{
	  num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
	  gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
			 fb_rvalue);
	}
      num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  if (num_teams_lower == NULL_TREE)
    num_teams_lower = num_teams;
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }
  location_t loc = gimple_location (teams_stmt);
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4);
  tree rettype = TREE_TYPE (TREE_TYPE (decl));
  tree first = create_tmp_var (rettype);
  gimple_seq_add_stmt (&bind_body,
		       gimple_build_assign (first, build_one_cst (rettype)));
  tree llabel = create_artificial_label (loc);
  gimple_seq_add_stmt (&bind_body, gimple_build_label (llabel));
  gimple *call
    = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
			 first);
  gimple_set_location (call, loc);
  tree temp = create_tmp_var (rettype);
  gimple_call_set_lhs (call, temp);
  gimple_seq_add_stmt (&bind_body, call);

  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
				    tlabel, flabel);
  gimple_seq_add_stmt (&bind_body, cond);
  gimple_seq_add_stmt (&bind_body, gimple_build_label (tlabel));
  gimple_seq_add_stmt (&bind_body,
		       gimple_build_assign (first, build_zero_cst (rettype)));

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   NULL, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_seq_add_stmt (&bind_body, gimple_build_goto (llabel));
  gimple_seq_add_stmt (&bind_body, gimple_build_label (flabel));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
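/* For reference, the host-side control flow built above has roughly this
   shape (a sketch in pseudo-GIMPLE; the labels correspond to LLABEL,
   TLABEL and FLABEL):

     first = 1;
   llabel:
     temp = GOMP_teams4 (num_teams_lower, num_teams, thread_limit, first);
     if (temp != 0) goto tlabel; else goto flabel;
   tlabel:
     first = 0;
     ... lowered teams body ...
     goto llabel;
   flabel:

   i.e. the teams body is re-executed until GOMP_teams4 returns false.  */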
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with make_addressable_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && data == NULL
      && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (make_addressable_vars
      && DECL_P (t)
      && bitmap_bit_p (make_addressable_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  omp_context *ctx;
  vec<tree> *decls;
};

/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
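/* DECLS holds (DECL_VALUE_EXPR, decl) pairs pushed by the callback above;
   popping them two at a time restores every temporarily remapped
   DECL_VALUE_EXPR once regimplification is done.  */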
static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (make_addressable_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || make_addressable_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      if (ctx && is_gimple_omp_oacc (ctx->stmt))
	{
	  tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
	  oacc_privatization_scan_decl_chain (ctx, vars);
	}
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCOPE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scope (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || make_addressable_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;

    case GIMPLE_ASSIGN:
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || make_addressable_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (make_addressable_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (make_addressable_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (make_addressable_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If the current function is a method, remove the artificial dummy
     VAR_DECLs created for non-static data member privatization; they aren't
     needed for debuginfo or anything else, have already been replaced
     everywhere in the IL, and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));

  for (auto task_stmt : task_cpyfns)
    finalize_task_copyfn (task_stmt);
  task_cpyfns.release ();
  return 0;
}
namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
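/* For example (hypothetical user code), both of the following are caught
   here:

     goto l;                     // "invalid entry to ... structured block"
     #pragma omp parallel
     { l:; }

     #pragma omp parallel
     { goto m; }                 // "invalid branch to/from ..."
     m:;
*/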
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}
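/* The two walks above share WI.INFO as the "current construct" cursor:
   diagnose_sb_1 records for every label the innermost OMP construct that
   contains it, and diagnose_sb_2 compares that against the construct
   containing each branch, reporting any mismatch via diagnose_sb_0.  */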
namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}

#include "gt-omp-low.h"