1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2022 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.cc (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec
<tree
> task_reductions
;
122 /* A hash map from the reduction clauses to the registered array
124 hash_map
<tree
, unsigned> *task_reduction_map
;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
130 /* And a hash map from the allocate variables to their corresponding
132 hash_map
<tree
, tree
> *allocate_map
;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses
;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses
;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
151 /* True if this parallel directive is nested within another. */
154 /* True if this construct can be cancelled. */
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
159 bool combined_into_simd_safelen1
;
161 /* True if there is nested scan context with inclusive clause. */
164 /* True if there is nested scan context with exclusive clause. */
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase
;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent
;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p
;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec
<tree
> oacc_privatization_candidates
;
188 static splay_tree all_contexts
;
189 static int taskreg_nesting_level
;
190 static int target_nesting_level
;
191 static bitmap make_addressable_vars
;
192 static bitmap global_nonaddressable_vars
;
193 static vec
<omp_context
*> taskreg_contexts
;
194 static vec
<gomp_task
*> task_cpyfns
;
196 static void scan_omp (gimple_seq
*, omp_context
*);
197 static tree
scan_omp_1_op (tree
*, int *, void *);
198 static bool omp_maybe_offloaded_ctx (omp_context
*ctx
);
200 #define WALK_SUBSTMTS \
204 case GIMPLE_EH_FILTER: \
205 case GIMPLE_TRANSACTION: \
206 /* The sub-statements for these should be walked. */ \
207 *handled_ops_p = false; \
210 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
211 (This doesn't include OpenACC 'kernels' decomposed parts.) */
214 is_oacc_parallel_or_serial (omp_context
*ctx
)
216 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
217 return ((outer_type
== GIMPLE_OMP_TARGET
)
218 && ((gimple_omp_target_kind (ctx
->stmt
)
219 == GF_OMP_TARGET_KIND_OACC_PARALLEL
)
220 || (gimple_omp_target_kind (ctx
->stmt
)
221 == GF_OMP_TARGET_KIND_OACC_SERIAL
)));
224 /* Return whether CTX represents an OpenACC 'kernels' construct.
225 (This doesn't include OpenACC 'kernels' decomposed parts.) */
228 is_oacc_kernels (omp_context
*ctx
)
230 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
231 return ((outer_type
== GIMPLE_OMP_TARGET
)
232 && (gimple_omp_target_kind (ctx
->stmt
)
233 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
236 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
239 is_oacc_kernels_decomposed_part (omp_context
*ctx
)
241 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
242 return ((outer_type
== GIMPLE_OMP_TARGET
)
243 && ((gimple_omp_target_kind (ctx
->stmt
)
244 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
)
245 || (gimple_omp_target_kind (ctx
->stmt
)
246 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
)
247 || (gimple_omp_target_kind (ctx
->stmt
)
248 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
)));
251 /* Return true if STMT corresponds to an OpenMP target region. */
253 is_omp_target (gimple
*stmt
)
255 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
)
257 int kind
= gimple_omp_target_kind (stmt
);
258 return (kind
== GF_OMP_TARGET_KIND_REGION
259 || kind
== GF_OMP_TARGET_KIND_DATA
260 || kind
== GF_OMP_TARGET_KIND_ENTER_DATA
261 || kind
== GF_OMP_TARGET_KIND_EXIT_DATA
);
266 /* If DECL is the artificial dummy VAR_DECL created for non-static
267 data member privatization, return the underlying "this" parameter,
268 otherwise return NULL. */
271 omp_member_access_dummy_var (tree decl
)
274 || !DECL_ARTIFICIAL (decl
)
275 || !DECL_IGNORED_P (decl
)
276 || !DECL_HAS_VALUE_EXPR_P (decl
)
277 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
280 tree v
= DECL_VALUE_EXPR (decl
);
281 if (TREE_CODE (v
) != COMPONENT_REF
)
285 switch (TREE_CODE (v
))
291 case POINTER_PLUS_EXPR
:
292 v
= TREE_OPERAND (v
, 0);
295 if (DECL_CONTEXT (v
) == current_function_decl
296 && DECL_ARTIFICIAL (v
)
297 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
305 /* Helper for unshare_and_remap, called through walk_tree. */
308 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
310 tree
*pair
= (tree
*) data
;
313 *tp
= unshare_expr (pair
[1]);
316 else if (IS_TYPE_OR_DECL_P (*tp
))
321 /* Return unshare_expr (X) with all occurrences of FROM
325 unshare_and_remap (tree x
, tree from
, tree to
)
327 tree pair
[2] = { from
, to
};
328 x
= unshare_expr (x
);
329 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
333 /* Convenience function for calling scan_omp_1_op on tree operands. */
336 scan_omp_op (tree
*tp
, omp_context
*ctx
)
338 struct walk_stmt_info wi
;
340 memset (&wi
, 0, sizeof (wi
));
342 wi
.want_locations
= true;
344 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
347 static void lower_omp (gimple_seq
*, omp_context
*);
348 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
349 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
351 /* Return true if CTX is for an omp parallel. */
354 is_parallel_ctx (omp_context
*ctx
)
356 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
360 /* Return true if CTX is for an omp task. */
363 is_task_ctx (omp_context
*ctx
)
365 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
369 /* Return true if CTX is for an omp taskloop. */
372 is_taskloop_ctx (omp_context
*ctx
)
374 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
375 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
379 /* Return true if CTX is for a host omp teams. */
382 is_host_teams_ctx (omp_context
*ctx
)
384 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
385 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
388 /* Return true if CTX is for an omp parallel or omp task or host omp teams
389 (the last one is strictly not a task region in OpenMP speak, but we
390 need to treat it similarly). */
393 is_taskreg_ctx (omp_context
*ctx
)
395 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
398 /* Return true if EXPR is variable sized. */
401 is_variable_sized (const_tree expr
)
403 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
406 /* Lookup variables. The "maybe" form
407 allows for the variable form to not have been entered, otherwise we
408 assert that the variable must have been entered. */
411 lookup_decl (tree var
, omp_context
*ctx
)
413 tree
*n
= ctx
->cb
.decl_map
->get (var
);
418 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
420 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
421 return n
? *n
: NULL_TREE
;
425 lookup_field (tree var
, omp_context
*ctx
)
428 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
429 return (tree
) n
->value
;
433 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
436 n
= splay_tree_lookup (ctx
->sfield_map
437 ? ctx
->sfield_map
: ctx
->field_map
, key
);
438 return (tree
) n
->value
;
442 lookup_sfield (tree var
, omp_context
*ctx
)
444 return lookup_sfield ((splay_tree_key
) var
, ctx
);
448 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
451 n
= splay_tree_lookup (ctx
->field_map
, key
);
452 return n
? (tree
) n
->value
: NULL_TREE
;
456 maybe_lookup_field (tree var
, omp_context
*ctx
)
458 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
461 /* Return true if DECL should be copied by pointer. SHARED_CTX is
462 the parallel context if DECL is to be shared. */
465 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
467 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
468 || TYPE_ATOMIC (TREE_TYPE (decl
)))
471 /* We can only use copy-in/copy-out semantics for shared variables
472 when we know the value is not accessible from an outer scope. */
475 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
477 /* ??? Trivially accessible from anywhere. But why would we even
478 be passing an address in this case? Should we simply assert
479 this to be false, or should we have a cleanup pass that removes
480 these from the list of mappings? */
481 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
484 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
485 without analyzing the expression whether or not its location
486 is accessible to anyone else. In the case of nested parallel
487 regions it certainly may be. */
488 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
491 /* Do not use copy-in/copy-out for variables that have their
493 if (is_global_var (decl
))
495 /* For file scope vars, track whether we've seen them as
496 non-addressable initially and in that case, keep the same
497 answer for the duration of the pass, even when they are made
498 addressable later on e.g. through reduction expansion. Global
499 variables which weren't addressable before the pass will not
500 have their privatized copies address taken. See PR91216. */
501 if (!TREE_ADDRESSABLE (decl
))
503 if (!global_nonaddressable_vars
)
504 global_nonaddressable_vars
= BITMAP_ALLOC (NULL
);
505 bitmap_set_bit (global_nonaddressable_vars
, DECL_UID (decl
));
507 else if (!global_nonaddressable_vars
508 || !bitmap_bit_p (global_nonaddressable_vars
,
512 else if (TREE_ADDRESSABLE (decl
))
515 /* lower_send_shared_vars only uses copy-in, but not copy-out
517 if (TREE_READONLY (decl
)
518 || ((TREE_CODE (decl
) == RESULT_DECL
519 || TREE_CODE (decl
) == PARM_DECL
)
520 && DECL_BY_REFERENCE (decl
)))
523 /* Disallow copy-in/out in nested parallel if
524 decl is shared in outer parallel, otherwise
525 each thread could store the shared variable
526 in its own copy-in location, making the
527 variable no longer really shared. */
528 if (shared_ctx
->is_nested
)
532 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
533 if ((is_taskreg_ctx (up
)
534 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
535 && is_gimple_omp_offloaded (up
->stmt
)))
536 && maybe_lookup_decl (decl
, up
))
543 if (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
)
545 for (c
= gimple_omp_target_clauses (up
->stmt
);
546 c
; c
= OMP_CLAUSE_CHAIN (c
))
547 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
548 && OMP_CLAUSE_DECL (c
) == decl
)
552 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
553 c
; c
= OMP_CLAUSE_CHAIN (c
))
554 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
555 && OMP_CLAUSE_DECL (c
) == decl
)
559 goto maybe_mark_addressable_and_ret
;
563 /* For tasks avoid using copy-in/out. As tasks can be
564 deferred or executed in different thread, when GOMP_task
565 returns, the task hasn't necessarily terminated. */
566 if (is_task_ctx (shared_ctx
))
569 maybe_mark_addressable_and_ret
:
570 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
571 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
573 /* Taking address of OUTER in lower_send_shared_vars
574 might need regimplification of everything that uses the
576 if (!make_addressable_vars
)
577 make_addressable_vars
= BITMAP_ALLOC (NULL
);
578 bitmap_set_bit (make_addressable_vars
, DECL_UID (outer
));
579 TREE_ADDRESSABLE (outer
) = 1;
588 /* Construct a new automatic decl similar to VAR. */
591 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
593 tree copy
= copy_var_decl (var
, name
, type
);
595 DECL_CONTEXT (copy
) = current_function_decl
;
599 DECL_CHAIN (copy
) = ctx
->block_vars
;
600 ctx
->block_vars
= copy
;
605 /* If VAR is listed in make_addressable_vars, it wasn't
606 originally addressable, but was only later made so.
607 We don't need to take address of privatizations
609 if (TREE_ADDRESSABLE (var
)
610 && ((make_addressable_vars
611 && bitmap_bit_p (make_addressable_vars
, DECL_UID (var
)))
612 || (global_nonaddressable_vars
613 && bitmap_bit_p (global_nonaddressable_vars
, DECL_UID (var
)))))
614 TREE_ADDRESSABLE (copy
) = 0;
620 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
622 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
625 /* Build tree nodes to access the field for VAR on the receiver side. */
628 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
630 tree x
, field
= lookup_field (var
, ctx
);
632 /* If the receiver record type was remapped in the child function,
633 remap the field into the new record type. */
634 x
= maybe_lookup_field (field
, ctx
);
638 x
= build_simple_mem_ref (ctx
->receiver_decl
);
639 TREE_THIS_NOTRAP (x
) = 1;
640 x
= omp_build_component_ref (x
, field
);
643 x
= build_simple_mem_ref (x
);
644 TREE_THIS_NOTRAP (x
) = 1;
650 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
651 of a parallel, this is a component reference; for workshare constructs
652 this is some variable. */
655 build_outer_var_ref (tree var
, omp_context
*ctx
,
656 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
659 omp_context
*outer
= ctx
->outer
;
660 for (; outer
; outer
= outer
->outer
)
662 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
664 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCOPE
665 && !maybe_lookup_decl (var
, outer
))
670 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
672 else if (is_variable_sized (var
))
674 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
675 x
= build_outer_var_ref (x
, ctx
, code
);
676 x
= build_simple_mem_ref (x
);
678 else if (is_taskreg_ctx (ctx
))
680 bool by_ref
= use_pointer_for_field (var
, NULL
);
681 x
= build_receiver_ref (var
, by_ref
, ctx
);
683 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
684 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
686 || code
== OMP_CLAUSE_ALLOCATE
687 || (code
== OMP_CLAUSE_PRIVATE
688 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
689 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
690 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
692 /* #pragma omp simd isn't a worksharing construct, and can reference
693 even private vars in its linear etc. clauses.
694 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
695 to private vars in all worksharing constructs. */
697 if (outer
&& is_taskreg_ctx (outer
))
698 x
= lookup_decl (var
, outer
);
700 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
704 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
708 = splay_tree_lookup (outer
->field_map
,
709 (splay_tree_key
) &DECL_UID (var
));
712 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
715 x
= lookup_decl (var
, outer
);
719 tree field
= (tree
) n
->value
;
720 /* If the receiver record type was remapped in the child function,
721 remap the field into the new record type. */
722 x
= maybe_lookup_field (field
, outer
);
726 x
= build_simple_mem_ref (outer
->receiver_decl
);
727 x
= omp_build_component_ref (x
, field
);
728 if (use_pointer_for_field (var
, outer
))
729 x
= build_simple_mem_ref (x
);
733 x
= lookup_decl (var
, outer
);
734 else if (omp_privatize_by_reference (var
))
735 /* This can happen with orphaned constructs. If var is reference, it is
736 possible it is shared and as such valid. */
738 else if (omp_member_access_dummy_var (var
))
745 tree t
= omp_member_access_dummy_var (var
);
748 x
= DECL_VALUE_EXPR (var
);
749 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
751 x
= unshare_and_remap (x
, t
, o
);
753 x
= unshare_expr (x
);
757 if (omp_privatize_by_reference (var
))
758 x
= build_simple_mem_ref (x
);
763 /* Build tree nodes to access the field for VAR on the sender side. */
766 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
768 tree field
= lookup_sfield (key
, ctx
);
769 return omp_build_component_ref (ctx
->sender_decl
, field
);
773 build_sender_ref (tree var
, omp_context
*ctx
)
775 return build_sender_ref ((splay_tree_key
) var
, ctx
);
778 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
779 BASE_POINTERS_RESTRICT, declare the field with restrict. */
782 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
784 tree field
, type
, sfield
= NULL_TREE
;
785 splay_tree_key key
= (splay_tree_key
) var
;
787 if ((mask
& 16) != 0)
789 key
= (splay_tree_key
) &DECL_NAME (var
);
790 gcc_checking_assert (key
!= (splay_tree_key
) var
);
794 key
= (splay_tree_key
) &DECL_UID (var
);
795 gcc_checking_assert (key
!= (splay_tree_key
) var
);
797 gcc_assert ((mask
& 1) == 0
798 || !splay_tree_lookup (ctx
->field_map
, key
));
799 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
800 || !splay_tree_lookup (ctx
->sfield_map
, key
));
801 gcc_assert ((mask
& 3) == 3
802 || !is_gimple_omp_oacc (ctx
->stmt
));
804 type
= TREE_TYPE (var
);
805 if ((mask
& 16) != 0)
806 type
= lang_hooks
.decls
.omp_array_data (var
, true);
808 /* Prevent redeclaring the var in the split-off function with a restrict
809 pointer type. Note that we only clear type itself, restrict qualifiers in
810 the pointed-to type will be ignored by points-to analysis. */
811 if (POINTER_TYPE_P (type
)
812 && TYPE_RESTRICT (type
))
813 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
817 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
818 type
= build_pointer_type (build_pointer_type (type
));
821 type
= build_pointer_type (type
);
822 else if ((mask
& (32 | 3)) == 1
823 && omp_privatize_by_reference (var
))
824 type
= TREE_TYPE (type
);
826 field
= build_decl (DECL_SOURCE_LOCATION (var
),
827 FIELD_DECL
, DECL_NAME (var
), type
);
829 /* Remember what variable this field was created for. This does have a
830 side effect of making dwarf2out ignore this member, so for helpful
831 debugging we clear it later in delete_omp_context. */
832 DECL_ABSTRACT_ORIGIN (field
) = var
;
833 if ((mask
& 16) == 0 && type
== TREE_TYPE (var
))
835 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
836 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
837 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
840 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
844 insert_field_into_struct (ctx
->record_type
, field
);
845 if (ctx
->srecord_type
)
847 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
848 FIELD_DECL
, DECL_NAME (var
), type
);
849 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
850 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
851 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
852 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
853 insert_field_into_struct (ctx
->srecord_type
, sfield
);
858 if (ctx
->srecord_type
== NULL_TREE
)
862 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
863 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
864 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
866 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
867 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
868 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
869 insert_field_into_struct (ctx
->srecord_type
, sfield
);
870 splay_tree_insert (ctx
->sfield_map
,
871 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
872 (splay_tree_value
) sfield
);
876 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
877 : ctx
->srecord_type
, field
);
881 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
882 if ((mask
& 2) && ctx
->sfield_map
)
883 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
887 install_var_local (tree var
, omp_context
*ctx
)
889 tree new_var
= omp_copy_decl_1 (var
, ctx
);
890 insert_decl_map (&ctx
->cb
, var
, new_var
);
894 /* Adjust the replacement for DECL in CTX for the new context. This means
895 copying the DECL_VALUE_EXPR, and fixing up the type. */
898 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
902 new_decl
= lookup_decl (decl
, ctx
);
904 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
906 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
907 && DECL_HAS_VALUE_EXPR_P (decl
))
909 tree ve
= DECL_VALUE_EXPR (decl
);
910 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
911 SET_DECL_VALUE_EXPR (new_decl
, ve
);
912 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
915 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
917 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
918 if (size
== error_mark_node
)
919 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
920 DECL_SIZE (new_decl
) = size
;
922 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
923 if (size
== error_mark_node
)
924 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
925 DECL_SIZE_UNIT (new_decl
) = size
;
929 /* The callback for remap_decl. Search all containing contexts for a
930 mapping of the variable; this avoids having to duplicate the splay
931 tree ahead of time. We know a mapping doesn't already exist in the
932 given context. Create new mappings to implement default semantics. */
935 omp_copy_decl (tree var
, copy_body_data
*cb
)
937 omp_context
*ctx
= (omp_context
*) cb
;
940 if (TREE_CODE (var
) == LABEL_DECL
)
942 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
944 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
945 DECL_CONTEXT (new_var
) = current_function_decl
;
946 insert_decl_map (&ctx
->cb
, var
, new_var
);
950 while (!is_taskreg_ctx (ctx
))
955 new_var
= maybe_lookup_decl (var
, ctx
);
960 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
963 return error_mark_node
;
966 /* Create a new context, with OUTER_CTX being the surrounding context. */
969 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
971 omp_context
*ctx
= XCNEW (omp_context
);
973 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
974 (splay_tree_value
) ctx
);
979 ctx
->outer
= outer_ctx
;
980 ctx
->cb
= outer_ctx
->cb
;
981 ctx
->cb
.block
= NULL
;
982 ctx
->depth
= outer_ctx
->depth
+ 1;
986 ctx
->cb
.src_fn
= current_function_decl
;
987 ctx
->cb
.dst_fn
= current_function_decl
;
988 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
989 gcc_checking_assert (ctx
->cb
.src_node
);
990 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
991 ctx
->cb
.src_cfun
= cfun
;
992 ctx
->cb
.copy_decl
= omp_copy_decl
;
993 ctx
->cb
.eh_lp_nr
= 0;
994 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
995 ctx
->cb
.adjust_array_error_bounds
= true;
996 ctx
->cb
.dont_remap_vla_if_no_change
= true;
1000 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
1005 static gimple_seq
maybe_catch_exception (gimple_seq
);
1007 /* Finalize task copyfn. */
1010 finalize_task_copyfn (gomp_task
*task_stmt
)
1012 struct function
*child_cfun
;
1014 gimple_seq seq
= NULL
, new_seq
;
1017 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
1018 if (child_fn
== NULL_TREE
)
1021 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
1022 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
1024 push_cfun (child_cfun
);
1025 bind
= gimplify_body (child_fn
, false);
1026 gimple_seq_add_stmt (&seq
, bind
);
1027 new_seq
= maybe_catch_exception (seq
);
1030 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
1032 gimple_seq_add_stmt (&seq
, bind
);
1034 gimple_set_body (child_fn
, seq
);
1037 /* Inform the callgraph about the new function. */
1038 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
1039 node
->parallelized_function
= 1;
1040 cgraph_node::add_new_function (child_fn
, false);
1043 /* Destroy a omp_context data structures. Called through the splay tree
1044 value delete callback. */
1047 delete_omp_context (splay_tree_value value
)
1049 omp_context
*ctx
= (omp_context
*) value
;
1051 delete ctx
->cb
.decl_map
;
1054 splay_tree_delete (ctx
->field_map
);
1055 if (ctx
->sfield_map
)
1056 splay_tree_delete (ctx
->sfield_map
);
1058 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1059 it produces corrupt debug information. */
1060 if (ctx
->record_type
)
1063 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
1064 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1066 if (ctx
->srecord_type
)
1069 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
1070 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1073 if (ctx
->task_reduction_map
)
1075 ctx
->task_reductions
.release ();
1076 delete ctx
->task_reduction_map
;
1079 delete ctx
->lastprivate_conditional_map
;
1080 delete ctx
->allocate_map
;
1085 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1089 fixup_child_record_type (omp_context
*ctx
)
1091 tree f
, type
= ctx
->record_type
;
1093 if (!ctx
->receiver_decl
)
1095 /* ??? It isn't sufficient to just call remap_type here, because
1096 variably_modified_type_p doesn't work the way we expect for
1097 record types. Testing each field for whether it needs remapping
1098 and creating a new record by hand works, however. */
1099 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
1100 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
1104 tree name
, new_fields
= NULL
;
1106 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1107 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1108 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1109 TYPE_DECL
, name
, type
);
1110 TYPE_NAME (type
) = name
;
1112 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1114 tree new_f
= copy_node (f
);
1115 DECL_CONTEXT (new_f
) = type
;
1116 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1117 DECL_CHAIN (new_f
) = new_fields
;
1118 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1119 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1121 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1125 /* Arrange to be able to look up the receiver field
1126 given the sender field. */
1127 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1128 (splay_tree_value
) new_f
);
1130 TYPE_FIELDS (type
) = nreverse (new_fields
);
1134 /* In a target region we never modify any of the pointers in *.omp_data_i,
1135 so attempt to help the optimizers. */
1136 if (is_gimple_omp_offloaded (ctx
->stmt
))
1137 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1139 TREE_TYPE (ctx
->receiver_decl
)
1140 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1143 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1144 specified by CLAUSES. */
1147 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1150 bool scan_array_reductions
= false;
1152 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1153 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
1154 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
1155 /* omp_default_mem_alloc is 1 */
1156 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
1157 || OMP_CLAUSE_ALLOCATE_ALIGN (c
) != NULL_TREE
))
1159 /* The allocate clauses that appear on a target construct or on
1160 constructs in a target region must specify an allocator expression
1161 unless a requires directive with the dynamic_allocators clause
1162 is present in the same compilation unit. */
1163 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
1164 && ((omp_requires_mask
& OMP_REQUIRES_DYNAMIC_ALLOCATORS
) == 0)
1165 && omp_maybe_offloaded_ctx (ctx
))
1166 error_at (OMP_CLAUSE_LOCATION (c
), "%<allocate%> clause must"
1167 " specify an allocator here");
1168 if (ctx
->allocate_map
== NULL
)
1169 ctx
->allocate_map
= new hash_map
<tree
, tree
>;
1170 tree val
= integer_zero_node
;
1171 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
1172 val
= OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
1173 if (OMP_CLAUSE_ALLOCATE_ALIGN (c
))
1174 val
= build_tree_list (val
, OMP_CLAUSE_ALLOCATE_ALIGN (c
));
1175 ctx
->allocate_map
->put (OMP_CLAUSE_DECL (c
), val
);
1178 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1182 switch (OMP_CLAUSE_CODE (c
))
1184 case OMP_CLAUSE_PRIVATE
:
1185 decl
= OMP_CLAUSE_DECL (c
);
1186 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1188 else if (!is_variable_sized (decl
))
1189 install_var_local (decl
, ctx
);
1192 case OMP_CLAUSE_SHARED
:
1193 decl
= OMP_CLAUSE_DECL (c
);
1194 if (ctx
->allocate_map
&& ctx
->allocate_map
->get (decl
))
1195 ctx
->allocate_map
->remove (decl
);
1196 /* Ignore shared directives in teams construct inside of
1197 target construct. */
1198 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1199 && !is_host_teams_ctx (ctx
))
1201 /* Global variables don't need to be copied,
1202 the receiver side will use them directly. */
1203 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1204 if (is_global_var (odecl
))
1206 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1209 gcc_assert (is_taskreg_ctx (ctx
));
1210 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1211 || !is_variable_sized (decl
));
1212 /* Global variables don't need to be copied,
1213 the receiver side will use them directly. */
1214 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1216 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1218 use_pointer_for_field (decl
, ctx
);
1221 by_ref
= use_pointer_for_field (decl
, NULL
);
1222 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1223 || TREE_ADDRESSABLE (decl
)
1225 || omp_privatize_by_reference (decl
))
1227 by_ref
= use_pointer_for_field (decl
, ctx
);
1228 install_var_field (decl
, by_ref
, 3, ctx
);
1229 install_var_local (decl
, ctx
);
1232 /* We don't need to copy const scalar vars back. */
1233 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1236 case OMP_CLAUSE_REDUCTION
:
1237 /* Collect 'reduction' clauses on OpenACC compute construct. */
1238 if (is_gimple_omp_oacc (ctx
->stmt
)
1239 && is_gimple_omp_offloaded (ctx
->stmt
))
1241 /* No 'reduction' clauses on OpenACC 'kernels'. */
1242 gcc_checking_assert (!is_oacc_kernels (ctx
));
1243 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1244 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
1246 ctx
->local_reduction_clauses
1247 = tree_cons (NULL
, c
, ctx
->local_reduction_clauses
);
1251 case OMP_CLAUSE_IN_REDUCTION
:
1252 decl
= OMP_CLAUSE_DECL (c
);
1253 if (ctx
->allocate_map
1254 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1255 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
1256 || OMP_CLAUSE_REDUCTION_TASK (c
)))
1257 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1258 || is_task_ctx (ctx
)))
1261 if (ctx
->allocate_map
->get (decl
))
1262 ctx
->allocate_map
->remove (decl
);
1264 if (TREE_CODE (decl
) == MEM_REF
)
1266 tree t
= TREE_OPERAND (decl
, 0);
1267 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1268 t
= TREE_OPERAND (t
, 0);
1269 if (TREE_CODE (t
) == INDIRECT_REF
1270 || TREE_CODE (t
) == ADDR_EXPR
)
1271 t
= TREE_OPERAND (t
, 0);
1272 if (is_omp_target (ctx
->stmt
))
1274 if (is_variable_sized (t
))
1276 gcc_assert (DECL_HAS_VALUE_EXPR_P (t
));
1277 t
= DECL_VALUE_EXPR (t
);
1278 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
1279 t
= TREE_OPERAND (t
, 0);
1280 gcc_assert (DECL_P (t
));
1284 scan_omp_op (&at
, ctx
->outer
);
1285 tree nt
= omp_copy_decl_1 (at
, ctx
->outer
);
1286 splay_tree_insert (ctx
->field_map
,
1287 (splay_tree_key
) &DECL_CONTEXT (t
),
1288 (splay_tree_value
) nt
);
1290 splay_tree_insert (ctx
->field_map
,
1291 (splay_tree_key
) &DECL_CONTEXT (at
),
1292 (splay_tree_value
) nt
);
1295 install_var_local (t
, ctx
);
1296 if (is_taskreg_ctx (ctx
)
1297 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1298 || (is_task_ctx (ctx
)
1299 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1300 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1301 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1302 == POINTER_TYPE
)))))
1303 && !is_variable_sized (t
)
1304 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1305 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1306 && !is_task_ctx (ctx
))))
1308 by_ref
= use_pointer_for_field (t
, NULL
);
1309 if (is_task_ctx (ctx
)
1310 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1311 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1313 install_var_field (t
, false, 1, ctx
);
1314 install_var_field (t
, by_ref
, 2, ctx
);
1317 install_var_field (t
, by_ref
, 3, ctx
);
1321 if (is_omp_target (ctx
->stmt
))
1325 scan_omp_op (&at
, ctx
->outer
);
1326 tree nt
= omp_copy_decl_1 (at
, ctx
->outer
);
1327 splay_tree_insert (ctx
->field_map
,
1328 (splay_tree_key
) &DECL_CONTEXT (decl
),
1329 (splay_tree_value
) nt
);
1331 splay_tree_insert (ctx
->field_map
,
1332 (splay_tree_key
) &DECL_CONTEXT (at
),
1333 (splay_tree_value
) nt
);
1336 if (is_task_ctx (ctx
)
1337 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1338 && OMP_CLAUSE_REDUCTION_TASK (c
)
1339 && is_parallel_ctx (ctx
)))
1341 /* Global variables don't need to be copied,
1342 the receiver side will use them directly. */
1343 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1345 by_ref
= use_pointer_for_field (decl
, ctx
);
1346 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1347 install_var_field (decl
, by_ref
, 3, ctx
);
1349 install_var_local (decl
, ctx
);
1352 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1353 && OMP_CLAUSE_REDUCTION_TASK (c
))
1355 install_var_local (decl
, ctx
);
1360 case OMP_CLAUSE_LASTPRIVATE
:
1361 /* Let the corresponding firstprivate clause create
1363 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1367 case OMP_CLAUSE_FIRSTPRIVATE
:
1368 case OMP_CLAUSE_LINEAR
:
1369 decl
= OMP_CLAUSE_DECL (c
);
1371 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1372 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
1373 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1374 && is_gimple_omp_offloaded (ctx
->stmt
))
1376 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1377 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
1378 && lang_hooks
.decls
.omp_array_data (decl
, true)))
1380 by_ref
= !omp_privatize_by_reference (decl
);
1381 install_var_field (decl
, by_ref
, 3, ctx
);
1383 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1385 if (TREE_CODE (decl
) == INDIRECT_REF
)
1386 decl
= TREE_OPERAND (decl
, 0);
1387 install_var_field (decl
, true, 3, ctx
);
1389 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1390 install_var_field (decl
, true, 3, ctx
);
1392 install_var_field (decl
, false, 3, ctx
);
1394 if (is_variable_sized (decl
))
1396 if (is_task_ctx (ctx
))
1398 if (ctx
->allocate_map
1399 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1402 if (ctx
->allocate_map
->get (decl
))
1403 ctx
->allocate_map
->remove (decl
);
1405 install_var_field (decl
, false, 1, ctx
);
1409 else if (is_taskreg_ctx (ctx
))
1412 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1413 by_ref
= use_pointer_for_field (decl
, NULL
);
1415 if (is_task_ctx (ctx
)
1416 && (global
|| by_ref
|| omp_privatize_by_reference (decl
)))
1418 if (ctx
->allocate_map
1419 && ctx
->allocate_map
->get (decl
))
1420 install_var_field (decl
, by_ref
, 32 | 1, ctx
);
1422 install_var_field (decl
, false, 1, ctx
);
1424 install_var_field (decl
, by_ref
, 2, ctx
);
1427 install_var_field (decl
, by_ref
, 3, ctx
);
1429 install_var_local (decl
, ctx
);
1430 /* For descr arrays on target: firstprivatize data + attach ptr. */
1431 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1432 && is_gimple_omp_offloaded (ctx
->stmt
)
1433 && !is_gimple_omp_oacc (ctx
->stmt
)
1434 && lang_hooks
.decls
.omp_array_data (decl
, true))
1436 install_var_field (decl
, false, 16 | 3, ctx
);
1437 install_var_field (decl
, true, 8 | 3, ctx
);
1441 case OMP_CLAUSE_USE_DEVICE_PTR
:
1442 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1443 decl
= OMP_CLAUSE_DECL (c
);
1445 /* Fortran array descriptors. */
1446 if (lang_hooks
.decls
.omp_array_data (decl
, true))
1447 install_var_field (decl
, false, 19, ctx
);
1448 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
1449 && !omp_privatize_by_reference (decl
)
1450 && !omp_is_allocatable_or_ptr (decl
))
1451 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1452 install_var_field (decl
, true, 11, ctx
);
1454 install_var_field (decl
, false, 11, ctx
);
1455 if (DECL_SIZE (decl
)
1456 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1458 tree decl2
= DECL_VALUE_EXPR (decl
);
1459 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1460 decl2
= TREE_OPERAND (decl2
, 0);
1461 gcc_assert (DECL_P (decl2
));
1462 install_var_local (decl2
, ctx
);
1464 install_var_local (decl
, ctx
);
1467 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
1468 decl
= OMP_CLAUSE_DECL (c
);
1469 while (TREE_CODE (decl
) == INDIRECT_REF
1470 || TREE_CODE (decl
) == ARRAY_REF
)
1471 decl
= TREE_OPERAND (decl
, 0);
1474 case OMP_CLAUSE_IS_DEVICE_PTR
:
1475 decl
= OMP_CLAUSE_DECL (c
);
1478 case OMP_CLAUSE__LOOPTEMP_
:
1479 case OMP_CLAUSE__REDUCTEMP_
:
1480 gcc_assert (is_taskreg_ctx (ctx
));
1481 decl
= OMP_CLAUSE_DECL (c
);
1482 install_var_field (decl
, false, 3, ctx
);
1483 install_var_local (decl
, ctx
);
1486 case OMP_CLAUSE_COPYPRIVATE
:
1487 case OMP_CLAUSE_COPYIN
:
1488 decl
= OMP_CLAUSE_DECL (c
);
1489 by_ref
= use_pointer_for_field (decl
, NULL
);
1490 install_var_field (decl
, by_ref
, 3, ctx
);
1493 case OMP_CLAUSE_FINAL
:
1495 case OMP_CLAUSE_NUM_THREADS
:
1496 case OMP_CLAUSE_NUM_TEAMS
:
1497 case OMP_CLAUSE_THREAD_LIMIT
:
1498 case OMP_CLAUSE_DEVICE
:
1499 case OMP_CLAUSE_SCHEDULE
:
1500 case OMP_CLAUSE_DIST_SCHEDULE
:
1501 case OMP_CLAUSE_DEPEND
:
1502 case OMP_CLAUSE_PRIORITY
:
1503 case OMP_CLAUSE_GRAINSIZE
:
1504 case OMP_CLAUSE_NUM_TASKS
:
1505 case OMP_CLAUSE_NUM_GANGS
:
1506 case OMP_CLAUSE_NUM_WORKERS
:
1507 case OMP_CLAUSE_VECTOR_LENGTH
:
1508 case OMP_CLAUSE_DETACH
:
1509 case OMP_CLAUSE_FILTER
:
1511 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1515 case OMP_CLAUSE_FROM
:
1516 case OMP_CLAUSE_MAP
:
1518 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1519 decl
= OMP_CLAUSE_DECL (c
);
1520 /* If requested, make 'decl' addressable. */
1521 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1522 && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c
))
1524 gcc_checking_assert (DECL_P (decl
));
1526 bool decl_addressable
= TREE_ADDRESSABLE (decl
);
1527 if (!decl_addressable
)
1529 if (!make_addressable_vars
)
1530 make_addressable_vars
= BITMAP_ALLOC (NULL
);
1531 bitmap_set_bit (make_addressable_vars
, DECL_UID (decl
));
1532 TREE_ADDRESSABLE (decl
) = 1;
1535 if (dump_enabled_p ())
1537 location_t loc
= OMP_CLAUSE_LOCATION (c
);
1538 const dump_user_location_t d_u_loc
1539 = dump_user_location_t::from_location_t (loc
);
1540 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
1542 # pragma GCC diagnostic push
1543 # pragma GCC diagnostic ignored "-Wformat"
1545 if (!decl_addressable
)
1546 dump_printf_loc (MSG_NOTE
, d_u_loc
,
1548 " made addressable\n",
1551 dump_printf_loc (MSG_NOTE
, d_u_loc
,
1553 " already made addressable\n",
1556 # pragma GCC diagnostic pop
1561 OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c
) = 0;
1563 /* Global variables with "omp declare target" attribute
1564 don't need to be copied, the receiver side will use them
1565 directly. However, global variables with "omp declare target link"
1566 attribute need to be copied. Or when ALWAYS modifier is used. */
1567 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1569 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1570 && (OMP_CLAUSE_MAP_KIND (c
)
1571 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
1572 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
1573 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
)
1574 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1575 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1576 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1577 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1578 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
1579 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1580 && varpool_node::get_create (decl
)->offloadable
1581 && !lookup_attribute ("omp declare target link",
1582 DECL_ATTRIBUTES (decl
)))
1584 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1585 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1587 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1588 not offloaded; there is nothing to map for those. */
1589 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1590 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1591 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1594 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1596 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1597 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1598 && is_omp_target (ctx
->stmt
))
1600 /* If this is an offloaded region, an attach operation should
1601 only exist when the pointer variable is mapped in a prior
1603 If we had an error, we may not have attempted to sort clauses
1604 properly, so avoid the test. */
1605 if (is_gimple_omp_offloaded (ctx
->stmt
)
1608 (maybe_lookup_decl (decl
, ctx
)
1609 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1610 && lookup_attribute ("omp declare target",
1611 DECL_ATTRIBUTES (decl
))));
1613 /* By itself, attach/detach is generated as part of pointer
1614 variable mapping and should not create new variables in the
1615 offloaded region, however sender refs for it must be created
1616 for its address to be passed to the runtime. */
1618 = build_decl (OMP_CLAUSE_LOCATION (c
),
1619 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1620 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1621 insert_field_into_struct (ctx
->record_type
, field
);
1622 /* To not clash with a map of the pointer variable itself,
1623 attach/detach maps have their field looked up by the *clause*
1624 tree expression, not the decl. */
1625 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1626 (splay_tree_key
) c
));
1627 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) c
,
1628 (splay_tree_value
) field
);
1631 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1632 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1633 || (OMP_CLAUSE_MAP_KIND (c
)
1634 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1636 if (TREE_CODE (decl
) == COMPONENT_REF
1637 || (TREE_CODE (decl
) == INDIRECT_REF
1638 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1639 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1641 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1642 == POINTER_TYPE
)))))
1644 if (DECL_SIZE (decl
)
1645 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1647 tree decl2
= DECL_VALUE_EXPR (decl
);
1648 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1649 decl2
= TREE_OPERAND (decl2
, 0);
1650 gcc_assert (DECL_P (decl2
));
1651 install_var_local (decl2
, ctx
);
1653 install_var_local (decl
, ctx
);
1658 if (DECL_SIZE (decl
)
1659 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1661 tree decl2
= DECL_VALUE_EXPR (decl
);
1662 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1663 decl2
= TREE_OPERAND (decl2
, 0);
1664 gcc_assert (DECL_P (decl2
));
1665 install_var_field (decl2
, true, 3, ctx
);
1666 install_var_local (decl2
, ctx
);
1667 install_var_local (decl
, ctx
);
1671 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1672 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1673 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1674 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1675 install_var_field (decl
, true, 7, ctx
);
1677 install_var_field (decl
, true, 3, ctx
);
1678 if (is_gimple_omp_offloaded (ctx
->stmt
)
1679 && !(is_gimple_omp_oacc (ctx
->stmt
)
1680 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
1681 install_var_local (decl
, ctx
);
1686 tree base
= get_base_address (decl
);
1687 tree nc
= OMP_CLAUSE_CHAIN (c
);
1690 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1691 && OMP_CLAUSE_DECL (nc
) == base
1692 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1693 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1695 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1696 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1702 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1703 decl
= OMP_CLAUSE_DECL (c
);
1705 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1706 (splay_tree_key
) decl
));
1708 = build_decl (OMP_CLAUSE_LOCATION (c
),
1709 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1710 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1711 insert_field_into_struct (ctx
->record_type
, field
);
1712 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1713 (splay_tree_value
) field
);
1718 case OMP_CLAUSE_ORDER
:
1719 ctx
->order_concurrent
= true;
1722 case OMP_CLAUSE_BIND
:
1726 case OMP_CLAUSE_NOWAIT
:
1727 case OMP_CLAUSE_ORDERED
:
1728 case OMP_CLAUSE_COLLAPSE
:
1729 case OMP_CLAUSE_UNTIED
:
1730 case OMP_CLAUSE_MERGEABLE
:
1731 case OMP_CLAUSE_PROC_BIND
:
1732 case OMP_CLAUSE_SAFELEN
:
1733 case OMP_CLAUSE_SIMDLEN
:
1734 case OMP_CLAUSE_THREADS
:
1735 case OMP_CLAUSE_SIMD
:
1736 case OMP_CLAUSE_NOGROUP
:
1737 case OMP_CLAUSE_DEFAULTMAP
:
1738 case OMP_CLAUSE_ASYNC
:
1739 case OMP_CLAUSE_WAIT
:
1740 case OMP_CLAUSE_GANG
:
1741 case OMP_CLAUSE_WORKER
:
1742 case OMP_CLAUSE_VECTOR
:
1743 case OMP_CLAUSE_INDEPENDENT
:
1744 case OMP_CLAUSE_AUTO
:
1745 case OMP_CLAUSE_SEQ
:
1746 case OMP_CLAUSE_TILE
:
1747 case OMP_CLAUSE__SIMT_
:
1748 case OMP_CLAUSE_DEFAULT
:
1749 case OMP_CLAUSE_NONTEMPORAL
:
1750 case OMP_CLAUSE_IF_PRESENT
:
1751 case OMP_CLAUSE_FINALIZE
:
1752 case OMP_CLAUSE_TASK_REDUCTION
:
1753 case OMP_CLAUSE_ALLOCATE
:
1756 case OMP_CLAUSE_ALIGNED
:
1757 decl
= OMP_CLAUSE_DECL (c
);
1758 if (is_global_var (decl
)
1759 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1760 install_var_local (decl
, ctx
);
1763 case OMP_CLAUSE__CONDTEMP_
:
1764 decl
= OMP_CLAUSE_DECL (c
);
1765 if (is_parallel_ctx (ctx
))
1767 install_var_field (decl
, false, 3, ctx
);
1768 install_var_local (decl
, ctx
);
1770 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1771 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
1772 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1773 install_var_local (decl
, ctx
);
1776 case OMP_CLAUSE__CACHE_
:
1777 case OMP_CLAUSE_NOHOST
:
1783 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1785 switch (OMP_CLAUSE_CODE (c
))
1787 case OMP_CLAUSE_LASTPRIVATE
:
1788 /* Let the corresponding firstprivate clause create
1790 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1791 scan_array_reductions
= true;
1792 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1796 case OMP_CLAUSE_FIRSTPRIVATE
:
1797 case OMP_CLAUSE_PRIVATE
:
1798 case OMP_CLAUSE_LINEAR
:
1799 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
1800 case OMP_CLAUSE_IS_DEVICE_PTR
:
1801 decl
= OMP_CLAUSE_DECL (c
);
1802 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1804 while (TREE_CODE (decl
) == INDIRECT_REF
1805 || TREE_CODE (decl
) == ARRAY_REF
)
1806 decl
= TREE_OPERAND (decl
, 0);
1809 if (is_variable_sized (decl
))
1811 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1812 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
1813 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1814 && is_gimple_omp_offloaded (ctx
->stmt
))
1816 tree decl2
= DECL_VALUE_EXPR (decl
);
1817 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1818 decl2
= TREE_OPERAND (decl2
, 0);
1819 gcc_assert (DECL_P (decl2
));
1820 install_var_local (decl2
, ctx
);
1821 fixup_remapped_decl (decl2
, ctx
, false);
1823 install_var_local (decl
, ctx
);
1825 fixup_remapped_decl (decl
, ctx
,
1826 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1827 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1828 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1829 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1830 scan_array_reductions
= true;
1833 case OMP_CLAUSE_REDUCTION
:
1834 case OMP_CLAUSE_IN_REDUCTION
:
1835 decl
= OMP_CLAUSE_DECL (c
);
1836 if (TREE_CODE (decl
) != MEM_REF
&& !is_omp_target (ctx
->stmt
))
1838 if (is_variable_sized (decl
))
1839 install_var_local (decl
, ctx
);
1840 fixup_remapped_decl (decl
, ctx
, false);
1842 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1843 scan_array_reductions
= true;
1846 case OMP_CLAUSE_TASK_REDUCTION
:
1847 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1848 scan_array_reductions
= true;
1851 case OMP_CLAUSE_SHARED
:
1852 /* Ignore shared directives in teams construct inside of
1853 target construct. */
1854 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1855 && !is_host_teams_ctx (ctx
))
1857 decl
= OMP_CLAUSE_DECL (c
);
1858 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1860 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1862 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1865 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1866 install_var_field (decl
, by_ref
, 11, ctx
);
1869 fixup_remapped_decl (decl
, ctx
, false);
1872 case OMP_CLAUSE_MAP
:
1873 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1875 decl
= OMP_CLAUSE_DECL (c
);
1877 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1878 && (OMP_CLAUSE_MAP_KIND (c
)
1879 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1880 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1881 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1882 && varpool_node::get_create (decl
)->offloadable
)
1884 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1885 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1886 && is_omp_target (ctx
->stmt
)
1887 && !is_gimple_omp_offloaded (ctx
->stmt
))
1891 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1892 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1893 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1894 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1896 tree new_decl
= lookup_decl (decl
, ctx
);
1897 TREE_TYPE (new_decl
)
1898 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1900 else if (DECL_SIZE (decl
)
1901 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1903 tree decl2
= DECL_VALUE_EXPR (decl
);
1904 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1905 decl2
= TREE_OPERAND (decl2
, 0);
1906 gcc_assert (DECL_P (decl2
));
1907 fixup_remapped_decl (decl2
, ctx
, false);
1908 fixup_remapped_decl (decl
, ctx
, true);
1911 fixup_remapped_decl (decl
, ctx
, false);
1915 case OMP_CLAUSE_COPYPRIVATE
:
1916 case OMP_CLAUSE_COPYIN
:
1917 case OMP_CLAUSE_DEFAULT
:
1919 case OMP_CLAUSE_NUM_THREADS
:
1920 case OMP_CLAUSE_NUM_TEAMS
:
1921 case OMP_CLAUSE_THREAD_LIMIT
:
1922 case OMP_CLAUSE_DEVICE
:
1923 case OMP_CLAUSE_SCHEDULE
:
1924 case OMP_CLAUSE_DIST_SCHEDULE
:
1925 case OMP_CLAUSE_NOWAIT
:
1926 case OMP_CLAUSE_ORDERED
:
1927 case OMP_CLAUSE_COLLAPSE
:
1928 case OMP_CLAUSE_UNTIED
:
1929 case OMP_CLAUSE_FINAL
:
1930 case OMP_CLAUSE_MERGEABLE
:
1931 case OMP_CLAUSE_PROC_BIND
:
1932 case OMP_CLAUSE_SAFELEN
:
1933 case OMP_CLAUSE_SIMDLEN
:
1934 case OMP_CLAUSE_ALIGNED
:
1935 case OMP_CLAUSE_DEPEND
:
1936 case OMP_CLAUSE_DETACH
:
1937 case OMP_CLAUSE_ALLOCATE
:
1938 case OMP_CLAUSE__LOOPTEMP_
:
1939 case OMP_CLAUSE__REDUCTEMP_
:
1941 case OMP_CLAUSE_FROM
:
1942 case OMP_CLAUSE_PRIORITY
:
1943 case OMP_CLAUSE_GRAINSIZE
:
1944 case OMP_CLAUSE_NUM_TASKS
:
1945 case OMP_CLAUSE_THREADS
:
1946 case OMP_CLAUSE_SIMD
:
1947 case OMP_CLAUSE_NOGROUP
:
1948 case OMP_CLAUSE_DEFAULTMAP
:
1949 case OMP_CLAUSE_ORDER
:
1950 case OMP_CLAUSE_BIND
:
1951 case OMP_CLAUSE_USE_DEVICE_PTR
:
1952 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1953 case OMP_CLAUSE_NONTEMPORAL
:
1954 case OMP_CLAUSE_ASYNC
:
1955 case OMP_CLAUSE_WAIT
:
1956 case OMP_CLAUSE_NUM_GANGS
:
1957 case OMP_CLAUSE_NUM_WORKERS
:
1958 case OMP_CLAUSE_VECTOR_LENGTH
:
1959 case OMP_CLAUSE_GANG
:
1960 case OMP_CLAUSE_WORKER
:
1961 case OMP_CLAUSE_VECTOR
:
1962 case OMP_CLAUSE_INDEPENDENT
:
1963 case OMP_CLAUSE_AUTO
:
1964 case OMP_CLAUSE_SEQ
:
1965 case OMP_CLAUSE_TILE
:
1966 case OMP_CLAUSE__SIMT_
:
1967 case OMP_CLAUSE_IF_PRESENT
:
1968 case OMP_CLAUSE_FINALIZE
:
1969 case OMP_CLAUSE_FILTER
:
1970 case OMP_CLAUSE__CONDTEMP_
:
1973 case OMP_CLAUSE__CACHE_
:
1974 case OMP_CLAUSE_NOHOST
:
1980 gcc_checking_assert (!scan_array_reductions
1981 || !is_gimple_omp_oacc (ctx
->stmt
));
1982 if (scan_array_reductions
)
1984 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1985 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1986 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1987 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1988 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1990 omp_context
*rctx
= ctx
;
1991 if (is_omp_target (ctx
->stmt
))
1993 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), rctx
);
1994 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), rctx
);
1996 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1997 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1998 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1999 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
2000 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
2001 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
2005 /* Create a new name for omp child function. Returns an identifier. */
2008 create_omp_child_function_name (bool task_copy
)
2010 return clone_function_name_numbered (current_function_decl
,
2011 task_copy
? "_omp_cpyfn" : "_omp_fn");
2014 /* Return true if CTX may belong to offloaded code: either if current function
2015 is offloaded, or any enclosing context corresponds to a target region. */
2018 omp_maybe_offloaded_ctx (omp_context
*ctx
)
2020 if (cgraph_node::get (current_function_decl
)->offloadable
)
2022 for (; ctx
; ctx
= ctx
->outer
)
2023 if (is_gimple_omp_offloaded (ctx
->stmt
))
2028 /* Build a decl for the omp child function. It'll not contain a body
2029 yet, just the bare decl. */
2032 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
2034 tree decl
, type
, name
, t
;
2036 name
= create_omp_child_function_name (task_copy
);
2038 type
= build_function_type_list (void_type_node
, ptr_type_node
,
2039 ptr_type_node
, NULL_TREE
);
2041 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
2043 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
2045 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
2048 ctx
->cb
.dst_fn
= decl
;
2050 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
2052 TREE_STATIC (decl
) = 1;
2053 TREE_USED (decl
) = 1;
2054 DECL_ARTIFICIAL (decl
) = 1;
2055 DECL_IGNORED_P (decl
) = 0;
2056 TREE_PUBLIC (decl
) = 0;
2057 DECL_UNINLINABLE (decl
) = 1;
2058 DECL_EXTERNAL (decl
) = 0;
2059 DECL_CONTEXT (decl
) = NULL_TREE
;
2060 DECL_INITIAL (decl
) = make_node (BLOCK
);
2061 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
2062 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
2063 /* Remove omp declare simd attribute from the new attributes. */
2064 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
2066 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
2069 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
2070 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
2071 *p
= TREE_CHAIN (*p
);
2074 tree chain
= TREE_CHAIN (*p
);
2075 *p
= copy_node (*p
);
2076 p
= &TREE_CHAIN (*p
);
2080 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
2081 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
2082 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
2083 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
2084 DECL_FUNCTION_VERSIONED (decl
)
2085 = DECL_FUNCTION_VERSIONED (current_function_decl
);
2087 if (omp_maybe_offloaded_ctx (ctx
))
2089 cgraph_node::get_create (decl
)->offloadable
= 1;
2090 if (ENABLE_OFFLOADING
)
2091 g
->have_offload
= true;
2094 if (cgraph_node::get_create (decl
)->offloadable
)
2096 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
2097 ? "omp target entrypoint"
2098 : "omp declare target");
2099 if (lookup_attribute ("omp declare target",
2100 DECL_ATTRIBUTES (current_function_decl
)))
2102 if (is_gimple_omp_offloaded (ctx
->stmt
))
2103 DECL_ATTRIBUTES (decl
)
2104 = remove_attribute ("omp declare target",
2105 copy_list (DECL_ATTRIBUTES (decl
)));
2110 && is_gimple_omp_offloaded (ctx
->stmt
)
2111 && lookup_attribute ("noclone", DECL_ATTRIBUTES (decl
)) == NULL_TREE
)
2112 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("noclone"),
2113 NULL_TREE
, DECL_ATTRIBUTES (decl
));
2115 DECL_ATTRIBUTES (decl
)
2116 = tree_cons (get_identifier (target_attr
),
2117 NULL_TREE
, DECL_ATTRIBUTES (decl
));
2120 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
2121 RESULT_DECL
, NULL_TREE
, void_type_node
);
2122 DECL_ARTIFICIAL (t
) = 1;
2123 DECL_IGNORED_P (t
) = 1;
2124 DECL_CONTEXT (t
) = decl
;
2125 DECL_RESULT (decl
) = t
;
2127 tree data_name
= get_identifier (".omp_data_i");
2128 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
2130 DECL_ARTIFICIAL (t
) = 1;
2131 DECL_NAMELESS (t
) = 1;
2132 DECL_ARG_TYPE (t
) = ptr_type_node
;
2133 DECL_CONTEXT (t
) = current_function_decl
;
2135 TREE_READONLY (t
) = 1;
2136 DECL_ARGUMENTS (decl
) = t
;
2138 ctx
->receiver_decl
= t
;
2141 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
2142 PARM_DECL
, get_identifier (".omp_data_o"),
2144 DECL_ARTIFICIAL (t
) = 1;
2145 DECL_NAMELESS (t
) = 1;
2146 DECL_ARG_TYPE (t
) = ptr_type_node
;
2147 DECL_CONTEXT (t
) = current_function_decl
;
2149 TREE_ADDRESSABLE (t
) = 1;
2150 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
2151 DECL_ARGUMENTS (decl
) = t
;
2154 /* Allocate memory for the function structure. The call to
2155 allocate_struct_function clobbers CFUN, so we need to restore
2157 push_struct_function (decl
);
2158 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
2159 init_tree_ssa (cfun
);
2163 /* Callback for walk_gimple_seq. Check if combined parallel
2164 contains gimple_omp_for_combined_into_p OMP_FOR. */
2167 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
2168 bool *handled_ops_p
,
2169 struct walk_stmt_info
*wi
)
2171 gimple
*stmt
= gsi_stmt (*gsi_p
);
2173 *handled_ops_p
= true;
2174 switch (gimple_code (stmt
))
2178 case GIMPLE_OMP_FOR
:
2179 if (gimple_omp_for_combined_into_p (stmt
)
2180 && gimple_omp_for_kind (stmt
)
2181 == *(const enum gf_mask
*) (wi
->info
))
2184 return integer_zero_node
;
2193 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2196 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
2197 omp_context
*outer_ctx
)
2199 struct walk_stmt_info wi
;
2201 memset (&wi
, 0, sizeof (wi
));
2203 wi
.info
= (void *) &msk
;
2204 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
2205 if (wi
.info
!= (void *) &msk
)
2207 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
2208 struct omp_for_data fd
;
2209 omp_extract_for_data (for_stmt
, &fd
, NULL
);
2210 /* We need two temporaries with fd.loop.v type (istart/iend)
2211 and then (fd.collapse - 1) temporaries with the same
2212 type for count2 ... countN-1 vars if not constant. */
2213 size_t count
= 2, i
;
2214 tree type
= fd
.iter_type
;
2216 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
2218 count
+= fd
.collapse
- 1;
2219 /* If there are lastprivate clauses on the inner
2220 GIMPLE_OMP_FOR, add one more temporaries for the total number
2221 of iterations (product of count1 ... countN-1). */
2222 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
2223 OMP_CLAUSE_LASTPRIVATE
)
2224 || (msk
== GF_OMP_FOR_KIND_FOR
2225 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2226 OMP_CLAUSE_LASTPRIVATE
)))
2228 tree temp
= create_tmp_var (type
);
2229 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2230 OMP_CLAUSE__LOOPTEMP_
);
2231 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2232 OMP_CLAUSE_DECL (c
) = temp
;
2233 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2234 gimple_omp_taskreg_set_clauses (stmt
, c
);
2237 && fd
.last_nonrect
== fd
.first_nonrect
+ 1)
2238 if (tree v
= gimple_omp_for_index (for_stmt
, fd
.last_nonrect
))
2239 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
2241 v
= gimple_omp_for_index (for_stmt
, fd
.first_nonrect
);
2242 tree type2
= TREE_TYPE (v
);
2244 for (i
= 0; i
< 3; i
++)
2246 tree temp
= create_tmp_var (type2
);
2247 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2248 OMP_CLAUSE__LOOPTEMP_
);
2249 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2250 OMP_CLAUSE_DECL (c
) = temp
;
2251 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2252 gimple_omp_taskreg_set_clauses (stmt
, c
);
2256 for (i
= 0; i
< count
; i
++)
2258 tree temp
= create_tmp_var (type
);
2259 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
2260 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2261 OMP_CLAUSE_DECL (c
) = temp
;
2262 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2263 gimple_omp_taskreg_set_clauses (stmt
, c
);
2266 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
2267 && omp_find_clause (gimple_omp_task_clauses (stmt
),
2268 OMP_CLAUSE_REDUCTION
))
2270 tree type
= build_pointer_type (pointer_sized_int_node
);
2271 tree temp
= create_tmp_var (type
);
2272 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2273 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2274 OMP_CLAUSE_DECL (c
) = temp
;
2275 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
2276 gimple_omp_task_set_clauses (stmt
, c
);
2280 /* Scan an OpenMP parallel directive. */
2283 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2287 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
2289 /* Ignore parallel directives with empty bodies, unless there
2290 are copyin clauses. */
2292 && empty_body_p (gimple_omp_body (stmt
))
2293 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2294 OMP_CLAUSE_COPYIN
) == NULL
)
2296 gsi_replace (gsi
, gimple_build_nop (), false);
2300 if (gimple_omp_parallel_combined_p (stmt
))
2301 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
2302 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2303 OMP_CLAUSE_REDUCTION
);
2304 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
2305 if (OMP_CLAUSE_REDUCTION_TASK (c
))
2307 tree type
= build_pointer_type (pointer_sized_int_node
);
2308 tree temp
= create_tmp_var (type
);
2309 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2311 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2312 OMP_CLAUSE_DECL (c
) = temp
;
2313 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
2314 gimple_omp_parallel_set_clauses (stmt
, c
);
2317 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
2320 ctx
= new_omp_context (stmt
, outer_ctx
);
2321 taskreg_contexts
.safe_push (ctx
);
2322 if (taskreg_nesting_level
> 1)
2323 ctx
->is_nested
= true;
2324 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2325 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2326 name
= create_tmp_var_name (".omp_data_s");
2327 name
= build_decl (gimple_location (stmt
),
2328 TYPE_DECL
, name
, ctx
->record_type
);
2329 DECL_ARTIFICIAL (name
) = 1;
2330 DECL_NAMELESS (name
) = 1;
2331 TYPE_NAME (ctx
->record_type
) = name
;
2332 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2333 create_omp_child_function (ctx
, false);
2334 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2336 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
2337 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2339 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2340 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2343 /* Scan an OpenMP task directive. */
2346 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2350 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
2352 /* Ignore task directives with empty bodies, unless they have depend
2355 && gimple_omp_body (stmt
)
2356 && empty_body_p (gimple_omp_body (stmt
))
2357 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
2359 gsi_replace (gsi
, gimple_build_nop (), false);
2363 if (gimple_omp_task_taskloop_p (stmt
))
2364 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
2366 ctx
= new_omp_context (stmt
, outer_ctx
);
2368 if (gimple_omp_task_taskwait_p (stmt
))
2370 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2374 taskreg_contexts
.safe_push (ctx
);
2375 if (taskreg_nesting_level
> 1)
2376 ctx
->is_nested
= true;
2377 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2378 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2379 name
= create_tmp_var_name (".omp_data_s");
2380 name
= build_decl (gimple_location (stmt
),
2381 TYPE_DECL
, name
, ctx
->record_type
);
2382 DECL_ARTIFICIAL (name
) = 1;
2383 DECL_NAMELESS (name
) = 1;
2384 TYPE_NAME (ctx
->record_type
) = name
;
2385 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2386 create_omp_child_function (ctx
, false);
2387 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2389 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2391 if (ctx
->srecord_type
)
2393 name
= create_tmp_var_name (".omp_data_a");
2394 name
= build_decl (gimple_location (stmt
),
2395 TYPE_DECL
, name
, ctx
->srecord_type
);
2396 DECL_ARTIFICIAL (name
) = 1;
2397 DECL_NAMELESS (name
) = 1;
2398 TYPE_NAME (ctx
->srecord_type
) = name
;
2399 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
2400 create_omp_child_function (ctx
, true);
2403 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2405 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2407 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2408 t
= build_int_cst (long_integer_type_node
, 0);
2409 gimple_omp_task_set_arg_size (stmt
, t
);
2410 t
= build_int_cst (long_integer_type_node
, 1);
2411 gimple_omp_task_set_arg_align (stmt
, t
);
2415 /* Helper function for finish_taskreg_scan, called through walk_tree.
2416 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2417 tree, replace it in the expression. */
2420 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
2424 omp_context
*ctx
= (omp_context
*) data
;
2425 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
2428 if (DECL_HAS_VALUE_EXPR_P (t
))
2429 t
= unshare_expr (DECL_VALUE_EXPR (t
));
2434 else if (IS_TYPE_OR_DECL_P (*tp
))
2439 /* If any decls have been made addressable during scan_omp,
2440 adjust their fields if needed, and layout record types
2441 of parallel/task constructs. */
2444 finish_taskreg_scan (omp_context
*ctx
)
2446 if (ctx
->record_type
== NULL_TREE
)
2449 /* If any make_addressable_vars were needed, verify all
2450 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2451 statements if use_pointer_for_field hasn't changed
2452 because of that. If it did, update field types now. */
2453 if (make_addressable_vars
)
2457 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2458 c
; c
= OMP_CLAUSE_CHAIN (c
))
2459 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2460 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2462 tree decl
= OMP_CLAUSE_DECL (c
);
2464 /* Global variables don't need to be copied,
2465 the receiver side will use them directly. */
2466 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2468 if (!bitmap_bit_p (make_addressable_vars
, DECL_UID (decl
))
2469 || !use_pointer_for_field (decl
, ctx
))
2471 tree field
= lookup_field (decl
, ctx
);
2472 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2473 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2475 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2476 TREE_THIS_VOLATILE (field
) = 0;
2477 DECL_USER_ALIGN (field
) = 0;
2478 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2479 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2480 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
2481 if (ctx
->srecord_type
)
2483 tree sfield
= lookup_sfield (decl
, ctx
);
2484 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2485 TREE_THIS_VOLATILE (sfield
) = 0;
2486 DECL_USER_ALIGN (sfield
) = 0;
2487 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2488 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2489 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
2494 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2496 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2497 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2500 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2501 expects to find it at the start of data. */
2502 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2503 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
2507 *p
= DECL_CHAIN (*p
);
2511 p
= &DECL_CHAIN (*p
);
2512 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2513 TYPE_FIELDS (ctx
->record_type
) = f
;
2515 layout_type (ctx
->record_type
);
2516 fixup_child_record_type (ctx
);
2518 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2520 layout_type (ctx
->record_type
);
2521 fixup_child_record_type (ctx
);
2525 location_t loc
= gimple_location (ctx
->stmt
);
2526 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2528 = omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
2530 /* Move VLA fields to the end. */
2531 p
= &TYPE_FIELDS (ctx
->record_type
);
2533 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2534 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2537 *p
= TREE_CHAIN (*p
);
2538 TREE_CHAIN (*q
) = NULL_TREE
;
2539 q
= &TREE_CHAIN (*q
);
2542 p
= &DECL_CHAIN (*p
);
2544 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2546 /* Move fields corresponding to first and second _looptemp_
2547 clause first. There are filled by GOMP_taskloop
2548 and thus need to be in specific positions. */
2549 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2550 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2551 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2552 OMP_CLAUSE__LOOPTEMP_
);
2553 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2554 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2555 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2556 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
2557 p
= &TYPE_FIELDS (ctx
->record_type
);
2559 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2560 *p
= DECL_CHAIN (*p
);
2562 p
= &DECL_CHAIN (*p
);
2563 DECL_CHAIN (f1
) = f2
;
2566 DECL_CHAIN (f2
) = f3
;
2567 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2570 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2571 TYPE_FIELDS (ctx
->record_type
) = f1
;
2572 if (ctx
->srecord_type
)
2574 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2575 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2577 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2578 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2580 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2581 *p
= DECL_CHAIN (*p
);
2583 p
= &DECL_CHAIN (*p
);
2584 DECL_CHAIN (f1
) = f2
;
2585 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2588 DECL_CHAIN (f2
) = f3
;
2589 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2592 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2593 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2600 /* Look for a firstprivate clause with the detach event handle. */
2601 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2602 c
; c
= OMP_CLAUSE_CHAIN (c
))
2604 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
2606 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c
), ctx
)
2607 == OMP_CLAUSE_DECL (detach_clause
))
2612 field
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2614 /* Move field corresponding to the detach clause first.
2615 This is filled by GOMP_task and needs to be in a
2616 specific position. */
2617 p
= &TYPE_FIELDS (ctx
->record_type
);
2620 *p
= DECL_CHAIN (*p
);
2622 p
= &DECL_CHAIN (*p
);
2623 DECL_CHAIN (field
) = TYPE_FIELDS (ctx
->record_type
);
2624 TYPE_FIELDS (ctx
->record_type
) = field
;
2625 if (ctx
->srecord_type
)
2627 field
= lookup_sfield (OMP_CLAUSE_DECL (c
), ctx
);
2628 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2631 *p
= DECL_CHAIN (*p
);
2633 p
= &DECL_CHAIN (*p
);
2634 DECL_CHAIN (field
) = TYPE_FIELDS (ctx
->srecord_type
);
2635 TYPE_FIELDS (ctx
->srecord_type
) = field
;
2638 layout_type (ctx
->record_type
);
2639 fixup_child_record_type (ctx
);
2640 if (ctx
->srecord_type
)
2641 layout_type (ctx
->srecord_type
);
2642 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2643 TYPE_SIZE_UNIT (ctx
->record_type
));
2644 if (TREE_CODE (t
) != INTEGER_CST
)
2646 t
= unshare_expr (t
);
2647 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2649 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2650 t
= build_int_cst (long_integer_type_node
,
2651 TYPE_ALIGN_UNIT (ctx
->record_type
));
2652 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2656 /* Find the enclosing offload context. */
2658 static omp_context
*
2659 enclosing_target_ctx (omp_context
*ctx
)
2661 for (; ctx
; ctx
= ctx
->outer
)
2662 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2668 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2670 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2673 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2675 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2677 gimple
*stmt
= ctx
->stmt
;
2678 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2679 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2686 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2687 (This doesn't include OpenACC 'kernels' decomposed parts.)
2688 Until kernels handling moves to use the same loop indirection
2689 scheme as parallel, we need to do this checking early. */
2692 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2694 bool checking
= true;
2695 unsigned outer_mask
= 0;
2696 unsigned this_mask
= 0;
2697 bool has_seq
= false, has_auto
= false;
2700 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2704 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2706 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2709 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2711 switch (OMP_CLAUSE_CODE (c
))
2713 case OMP_CLAUSE_GANG
:
2714 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2716 case OMP_CLAUSE_WORKER
:
2717 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2719 case OMP_CLAUSE_VECTOR
:
2720 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2722 case OMP_CLAUSE_SEQ
:
2725 case OMP_CLAUSE_AUTO
:
2735 if (has_seq
&& (this_mask
|| has_auto
))
2736 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2737 " OpenACC loop specifiers");
2738 else if (has_auto
&& this_mask
)
2739 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2740 " OpenACC loop specifiers");
2742 if (this_mask
& outer_mask
)
2743 error_at (gimple_location (stmt
), "inner loop uses same"
2744 " OpenACC parallelism as containing loop");
2747 return outer_mask
| this_mask
;
2750 /* Scan a GIMPLE_OMP_FOR. */
2752 static omp_context
*
2753 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2757 tree clauses
= gimple_omp_for_clauses (stmt
);
2759 ctx
= new_omp_context (stmt
, outer_ctx
);
2761 if (is_gimple_omp_oacc (stmt
))
2763 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2765 if (!(tgt
&& is_oacc_kernels (tgt
)))
2766 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2769 switch (OMP_CLAUSE_CODE (c
))
2771 case OMP_CLAUSE_GANG
:
2772 c_op0
= OMP_CLAUSE_GANG_EXPR (c
);
2775 case OMP_CLAUSE_WORKER
:
2776 c_op0
= OMP_CLAUSE_WORKER_EXPR (c
);
2779 case OMP_CLAUSE_VECTOR
:
2780 c_op0
= OMP_CLAUSE_VECTOR_EXPR (c
);
2789 /* By construction, this is impossible for OpenACC 'kernels'
2790 decomposed parts. */
2791 gcc_assert (!(tgt
&& is_oacc_kernels_decomposed_part (tgt
)));
2793 error_at (OMP_CLAUSE_LOCATION (c
),
2794 "argument not permitted on %qs clause",
2795 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
2797 inform (gimple_location (tgt
->stmt
),
2798 "enclosing parent compute construct");
2799 else if (oacc_get_fn_attrib (current_function_decl
))
2800 inform (DECL_SOURCE_LOCATION (current_function_decl
),
2801 "enclosing routine");
2807 if (tgt
&& is_oacc_kernels (tgt
))
2808 check_oacc_kernel_gwv (stmt
, ctx
);
2810 /* Collect all variables named in reductions on this loop. Ensure
2811 that, if this loop has a reduction on some variable v, and there is
2812 a reduction on v somewhere in an outer context, then there is a
2813 reduction on v on all intervening loops as well. */
2814 tree local_reduction_clauses
= NULL
;
2815 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2817 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
2818 local_reduction_clauses
2819 = tree_cons (NULL
, c
, local_reduction_clauses
);
2821 if (ctx
->outer_reduction_clauses
== NULL
&& ctx
->outer
!= NULL
)
2822 ctx
->outer_reduction_clauses
2823 = chainon (unshare_expr (ctx
->outer
->local_reduction_clauses
),
2824 ctx
->outer
->outer_reduction_clauses
);
2825 tree outer_reduction_clauses
= ctx
->outer_reduction_clauses
;
2826 tree local_iter
= local_reduction_clauses
;
2827 for (; local_iter
; local_iter
= TREE_CHAIN (local_iter
))
2829 tree local_clause
= TREE_VALUE (local_iter
);
2830 tree local_var
= OMP_CLAUSE_DECL (local_clause
);
2831 tree_code local_op
= OMP_CLAUSE_REDUCTION_CODE (local_clause
);
2832 bool have_outer_reduction
= false;
2833 tree ctx_iter
= outer_reduction_clauses
;
2834 for (; ctx_iter
; ctx_iter
= TREE_CHAIN (ctx_iter
))
2836 tree outer_clause
= TREE_VALUE (ctx_iter
);
2837 tree outer_var
= OMP_CLAUSE_DECL (outer_clause
);
2838 tree_code outer_op
= OMP_CLAUSE_REDUCTION_CODE (outer_clause
);
2839 if (outer_var
== local_var
&& outer_op
!= local_op
)
2841 warning_at (OMP_CLAUSE_LOCATION (local_clause
), 0,
2842 "conflicting reduction operations for %qE",
2844 inform (OMP_CLAUSE_LOCATION (outer_clause
),
2845 "location of the previous reduction for %qE",
2848 if (outer_var
== local_var
)
2850 have_outer_reduction
= true;
2854 if (have_outer_reduction
)
2856 /* There is a reduction on outer_var both on this loop and on
2857 some enclosing loop. Walk up the context tree until such a
2858 loop with a reduction on outer_var is found, and complain
2859 about all intervening loops that do not have such a
2861 struct omp_context
*curr_loop
= ctx
->outer
;
2863 while (curr_loop
!= NULL
)
2865 tree curr_iter
= curr_loop
->local_reduction_clauses
;
2866 for (; curr_iter
; curr_iter
= TREE_CHAIN (curr_iter
))
2868 tree curr_clause
= TREE_VALUE (curr_iter
);
2869 tree curr_var
= OMP_CLAUSE_DECL (curr_clause
);
2870 if (curr_var
== local_var
)
2877 warning_at (gimple_location (curr_loop
->stmt
), 0,
2878 "nested loop in reduction needs "
2879 "reduction clause for %qE",
2883 curr_loop
= curr_loop
->outer
;
2887 ctx
->local_reduction_clauses
= local_reduction_clauses
;
2888 ctx
->outer_reduction_clauses
2889 = chainon (unshare_expr (ctx
->local_reduction_clauses
),
2890 ctx
->outer_reduction_clauses
);
2892 if (tgt
&& is_oacc_kernels (tgt
))
2894 /* Strip out reductions, as they are not handled yet. */
2895 tree
*prev_ptr
= &clauses
;
2897 while (tree probe
= *prev_ptr
)
2899 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2901 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2902 *prev_ptr
= *next_ptr
;
2904 prev_ptr
= next_ptr
;
2907 gimple_omp_for_set_clauses (stmt
, clauses
);
2911 scan_sharing_clauses (clauses
, ctx
);
2913 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2914 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2916 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2917 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2918 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2919 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2921 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2925 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2928 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2929 omp_context
*outer_ctx
)
2931 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2932 gsi_replace (gsi
, bind
, false);
2933 gimple_seq seq
= NULL
;
2934 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2935 tree cond
= create_tmp_var_raw (integer_type_node
);
2936 DECL_CONTEXT (cond
) = current_function_decl
;
2937 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2938 gimple_bind_set_vars (bind
, cond
);
2939 gimple_call_set_lhs (g
, cond
);
2940 gimple_seq_add_stmt (&seq
, g
);
2941 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2942 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2943 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2944 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2945 gimple_seq_add_stmt (&seq
, g
);
2946 g
= gimple_build_label (lab1
);
2947 gimple_seq_add_stmt (&seq
, g
);
2948 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2949 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2950 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2951 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2952 gimple_omp_for_set_clauses (new_stmt
, clause
);
2953 gimple_seq_add_stmt (&seq
, new_stmt
);
2954 g
= gimple_build_goto (lab3
);
2955 gimple_seq_add_stmt (&seq
, g
);
2956 g
= gimple_build_label (lab2
);
2957 gimple_seq_add_stmt (&seq
, g
);
2958 gimple_seq_add_stmt (&seq
, stmt
);
2959 g
= gimple_build_label (lab3
);
2960 gimple_seq_add_stmt (&seq
, g
);
2961 gimple_bind_set_body (bind
, seq
);
2963 scan_omp_for (new_stmt
, outer_ctx
);
2964 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2967 static tree
omp_find_scan (gimple_stmt_iterator
*, bool *,
2968 struct walk_stmt_info
*);
2969 static omp_context
*maybe_lookup_ctx (gimple
*);
2971 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2972 for scan phase loop. */
2975 scan_omp_simd_scan (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2976 omp_context
*outer_ctx
)
2978 /* The only change between inclusive and exclusive scan will be
2979 within the first simd loop, so just use inclusive in the
2980 worksharing loop. */
2981 outer_ctx
->scan_inclusive
= true;
2982 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_INCLUSIVE
);
2983 OMP_CLAUSE_DECL (c
) = integer_zero_node
;
2985 gomp_scan
*input_stmt
= gimple_build_omp_scan (NULL
, NULL_TREE
);
2986 gomp_scan
*scan_stmt
= gimple_build_omp_scan (NULL
, c
);
2987 gsi_replace (gsi
, input_stmt
, false);
2988 gimple_seq input_body
= NULL
;
2989 gimple_seq_add_stmt (&input_body
, stmt
);
2990 gsi_insert_after (gsi
, scan_stmt
, GSI_NEW_STMT
);
2992 gimple_stmt_iterator input1_gsi
= gsi_none ();
2993 struct walk_stmt_info wi
;
2994 memset (&wi
, 0, sizeof (wi
));
2996 wi
.info
= (void *) &input1_gsi
;
2997 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), omp_find_scan
, NULL
, &wi
);
2998 gcc_assert (!gsi_end_p (input1_gsi
));
3000 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
3001 gsi_next (&input1_gsi
);
3002 gimple
*scan_stmt1
= gsi_stmt (input1_gsi
);
3003 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
3004 c
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (scan_stmt1
));
3005 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
3006 std::swap (input_stmt1
, scan_stmt1
);
3008 gimple_seq input_body1
= gimple_omp_body (input_stmt1
);
3009 gimple_omp_set_body (input_stmt1
, NULL
);
3011 gimple_seq scan_body
= copy_gimple_seq_and_replace_locals (stmt
);
3012 gomp_for
*new_stmt
= as_a
<gomp_for
*> (scan_body
);
3014 gimple_omp_set_body (input_stmt1
, input_body1
);
3015 gimple_omp_set_body (scan_stmt1
, NULL
);
3017 gimple_stmt_iterator input2_gsi
= gsi_none ();
3018 memset (&wi
, 0, sizeof (wi
));
3020 wi
.info
= (void *) &input2_gsi
;
3021 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt
), omp_find_scan
,
3023 gcc_assert (!gsi_end_p (input2_gsi
));
3025 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
3026 gsi_next (&input2_gsi
);
3027 gimple
*scan_stmt2
= gsi_stmt (input2_gsi
);
3028 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
3029 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
3030 std::swap (input_stmt2
, scan_stmt2
);
3032 gimple_omp_set_body (input_stmt2
, NULL
);
3034 gimple_omp_set_body (input_stmt
, input_body
);
3035 gimple_omp_set_body (scan_stmt
, scan_body
);
3037 omp_context
*ctx
= new_omp_context (input_stmt
, outer_ctx
);
3038 scan_omp (gimple_omp_body_ptr (input_stmt
), ctx
);
3040 ctx
= new_omp_context (scan_stmt
, outer_ctx
);
3041 scan_omp (gimple_omp_body_ptr (scan_stmt
), ctx
);
3043 maybe_lookup_ctx (new_stmt
)->for_simd_scan_phase
= true;
3046 /* Scan an OpenMP sections directive. */
3049 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
3053 ctx
= new_omp_context (stmt
, outer_ctx
);
3054 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
3055 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3058 /* Scan an OpenMP single directive. */
3061 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
3066 ctx
= new_omp_context (stmt
, outer_ctx
);
3067 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
3068 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
3069 name
= create_tmp_var_name (".omp_copy_s");
3070 name
= build_decl (gimple_location (stmt
),
3071 TYPE_DECL
, name
, ctx
->record_type
);
3072 TYPE_NAME (ctx
->record_type
) = name
;
3074 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
3075 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3077 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3078 ctx
->record_type
= NULL
;
3080 layout_type (ctx
->record_type
);
3083 /* Scan a GIMPLE_OMP_TARGET. */
3086 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
3090 bool offloaded
= is_gimple_omp_offloaded (stmt
);
3091 tree clauses
= gimple_omp_target_clauses (stmt
);
3093 ctx
= new_omp_context (stmt
, outer_ctx
);
3094 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
3095 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
3096 name
= create_tmp_var_name (".omp_data_t");
3097 name
= build_decl (gimple_location (stmt
),
3098 TYPE_DECL
, name
, ctx
->record_type
);
3099 DECL_ARTIFICIAL (name
) = 1;
3100 DECL_NAMELESS (name
) = 1;
3101 TYPE_NAME (ctx
->record_type
) = name
;
3102 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
3106 create_omp_child_function (ctx
, false);
3107 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
3110 scan_sharing_clauses (clauses
, ctx
);
3111 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3113 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3114 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
3117 TYPE_FIELDS (ctx
->record_type
)
3118 = nreverse (TYPE_FIELDS (ctx
->record_type
));
3121 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
3122 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
3124 field
= DECL_CHAIN (field
))
3125 gcc_assert (DECL_ALIGN (field
) == align
);
3127 layout_type (ctx
->record_type
);
3129 fixup_child_record_type (ctx
);
3132 if (ctx
->teams_nested_p
&& ctx
->nonteams_nested_p
)
3134 error_at (gimple_location (stmt
),
3135 "%<target%> construct with nested %<teams%> construct "
3136 "contains directives outside of the %<teams%> construct");
3137 gimple_omp_set_body (stmt
, gimple_build_bind (NULL
, NULL
, NULL
));
3141 /* Scan an OpenMP teams directive. */
3144 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
3146 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
3148 if (!gimple_omp_teams_host (stmt
))
3150 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
3151 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3154 taskreg_contexts
.safe_push (ctx
);
3155 gcc_assert (taskreg_nesting_level
== 1);
3156 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
3157 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
3158 tree name
= create_tmp_var_name (".omp_data_s");
3159 name
= build_decl (gimple_location (stmt
),
3160 TYPE_DECL
, name
, ctx
->record_type
);
3161 DECL_ARTIFICIAL (name
) = 1;
3162 DECL_NAMELESS (name
) = 1;
3163 TYPE_NAME (ctx
->record_type
) = name
;
3164 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
3165 create_omp_child_function (ctx
, false);
3166 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
3168 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
3169 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3171 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3172 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
3175 /* Check nesting restrictions. */
3177 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
3181 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3182 inside an OpenACC CTX. */
3183 if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3184 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
)
3185 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3187 else if (!(is_gimple_omp (stmt
)
3188 && is_gimple_omp_oacc (stmt
)))
3190 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3192 error_at (gimple_location (stmt
),
3193 "non-OpenACC construct inside of OpenACC routine");
3197 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
3198 if (is_gimple_omp (octx
->stmt
)
3199 && is_gimple_omp_oacc (octx
->stmt
))
3201 error_at (gimple_location (stmt
),
3202 "non-OpenACC construct inside of OpenACC region");
3209 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
3210 && gimple_omp_target_kind (ctx
->stmt
) == GF_OMP_TARGET_KIND_REGION
)
3212 c
= omp_find_clause (gimple_omp_target_clauses (ctx
->stmt
),
3214 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
3216 error_at (gimple_location (stmt
),
3217 "OpenMP constructs are not allowed in target region "
3218 "with %<ancestor%>");
3222 if (gimple_code (stmt
) == GIMPLE_OMP_TEAMS
&& !ctx
->teams_nested_p
)
3223 ctx
->teams_nested_p
= true;
3225 ctx
->nonteams_nested_p
= true;
3227 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
3229 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
3231 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3232 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
3236 if (ctx
->order_concurrent
3237 && (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
3238 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3239 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
3241 error_at (gimple_location (stmt
),
3242 "OpenMP constructs other than %<parallel%>, %<loop%>"
3243 " or %<simd%> may not be nested inside a region with"
3244 " the %<order(concurrent)%> clause");
3247 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
3249 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3250 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3252 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
3253 && (ctx
->outer
== NULL
3254 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
3255 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
3256 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
3257 != GF_OMP_FOR_KIND_FOR
)
3258 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
3260 error_at (gimple_location (stmt
),
3261 "%<ordered simd threads%> must be closely "
3262 "nested inside of %<%s simd%> region",
3263 lang_GNU_Fortran () ? "do" : "for");
3269 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3270 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
3271 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
3273 else if (gimple_code (stmt
) == GIMPLE_OMP_FOR
3274 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
3276 error_at (gimple_location (stmt
),
3277 "OpenMP constructs other than "
3278 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3279 "not be nested inside %<simd%> region");
3282 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
3284 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
3285 || (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
3286 && omp_find_clause (gimple_omp_for_clauses (stmt
),
3287 OMP_CLAUSE_BIND
) == NULL_TREE
))
3288 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
3290 error_at (gimple_location (stmt
),
3291 "only %<distribute%>, %<parallel%> or %<loop%> "
3292 "regions are allowed to be strictly nested inside "
3293 "%<teams%> region");
3297 else if (ctx
->order_concurrent
3298 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
3299 && (gimple_code (stmt
) != GIMPLE_OMP_FOR
3300 || gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_SIMD
)
3301 && gimple_code (stmt
) != GIMPLE_OMP_SCAN
)
3304 error_at (gimple_location (stmt
),
3305 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3306 "%<simd%> may not be nested inside a %<loop%> region");
3308 error_at (gimple_location (stmt
),
3309 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3310 "%<simd%> may not be nested inside a region with "
3311 "the %<order(concurrent)%> clause");
3315 switch (gimple_code (stmt
))
3317 case GIMPLE_OMP_FOR
:
3318 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
)
3320 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
3322 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
3324 error_at (gimple_location (stmt
),
3325 "%<distribute%> region must be strictly nested "
3326 "inside %<teams%> construct");
3331 /* We split taskloop into task and nested taskloop in it. */
3332 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3334 /* For now, hope this will change and loop bind(parallel) will not
3335 be allowed in lots of contexts. */
3336 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
3337 && omp_find_clause (gimple_omp_for_clauses (stmt
), OMP_CLAUSE_BIND
))
3339 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
3344 switch (gimple_code (ctx
->stmt
))
3346 case GIMPLE_OMP_FOR
:
3347 ok
= (gimple_omp_for_kind (ctx
->stmt
)
3348 == GF_OMP_FOR_KIND_OACC_LOOP
);
3351 case GIMPLE_OMP_TARGET
:
3352 switch (gimple_omp_target_kind (ctx
->stmt
))
3354 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3355 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3356 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3357 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3358 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3369 else if (oacc_get_fn_attrib (current_function_decl
))
3373 error_at (gimple_location (stmt
),
3374 "OpenACC loop directive must be associated with"
3375 " an OpenACC compute region");
3381 if (is_gimple_call (stmt
)
3382 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3383 == BUILT_IN_GOMP_CANCEL
3384 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3385 == BUILT_IN_GOMP_CANCELLATION_POINT
))
3387 const char *bad
= NULL
;
3388 const char *kind
= NULL
;
3389 const char *construct
3390 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3391 == BUILT_IN_GOMP_CANCEL
)
3393 : "cancellation point";
3396 error_at (gimple_location (stmt
), "orphaned %qs construct",
3400 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
3401 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
3405 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
3407 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3408 == BUILT_IN_GOMP_CANCEL
3409 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3410 ctx
->cancellable
= true;
3414 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3415 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
3417 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3418 == BUILT_IN_GOMP_CANCEL
3419 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3421 ctx
->cancellable
= true;
3422 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3424 warning_at (gimple_location (stmt
), 0,
3425 "%<cancel for%> inside "
3426 "%<nowait%> for construct");
3427 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3428 OMP_CLAUSE_ORDERED
))
3429 warning_at (gimple_location (stmt
), 0,
3430 "%<cancel for%> inside "
3431 "%<ordered%> for construct");
3436 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
3437 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
3439 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3440 == BUILT_IN_GOMP_CANCEL
3441 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3443 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
3445 ctx
->cancellable
= true;
3446 if (omp_find_clause (gimple_omp_sections_clauses
3449 warning_at (gimple_location (stmt
), 0,
3450 "%<cancel sections%> inside "
3451 "%<nowait%> sections construct");
3455 gcc_assert (ctx
->outer
3456 && gimple_code (ctx
->outer
->stmt
)
3457 == GIMPLE_OMP_SECTIONS
);
3458 ctx
->outer
->cancellable
= true;
3459 if (omp_find_clause (gimple_omp_sections_clauses
3462 warning_at (gimple_location (stmt
), 0,
3463 "%<cancel sections%> inside "
3464 "%<nowait%> sections construct");
3470 if (!is_task_ctx (ctx
)
3471 && (!is_taskloop_ctx (ctx
)
3472 || ctx
->outer
== NULL
3473 || !is_task_ctx (ctx
->outer
)))
3477 for (omp_context
*octx
= ctx
->outer
;
3478 octx
; octx
= octx
->outer
)
3480 switch (gimple_code (octx
->stmt
))
3482 case GIMPLE_OMP_TASKGROUP
:
3484 case GIMPLE_OMP_TARGET
:
3485 if (gimple_omp_target_kind (octx
->stmt
)
3486 != GF_OMP_TARGET_KIND_REGION
)
3489 case GIMPLE_OMP_PARALLEL
:
3490 case GIMPLE_OMP_TEAMS
:
3491 error_at (gimple_location (stmt
),
3492 "%<%s taskgroup%> construct not closely "
3493 "nested inside of %<taskgroup%> region",
3496 case GIMPLE_OMP_TASK
:
3497 if (gimple_omp_task_taskloop_p (octx
->stmt
)
3499 && is_taskloop_ctx (octx
->outer
))
3502 = gimple_omp_for_clauses (octx
->outer
->stmt
);
3503 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
3512 ctx
->cancellable
= true;
3517 error_at (gimple_location (stmt
), "invalid arguments");
3522 error_at (gimple_location (stmt
),
3523 "%<%s %s%> construct not closely nested inside of %qs",
3524 construct
, kind
, bad
);
3529 case GIMPLE_OMP_SECTIONS
:
3530 case GIMPLE_OMP_SINGLE
:
3531 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3532 switch (gimple_code (ctx
->stmt
))
3534 case GIMPLE_OMP_FOR
:
3535 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3536 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3539 case GIMPLE_OMP_SECTIONS
:
3540 case GIMPLE_OMP_SINGLE
:
3541 case GIMPLE_OMP_ORDERED
:
3542 case GIMPLE_OMP_MASTER
:
3543 case GIMPLE_OMP_MASKED
:
3544 case GIMPLE_OMP_TASK
:
3545 case GIMPLE_OMP_CRITICAL
:
3546 if (is_gimple_call (stmt
))
3548 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3549 != BUILT_IN_GOMP_BARRIER
)
3551 error_at (gimple_location (stmt
),
3552 "barrier region may not be closely nested inside "
3553 "of work-sharing, %<loop%>, %<critical%>, "
3554 "%<ordered%>, %<master%>, %<masked%>, explicit "
3555 "%<task%> or %<taskloop%> region");
3558 error_at (gimple_location (stmt
),
3559 "work-sharing region may not be closely nested inside "
3560 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3561 "%<master%>, %<masked%>, explicit %<task%> or "
3562 "%<taskloop%> region");
3564 case GIMPLE_OMP_PARALLEL
:
3565 case GIMPLE_OMP_TEAMS
:
3567 case GIMPLE_OMP_TARGET
:
3568 if (gimple_omp_target_kind (ctx
->stmt
)
3569 == GF_OMP_TARGET_KIND_REGION
)
3576 case GIMPLE_OMP_MASTER
:
3577 case GIMPLE_OMP_MASKED
:
3578 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3579 switch (gimple_code (ctx
->stmt
))
3581 case GIMPLE_OMP_FOR
:
3582 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3583 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3586 case GIMPLE_OMP_SECTIONS
:
3587 case GIMPLE_OMP_SINGLE
:
3588 case GIMPLE_OMP_TASK
:
3589 error_at (gimple_location (stmt
),
3590 "%qs region may not be closely nested inside "
3591 "of work-sharing, %<loop%>, explicit %<task%> or "
3592 "%<taskloop%> region",
3593 gimple_code (stmt
) == GIMPLE_OMP_MASTER
3594 ? "master" : "masked");
3596 case GIMPLE_OMP_PARALLEL
:
3597 case GIMPLE_OMP_TEAMS
:
3599 case GIMPLE_OMP_TARGET
:
3600 if (gimple_omp_target_kind (ctx
->stmt
)
3601 == GF_OMP_TARGET_KIND_REGION
)
3608 case GIMPLE_OMP_SCOPE
:
3609 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3610 switch (gimple_code (ctx
->stmt
))
3612 case GIMPLE_OMP_FOR
:
3613 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3614 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3617 case GIMPLE_OMP_SECTIONS
:
3618 case GIMPLE_OMP_SINGLE
:
3619 case GIMPLE_OMP_TASK
:
3620 case GIMPLE_OMP_CRITICAL
:
3621 case GIMPLE_OMP_ORDERED
:
3622 case GIMPLE_OMP_MASTER
:
3623 case GIMPLE_OMP_MASKED
:
3624 error_at (gimple_location (stmt
),
3625 "%<scope%> region may not be closely nested inside "
3626 "of work-sharing, %<loop%>, explicit %<task%>, "
3627 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3628 "or %<masked%> region");
3630 case GIMPLE_OMP_PARALLEL
:
3631 case GIMPLE_OMP_TEAMS
:
3633 case GIMPLE_OMP_TARGET
:
3634 if (gimple_omp_target_kind (ctx
->stmt
)
3635 == GF_OMP_TARGET_KIND_REGION
)
3642 case GIMPLE_OMP_TASK
:
3643 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3644 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
)
3646 enum omp_clause_doacross_kind kind
= OMP_CLAUSE_DOACROSS_KIND (c
);
3647 error_at (OMP_CLAUSE_LOCATION (c
),
3648 "%<%s(%s)%> is only allowed in %<omp ordered%>",
3649 OMP_CLAUSE_DOACROSS_DEPEND (c
) ? "depend" : "doacross",
3650 kind
== OMP_CLAUSE_DOACROSS_SOURCE
? "source" : "sink");
3654 case GIMPLE_OMP_ORDERED
:
3655 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3656 c
; c
= OMP_CLAUSE_CHAIN (c
))
3658 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DOACROSS
)
3660 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
3662 error_at (OMP_CLAUSE_LOCATION (c
),
3663 "invalid depend kind in omp %<ordered%> %<depend%>");
3666 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
3667 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
3672 /* Look for containing ordered(N) loop. */
3674 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3676 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3677 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
3679 error_at (OMP_CLAUSE_LOCATION (c
),
3680 "%<ordered%> construct with %<depend%> clause "
3681 "must be closely nested inside an %<ordered%> loop");
3685 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3686 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3688 /* ordered simd must be closely nested inside of simd region,
3689 and simd region must not encounter constructs other than
3690 ordered simd, therefore ordered simd may be either orphaned,
3691 or ctx->stmt must be simd. The latter case is handled already
3695 error_at (gimple_location (stmt
),
3696 "%<ordered%> %<simd%> must be closely nested inside "
3701 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3702 switch (gimple_code (ctx
->stmt
))
3704 case GIMPLE_OMP_CRITICAL
:
3705 case GIMPLE_OMP_TASK
:
3706 case GIMPLE_OMP_ORDERED
:
3707 ordered_in_taskloop
:
3708 error_at (gimple_location (stmt
),
3709 "%<ordered%> region may not be closely nested inside "
3710 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3711 "%<taskloop%> region");
3713 case GIMPLE_OMP_FOR
:
3714 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3715 goto ordered_in_taskloop
;
3717 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3718 OMP_CLAUSE_ORDERED
);
3721 error_at (gimple_location (stmt
),
3722 "%<ordered%> region must be closely nested inside "
3723 "a loop region with an %<ordered%> clause");
3726 if (!gimple_omp_ordered_standalone_p (stmt
))
3728 if (OMP_CLAUSE_ORDERED_DOACROSS (o
))
3730 error_at (gimple_location (stmt
),
3731 "%<ordered%> construct without %<doacross%> or "
3732 "%<depend%> clauses must not have the same "
3733 "binding region as %<ordered%> construct with "
3737 else if (OMP_CLAUSE_ORDERED_EXPR (o
))
3740 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3741 OMP_CLAUSE_COLLAPSE
);
3743 o_n
= tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (o
));
3744 HOST_WIDE_INT c_n
= 1;
3746 c_n
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co
));
3749 error_at (gimple_location (stmt
),
3750 "%<ordered%> construct without %<doacross%> "
3751 "or %<depend%> clauses binds to loop where "
3752 "%<collapse%> argument %wd is different from "
3753 "%<ordered%> argument %wd", c_n
, o_n
);
3759 case GIMPLE_OMP_TARGET
:
3760 if (gimple_omp_target_kind (ctx
->stmt
)
3761 != GF_OMP_TARGET_KIND_REGION
)
3764 case GIMPLE_OMP_PARALLEL
:
3765 case GIMPLE_OMP_TEAMS
:
3766 error_at (gimple_location (stmt
),
3767 "%<ordered%> region must be closely nested inside "
3768 "a loop region with an %<ordered%> clause");
3774 case GIMPLE_OMP_CRITICAL
:
3777 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3778 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3779 if (gomp_critical
*other_crit
3780 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3781 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3783 error_at (gimple_location (stmt
),
3784 "%<critical%> region may not be nested inside "
3785 "a %<critical%> region with the same name");
3790 case GIMPLE_OMP_TEAMS
:
3793 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3794 || (gimple_omp_target_kind (ctx
->stmt
)
3795 != GF_OMP_TARGET_KIND_REGION
))
3797 /* Teams construct can appear either strictly nested inside of
3798 target construct with no intervening stmts, or can be encountered
3799 only by initial task (so must not appear inside any OpenMP
3801 error_at (gimple_location (stmt
),
3802 "%<teams%> construct must be closely nested inside of "
3803 "%<target%> construct or not nested in any OpenMP "
3808 case GIMPLE_OMP_TARGET
:
3809 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3810 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
)
3812 enum omp_clause_doacross_kind kind
= OMP_CLAUSE_DOACROSS_KIND (c
);
3813 error_at (OMP_CLAUSE_LOCATION (c
),
3814 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3815 kind
== OMP_CLAUSE_DOACROSS_SOURCE
? "source" : "sink");
3818 if (is_gimple_omp_offloaded (stmt
)
3819 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3821 error_at (gimple_location (stmt
),
3822 "OpenACC region inside of OpenACC routine, nested "
3823 "parallelism not supported yet");
3826 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3828 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3830 if (is_gimple_omp (stmt
)
3831 && is_gimple_omp_oacc (stmt
)
3832 && is_gimple_omp (ctx
->stmt
))
3834 error_at (gimple_location (stmt
),
3835 "OpenACC construct inside of non-OpenACC region");
3841 const char *stmt_name
, *ctx_stmt_name
;
3842 switch (gimple_omp_target_kind (stmt
))
3844 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3845 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3846 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3847 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3848 stmt_name
= "target enter data"; break;
3849 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3850 stmt_name
= "target exit data"; break;
3851 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3852 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3853 case GF_OMP_TARGET_KIND_OACC_SERIAL
: stmt_name
= "serial"; break;
3854 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3855 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3856 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
3857 stmt_name
= "enter data"; break;
3858 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
3859 stmt_name
= "exit data"; break;
3860 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3861 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3863 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3864 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3865 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
3866 /* OpenACC 'kernels' decomposed parts. */
3867 stmt_name
= "kernels"; break;
3868 default: gcc_unreachable ();
3870 switch (gimple_omp_target_kind (ctx
->stmt
))
3872 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3873 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3874 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3875 ctx_stmt_name
= "parallel"; break;
3876 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3877 ctx_stmt_name
= "kernels"; break;
3878 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3879 ctx_stmt_name
= "serial"; break;
3880 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3881 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3882 ctx_stmt_name
= "host_data"; break;
3883 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3884 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3885 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
3886 /* OpenACC 'kernels' decomposed parts. */
3887 ctx_stmt_name
= "kernels"; break;
3888 default: gcc_unreachable ();
3891 /* OpenACC/OpenMP mismatch? */
3892 if (is_gimple_omp_oacc (stmt
)
3893 != is_gimple_omp_oacc (ctx
->stmt
))
3895 error_at (gimple_location (stmt
),
3896 "%s %qs construct inside of %s %qs region",
3897 (is_gimple_omp_oacc (stmt
)
3898 ? "OpenACC" : "OpenMP"), stmt_name
,
3899 (is_gimple_omp_oacc (ctx
->stmt
)
3900 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3903 if (is_gimple_omp_offloaded (ctx
->stmt
))
3905 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3906 if (is_gimple_omp_oacc (ctx
->stmt
))
3908 error_at (gimple_location (stmt
),
3909 "%qs construct inside of %qs region",
3910 stmt_name
, ctx_stmt_name
);
3915 if ((gimple_omp_target_kind (ctx
->stmt
)
3916 == GF_OMP_TARGET_KIND_REGION
)
3917 && (gimple_omp_target_kind (stmt
)
3918 == GF_OMP_TARGET_KIND_REGION
))
3920 c
= omp_find_clause (gimple_omp_target_clauses (stmt
),
3922 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
3925 warning_at (gimple_location (stmt
), 0,
3926 "%qs construct inside of %qs region",
3927 stmt_name
, ctx_stmt_name
);
3939 /* Helper function scan_omp.
3941 Callback for walk_tree or operators in walk_gimple_stmt used to
3942 scan for OMP directives in TP. */
3945 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3947 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3948 omp_context
*ctx
= (omp_context
*) wi
->info
;
3951 switch (TREE_CODE (t
))
3959 tree repl
= remap_decl (t
, &ctx
->cb
);
3960 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3966 if (ctx
&& TYPE_P (t
))
3967 *tp
= remap_type (t
, &ctx
->cb
);
3968 else if (!DECL_P (t
))
3973 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3974 if (tem
!= TREE_TYPE (t
))
3976 if (TREE_CODE (t
) == INTEGER_CST
)
3977 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3979 TREE_TYPE (t
) = tem
;
3989 /* Return true if FNDECL is a setjmp or a longjmp. */
3992 setjmp_or_longjmp_p (const_tree fndecl
)
3994 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3995 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
3998 tree declname
= DECL_NAME (fndecl
);
4000 || (DECL_CONTEXT (fndecl
) != NULL_TREE
4001 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
4002 || !TREE_PUBLIC (fndecl
))
4005 const char *name
= IDENTIFIER_POINTER (declname
);
4006 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
4009 /* Return true if FNDECL is an omp_* runtime API call. */
4012 omp_runtime_api_call (const_tree fndecl
)
4014 tree declname
= DECL_NAME (fndecl
);
4016 || (DECL_CONTEXT (fndecl
) != NULL_TREE
4017 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
4018 || !TREE_PUBLIC (fndecl
))
4021 const char *name
= IDENTIFIER_POINTER (declname
);
4022 if (!startswith (name
, "omp_"))
4025 static const char *omp_runtime_apis
[] =
4027 /* This array has 3 sections. First omp_* calls that don't
4028 have any suffixes. */
4037 "target_associate_ptr",
4038 "target_disassociate_ptr",
4040 "target_is_accessible",
4041 "target_is_present",
4043 "target_memcpy_async",
4044 "target_memcpy_rect",
4045 "target_memcpy_rect_async",
4047 /* Now omp_* calls that are available as omp_* and omp_*_; however, the
4048 DECL_NAME is always omp_* without tailing underscore. */
4050 "destroy_allocator",
4052 "destroy_nest_lock",
4056 "get_affinity_format",
4058 "get_default_allocator",
4059 "get_default_device",
4062 "get_initial_device",
4064 "get_max_active_levels",
4065 "get_max_task_priority",
4074 "get_partition_num_places",
4077 "get_supported_active_levels",
4079 "get_teams_thread_limit",
4088 "is_initial_device",
4090 "pause_resource_all",
4091 "set_affinity_format",
4092 "set_default_allocator",
4100 /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
4101 as DECL_NAME only omp_* and omp_*_8 appear. */
4103 "get_ancestor_thread_num",
4105 "get_partition_place_nums",
4106 "get_place_num_procs",
4107 "get_place_proc_ids",
4110 "set_default_device",
4112 "set_max_active_levels",
4117 "set_teams_thread_limit"
4121 for (unsigned i
= 0; i
< ARRAY_SIZE (omp_runtime_apis
); i
++)
4123 if (omp_runtime_apis
[i
] == NULL
)
4128 size_t len
= strlen (omp_runtime_apis
[i
]);
4129 if (strncmp (name
+ 4, omp_runtime_apis
[i
], len
) == 0
4130 && (name
[4 + len
] == '\0'
4131 || (mode
> 1 && strcmp (name
+ 4 + len
, "_8") == 0)))
4137 /* Helper function for scan_omp.
4139 Callback for walk_gimple_stmt used to scan for OMP directives in
4140 the current statement in GSI. */
4143 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
4144 struct walk_stmt_info
*wi
)
4146 gimple
*stmt
= gsi_stmt (*gsi
);
4147 omp_context
*ctx
= (omp_context
*) wi
->info
;
4149 if (gimple_has_location (stmt
))
4150 input_location
= gimple_location (stmt
);
4152 /* Check the nesting restrictions. */
4153 bool remove
= false;
4154 if (is_gimple_omp (stmt
))
4155 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
4156 else if (is_gimple_call (stmt
))
4158 tree fndecl
= gimple_call_fndecl (stmt
);
4162 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4163 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
4164 && setjmp_or_longjmp_p (fndecl
)
4168 error_at (gimple_location (stmt
),
4169 "setjmp/longjmp inside %<simd%> construct");
4171 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
4172 switch (DECL_FUNCTION_CODE (fndecl
))
4174 case BUILT_IN_GOMP_BARRIER
:
4175 case BUILT_IN_GOMP_CANCEL
:
4176 case BUILT_IN_GOMP_CANCELLATION_POINT
:
4177 case BUILT_IN_GOMP_TASKYIELD
:
4178 case BUILT_IN_GOMP_TASKWAIT
:
4179 case BUILT_IN_GOMP_TASKGROUP_START
:
4180 case BUILT_IN_GOMP_TASKGROUP_END
:
4181 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
4188 omp_context
*octx
= ctx
;
4189 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
&& ctx
->outer
)
4191 if (octx
->order_concurrent
&& omp_runtime_api_call (fndecl
))
4194 error_at (gimple_location (stmt
),
4195 "OpenMP runtime API call %qD in a region with "
4196 "%<order(concurrent)%> clause", fndecl
);
4198 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4199 && omp_runtime_api_call (fndecl
)
4200 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl
))
4201 != strlen ("omp_get_num_teams"))
4202 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl
)),
4203 "omp_get_num_teams") != 0)
4204 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl
))
4205 != strlen ("omp_get_team_num"))
4206 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl
)),
4207 "omp_get_team_num") != 0))
4210 error_at (gimple_location (stmt
),
4211 "OpenMP runtime API call %qD strictly nested in a "
4212 "%<teams%> region", fndecl
);
4214 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
4215 && (gimple_omp_target_kind (ctx
->stmt
)
4216 == GF_OMP_TARGET_KIND_REGION
)
4217 && omp_runtime_api_call (fndecl
))
4219 tree tgt_clauses
= gimple_omp_target_clauses (ctx
->stmt
);
4220 tree c
= omp_find_clause (tgt_clauses
, OMP_CLAUSE_DEVICE
);
4221 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
4222 error_at (gimple_location (stmt
),
4223 "OpenMP runtime API call %qD in a region with "
4224 "%<device(ancestor)%> clause", fndecl
);
4231 stmt
= gimple_build_nop ();
4232 gsi_replace (gsi
, stmt
, false);
4235 *handled_ops_p
= true;
4237 switch (gimple_code (stmt
))
4239 case GIMPLE_OMP_PARALLEL
:
4240 taskreg_nesting_level
++;
4241 scan_omp_parallel (gsi
, ctx
);
4242 taskreg_nesting_level
--;
4245 case GIMPLE_OMP_TASK
:
4246 taskreg_nesting_level
++;
4247 scan_omp_task (gsi
, ctx
);
4248 taskreg_nesting_level
--;
4251 case GIMPLE_OMP_FOR
:
4252 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
4253 == GF_OMP_FOR_KIND_SIMD
)
4254 && gimple_omp_for_combined_into_p (stmt
)
4255 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SCAN
)
4257 tree clauses
= gimple_omp_for_clauses (as_a
<gomp_for
*> (stmt
));
4258 tree c
= omp_find_clause (clauses
, OMP_CLAUSE_REDUCTION
);
4259 if (c
&& OMP_CLAUSE_REDUCTION_INSCAN (c
) && !seen_error ())
4261 scan_omp_simd_scan (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
4265 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
4266 == GF_OMP_FOR_KIND_SIMD
)
4267 && omp_maybe_offloaded_ctx (ctx
)
4268 && omp_max_simt_vf ()
4269 && gimple_omp_for_collapse (stmt
) == 1)
4270 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
4272 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
4275 case GIMPLE_OMP_SCOPE
:
4276 ctx
= new_omp_context (stmt
, ctx
);
4277 scan_sharing_clauses (gimple_omp_scope_clauses (stmt
), ctx
);
4278 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4281 case GIMPLE_OMP_SECTIONS
:
4282 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
4285 case GIMPLE_OMP_SINGLE
:
4286 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
4289 case GIMPLE_OMP_SCAN
:
4290 if (tree clauses
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)))
4292 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_INCLUSIVE
)
4293 ctx
->scan_inclusive
= true;
4294 else if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_EXCLUSIVE
)
4295 ctx
->scan_exclusive
= true;
4298 case GIMPLE_OMP_SECTION
:
4299 case GIMPLE_OMP_MASTER
:
4300 case GIMPLE_OMP_ORDERED
:
4301 case GIMPLE_OMP_CRITICAL
:
4302 ctx
= new_omp_context (stmt
, ctx
);
4303 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4306 case GIMPLE_OMP_MASKED
:
4307 ctx
= new_omp_context (stmt
, ctx
);
4308 scan_sharing_clauses (gimple_omp_masked_clauses (stmt
), ctx
);
4309 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4312 case GIMPLE_OMP_TASKGROUP
:
4313 ctx
= new_omp_context (stmt
, ctx
);
4314 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
4315 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4318 case GIMPLE_OMP_TARGET
:
4319 if (is_gimple_omp_offloaded (stmt
))
4321 taskreg_nesting_level
++;
4322 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
4323 taskreg_nesting_level
--;
4326 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
4329 case GIMPLE_OMP_TEAMS
:
4330 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
4332 taskreg_nesting_level
++;
4333 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
4334 taskreg_nesting_level
--;
4337 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
4344 *handled_ops_p
= false;
4346 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
4348 var
= DECL_CHAIN (var
))
4349 insert_decl_map (&ctx
->cb
, var
, var
);
4353 *handled_ops_p
= false;
4361 /* Scan all the statements starting at the current statement. CTX
4362 contains context information about the OMP directives and
4363 clauses found during the scan. */
4366 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
4368 location_t saved_location
;
4369 struct walk_stmt_info wi
;
4371 memset (&wi
, 0, sizeof (wi
));
4373 wi
.want_locations
= true;
4375 saved_location
= input_location
;
4376 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
4377 input_location
= saved_location
;
4380 /* Re-gimplification and code generation routines. */
4382 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4383 of BIND if in a method. */
4386 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
4388 if (DECL_ARGUMENTS (current_function_decl
)
4389 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
4390 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
4393 tree vars
= gimple_bind_vars (bind
);
4394 for (tree
*pvar
= &vars
; *pvar
; )
4395 if (omp_member_access_dummy_var (*pvar
))
4396 *pvar
= DECL_CHAIN (*pvar
);
4398 pvar
= &DECL_CHAIN (*pvar
);
4399 gimple_bind_set_vars (bind
, vars
);
4403 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4404 block and its subblocks. */
4407 remove_member_access_dummy_vars (tree block
)
4409 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
4410 if (omp_member_access_dummy_var (*pvar
))
4411 *pvar
= DECL_CHAIN (*pvar
);
4413 pvar
= &DECL_CHAIN (*pvar
);
4415 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
4416 remove_member_access_dummy_vars (block
);
4419 /* If a context was created for STMT when it was scanned, return it. */
4421 static omp_context
*
4422 maybe_lookup_ctx (gimple
*stmt
)
4425 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
4426 return n
? (omp_context
*) n
->value
: NULL
;
4430 /* Find the mapping for DECL in CTX or the immediately enclosing
4431 context that has a mapping for DECL.
4433 If CTX is a nested parallel directive, we may have to use the decl
4434 mappings created in CTX's parent context. Suppose that we have the
4435 following parallel nesting (variable UIDs showed for clarity):
4438 #omp parallel shared(iD.1562) -> outer parallel
4439 iD.1562 = iD.1562 + 1;
4441 #omp parallel shared (iD.1562) -> inner parallel
4442 iD.1562 = iD.1562 - 1;
4444 Each parallel structure will create a distinct .omp_data_s structure
4445 for copying iD.1562 in/out of the directive:
4447 outer parallel .omp_data_s.1.i -> iD.1562
4448 inner parallel .omp_data_s.2.i -> iD.1562
4450 A shared variable mapping will produce a copy-out operation before
4451 the parallel directive and a copy-in operation after it. So, in
4452 this case we would have:
4455 .omp_data_o.1.i = iD.1562;
4456 #omp parallel shared(iD.1562) -> outer parallel
4457 .omp_data_i.1 = &.omp_data_o.1
4458 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4460 .omp_data_o.2.i = iD.1562; -> **
4461 #omp parallel shared(iD.1562) -> inner parallel
4462 .omp_data_i.2 = &.omp_data_o.2
4463 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4466 ** This is a problem. The symbol iD.1562 cannot be referenced
4467 inside the body of the outer parallel region. But since we are
4468 emitting this copy operation while expanding the inner parallel
4469 directive, we need to access the CTX structure of the outer
4470 parallel directive to get the correct mapping:
4472 .omp_data_o.2.i = .omp_data_i.1->i
4474 Since there may be other workshare or parallel directives enclosing
4475 the parallel directive, it may be necessary to walk up the context
4476 parent chain. This is not a problem in general because nested
4477 parallelism happens only rarely. */
4480 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4485 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4486 t
= maybe_lookup_decl (decl
, up
);
4488 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
4490 return t
? t
: decl
;
4494 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4495 in outer contexts. */
4498 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4503 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4504 t
= maybe_lookup_decl (decl
, up
);
4506 return t
? t
: decl
;
4510 /* Construct the initialization value for reduction operation OP. */
4513 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
4522 case TRUTH_ORIF_EXPR
:
4523 case TRUTH_XOR_EXPR
:
4525 return build_zero_cst (type
);
4528 case TRUTH_AND_EXPR
:
4529 case TRUTH_ANDIF_EXPR
:
4531 return fold_convert_loc (loc
, type
, integer_one_node
);
4534 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
4537 if (SCALAR_FLOAT_TYPE_P (type
))
4539 REAL_VALUE_TYPE min
;
4540 if (HONOR_INFINITIES (type
))
4541 real_arithmetic (&min
, NEGATE_EXPR
, &dconstinf
, NULL
);
4543 real_maxval (&min
, 1, TYPE_MODE (type
));
4544 return build_real (type
, min
);
4546 else if (POINTER_TYPE_P (type
))
4549 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4550 return wide_int_to_tree (type
, min
);
4554 gcc_assert (INTEGRAL_TYPE_P (type
));
4555 return TYPE_MIN_VALUE (type
);
4559 if (SCALAR_FLOAT_TYPE_P (type
))
4561 REAL_VALUE_TYPE max
;
4562 if (HONOR_INFINITIES (type
))
4565 real_maxval (&max
, 0, TYPE_MODE (type
));
4566 return build_real (type
, max
);
4568 else if (POINTER_TYPE_P (type
))
4571 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4572 return wide_int_to_tree (type
, max
);
4576 gcc_assert (INTEGRAL_TYPE_P (type
));
4577 return TYPE_MAX_VALUE (type
);
4585 /* Construct the initialization value for reduction CLAUSE. */
4588 omp_reduction_init (tree clause
, tree type
)
4590 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
4591 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
4594 /* Return alignment to be assumed for var in CLAUSE, which should be
4595 OMP_CLAUSE_ALIGNED. */
4598 omp_clause_aligned_alignment (tree clause
)
4600 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
4601 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
4603 /* Otherwise return implementation defined alignment. */
4604 unsigned int al
= 1;
4605 opt_scalar_mode mode_iter
;
4606 auto_vector_modes modes
;
4607 targetm
.vectorize
.autovectorize_vector_modes (&modes
, true);
4608 static enum mode_class classes
[]
4609 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
4610 for (int i
= 0; i
< 4; i
+= 2)
4611 /* The for loop above dictates that we only walk through scalar classes. */
4612 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
4614 scalar_mode mode
= mode_iter
.require ();
4615 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
4616 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
4618 machine_mode alt_vmode
;
4619 for (unsigned int j
= 0; j
< modes
.length (); ++j
)
4620 if (related_vector_mode (modes
[j
], mode
).exists (&alt_vmode
)
4621 && known_ge (GET_MODE_SIZE (alt_vmode
), GET_MODE_SIZE (vmode
)))
4624 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
4625 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
4627 type
= build_vector_type_for_mode (type
, vmode
);
4628 if (TYPE_MODE (type
) != vmode
)
4630 if (TYPE_ALIGN_UNIT (type
) > al
)
4631 al
= TYPE_ALIGN_UNIT (type
);
4633 return build_int_cst (integer_type_node
, al
);
4637 /* This structure is part of the interface between lower_rec_simd_input_clauses
4638 and lower_rec_input_clauses. */
4640 class omplow_simd_context
{
4642 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4646 vec
<tree
, va_heap
> simt_eargs
;
4647 gimple_seq simt_dlist
;
4648 poly_uint64_pod max_vf
;
4652 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4656 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
4657 omplow_simd_context
*sctx
, tree
&ivar
,
4658 tree
&lvar
, tree
*rvar
= NULL
,
4661 if (known_eq (sctx
->max_vf
, 0U))
4663 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
4664 if (maybe_gt (sctx
->max_vf
, 1U))
4666 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4667 OMP_CLAUSE_SAFELEN
);
4670 poly_uint64 safe_len
;
4671 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4672 || maybe_lt (safe_len
, 1U))
4675 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
4678 if (sctx
->is_simt
&& !known_eq (sctx
->max_vf
, 1U))
4680 for (tree c
= gimple_omp_for_clauses (ctx
->stmt
); c
;
4681 c
= OMP_CLAUSE_CHAIN (c
))
4683 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4686 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4688 /* UDR reductions are not supported yet for SIMT, disable
4694 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c
))
4695 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var
)))
4697 /* Doing boolean operations on non-integral types is
4698 for conformance only, it's not worth supporting this
4705 if (maybe_gt (sctx
->max_vf
, 1U))
4707 sctx
->idx
= create_tmp_var (unsigned_type_node
);
4708 sctx
->lane
= create_tmp_var (unsigned_type_node
);
4711 if (known_eq (sctx
->max_vf
, 1U))
4716 if (is_gimple_reg (new_var
))
4718 ivar
= lvar
= new_var
;
4721 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
4722 ivar
= lvar
= create_tmp_var (type
);
4723 TREE_ADDRESSABLE (ivar
) = 1;
4724 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
4725 NULL
, DECL_ATTRIBUTES (ivar
));
4726 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
4727 tree clobber
= build_clobber (type
);
4728 gimple
*g
= gimple_build_assign (ivar
, clobber
);
4729 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
4733 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
4734 tree avar
= create_tmp_var_raw (atype
);
4735 if (TREE_ADDRESSABLE (new_var
))
4736 TREE_ADDRESSABLE (avar
) = 1;
4737 DECL_ATTRIBUTES (avar
)
4738 = tree_cons (get_identifier ("omp simd array"), NULL
,
4739 DECL_ATTRIBUTES (avar
));
4740 gimple_add_tmp_var (avar
);
4742 if (rvar
&& !ctx
->for_simd_scan_phase
)
4744 /* For inscan reductions, create another array temporary,
4745 which will hold the reduced value. */
4746 iavar
= create_tmp_var_raw (atype
);
4747 if (TREE_ADDRESSABLE (new_var
))
4748 TREE_ADDRESSABLE (iavar
) = 1;
4749 DECL_ATTRIBUTES (iavar
)
4750 = tree_cons (get_identifier ("omp simd array"), NULL
,
4751 tree_cons (get_identifier ("omp simd inscan"), NULL
,
4752 DECL_ATTRIBUTES (iavar
)));
4753 gimple_add_tmp_var (iavar
);
4754 ctx
->cb
.decl_map
->put (avar
, iavar
);
4755 if (sctx
->lastlane
== NULL_TREE
)
4756 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
4757 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
4758 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
4759 TREE_THIS_NOTRAP (*rvar
) = 1;
4761 if (ctx
->scan_exclusive
)
4763 /* And for exclusive scan yet another one, which will
4764 hold the value during the scan phase. */
4765 tree savar
= create_tmp_var_raw (atype
);
4766 if (TREE_ADDRESSABLE (new_var
))
4767 TREE_ADDRESSABLE (savar
) = 1;
4768 DECL_ATTRIBUTES (savar
)
4769 = tree_cons (get_identifier ("omp simd array"), NULL
,
4770 tree_cons (get_identifier ("omp simd inscan "
4772 DECL_ATTRIBUTES (savar
)));
4773 gimple_add_tmp_var (savar
);
4774 ctx
->cb
.decl_map
->put (iavar
, savar
);
4775 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
4776 sctx
->idx
, NULL_TREE
, NULL_TREE
);
4777 TREE_THIS_NOTRAP (*rvar2
) = 1;
4780 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
4781 NULL_TREE
, NULL_TREE
);
4782 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
4783 NULL_TREE
, NULL_TREE
);
4784 TREE_THIS_NOTRAP (ivar
) = 1;
4785 TREE_THIS_NOTRAP (lvar
) = 1;
4787 if (DECL_P (new_var
))
4789 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4790 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4795 /* Helper function of lower_rec_input_clauses. For a reference
4796 in simd reduction, add an underlying variable it will reference. */
4799 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
4801 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
4802 if (TREE_CONSTANT (z
))
4804 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
4805 get_name (new_vard
));
4806 gimple_add_tmp_var (z
);
4807 TREE_ADDRESSABLE (z
) = 1;
4808 z
= build_fold_addr_expr_loc (loc
, z
);
4809 gimplify_assign (new_vard
, z
, ilist
);
4813 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4814 code to emit (type) (tskred_temp[idx]). */
4817 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
4820 unsigned HOST_WIDE_INT sz
4821 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
4822 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
4823 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
4825 tree v
= create_tmp_var (pointer_sized_int_node
);
4826 gimple
*g
= gimple_build_assign (v
, r
);
4827 gimple_seq_add_stmt (ilist
, g
);
4828 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
4830 v
= create_tmp_var (type
);
4831 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
4832 gimple_seq_add_stmt (ilist
, g
);
4837 /* Lower early initialization of privatized variable NEW_VAR
4838 if it needs an allocator (has allocate clause). */
4841 lower_private_allocate (tree var
, tree new_var
, tree
&allocator
,
4842 tree
&allocate_ptr
, gimple_seq
*ilist
,
4843 omp_context
*ctx
, bool is_ref
, tree size
)
4847 gcc_assert (allocate_ptr
== NULL_TREE
);
4848 if (ctx
->allocate_map
4849 && (DECL_P (new_var
) || (TYPE_P (new_var
) && size
)))
4850 if (tree
*allocatorp
= ctx
->allocate_map
->get (var
))
4851 allocator
= *allocatorp
;
4852 if (allocator
== NULL_TREE
)
4854 if (!is_ref
&& omp_privatize_by_reference (var
))
4856 allocator
= NULL_TREE
;
4860 unsigned HOST_WIDE_INT ialign
= 0;
4861 if (TREE_CODE (allocator
) == TREE_LIST
)
4863 ialign
= tree_to_uhwi (TREE_VALUE (allocator
));
4864 allocator
= TREE_PURPOSE (allocator
);
4866 if (TREE_CODE (allocator
) != INTEGER_CST
)
4867 allocator
= build_outer_var_ref (allocator
, ctx
, OMP_CLAUSE_ALLOCATE
);
4868 allocator
= fold_convert (pointer_sized_int_node
, allocator
);
4869 if (TREE_CODE (allocator
) != INTEGER_CST
)
4871 tree var
= create_tmp_var (TREE_TYPE (allocator
));
4872 gimplify_assign (var
, allocator
, ilist
);
4876 tree ptr_type
, align
, sz
= size
;
4877 if (TYPE_P (new_var
))
4879 ptr_type
= build_pointer_type (new_var
);
4880 ialign
= MAX (ialign
, TYPE_ALIGN_UNIT (new_var
));
4884 ptr_type
= build_pointer_type (TREE_TYPE (TREE_TYPE (new_var
)));
4885 ialign
= MAX (ialign
, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type
)));
4889 ptr_type
= build_pointer_type (TREE_TYPE (new_var
));
4890 ialign
= MAX (ialign
, DECL_ALIGN_UNIT (new_var
));
4891 if (sz
== NULL_TREE
)
4892 sz
= fold_convert (size_type_node
, DECL_SIZE_UNIT (new_var
));
4894 align
= build_int_cst (size_type_node
, ialign
);
4895 if (TREE_CODE (sz
) != INTEGER_CST
)
4897 tree szvar
= create_tmp_var (size_type_node
);
4898 gimplify_assign (szvar
, sz
, ilist
);
4901 allocate_ptr
= create_tmp_var (ptr_type
);
4902 tree a
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
4903 gimple
*g
= gimple_build_call (a
, 3, align
, sz
, allocator
);
4904 gimple_call_set_lhs (g
, allocate_ptr
);
4905 gimple_seq_add_stmt (ilist
, g
);
4908 tree x
= build_simple_mem_ref (allocate_ptr
);
4909 TREE_THIS_NOTRAP (x
) = 1;
4910 SET_DECL_VALUE_EXPR (new_var
, x
);
4911 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
   from the receiver (aka child) side and initializers for REFERENCE_TYPE
   private variables.  Initialization statements go in ILIST, while calls
   to destructors go in DLIST.  */
4922 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
4923 omp_context
*ctx
, struct omp_for_data
*fd
)
4925 tree c
, copyin_seq
, x
, ptr
;
4926 bool copyin_by_ref
= false;
4927 bool lastprivate_firstprivate
= false;
4928 bool reduction_omp_orig_ref
= false;
4930 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4931 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
4932 omplow_simd_context sctx
= omplow_simd_context ();
4933 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
4934 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
4935 gimple_seq llist
[4] = { };
4936 tree nonconst_simd_if
= NULL_TREE
;
4939 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
4941 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4942 with data sharing clauses referencing variable sized vars. That
4943 is unnecessarily hard to support and very unlikely to result in
4944 vectorized code anyway. */
4946 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4947 switch (OMP_CLAUSE_CODE (c
))
4949 case OMP_CLAUSE_LINEAR
:
4950 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4953 case OMP_CLAUSE_PRIVATE
:
4954 case OMP_CLAUSE_FIRSTPRIVATE
:
4955 case OMP_CLAUSE_LASTPRIVATE
:
4956 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
4958 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c
)))
4960 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4961 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4965 case OMP_CLAUSE_REDUCTION
:
4966 case OMP_CLAUSE_IN_REDUCTION
:
4967 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
4968 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
4970 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c
)))
4972 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4973 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4978 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
4980 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
4981 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
4983 case OMP_CLAUSE_SIMDLEN
:
4984 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
4987 case OMP_CLAUSE__CONDTEMP_
:
4988 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4996 /* Add a placeholder for simduid. */
4997 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
4998 sctx
.simt_eargs
.safe_push (NULL_TREE
);
5000 unsigned task_reduction_cnt
= 0;
5001 unsigned task_reduction_cntorig
= 0;
5002 unsigned task_reduction_cnt_full
= 0;
5003 unsigned task_reduction_cntorig_full
= 0;
5004 unsigned task_reduction_other_cnt
= 0;
5005 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
5006 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
5007 /* Do all the fixed sized types in the first pass, and the variable sized
5008 types in the second pass. This makes sure that the scalar arguments to
5009 the variable sized types are processed before we use them in the
5010 variable sized operations. For task reductions we use 4 passes, in the
5011 first two we ignore them, in the third one gather arguments for
5012 GOMP_task_reduction_remap call and in the last pass actually handle
5013 the task reductions. */
5014 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
5017 if (pass
== 2 && task_reduction_cnt
)
5020 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
5021 + task_reduction_cntorig
);
5022 tskred_avar
= create_tmp_var_raw (tskred_atype
);
5023 gimple_add_tmp_var (tskred_avar
);
5024 TREE_ADDRESSABLE (tskred_avar
) = 1;
5025 task_reduction_cnt_full
= task_reduction_cnt
;
5026 task_reduction_cntorig_full
= task_reduction_cntorig
;
5028 else if (pass
== 3 && task_reduction_cnt
)
5030 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
5032 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
5033 size_int (task_reduction_cntorig
),
5034 build_fold_addr_expr (tskred_avar
));
5035 gimple_seq_add_stmt (ilist
, g
);
5037 if (pass
== 3 && task_reduction_other_cnt
)
5039 /* For reduction clauses, build
5040 tskred_base = (void *) tskred_temp[2]
5041 + omp_get_thread_num () * tskred_temp[1]
5042 or if tskred_temp[1] is known to be constant, that constant
5043 directly. This is the start of the private reduction copy block
5044 for the current thread. */
5045 tree v
= create_tmp_var (integer_type_node
);
5046 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
5047 gimple
*g
= gimple_build_call (x
, 0);
5048 gimple_call_set_lhs (g
, v
);
5049 gimple_seq_add_stmt (ilist
, g
);
5050 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
5051 tskred_temp
= OMP_CLAUSE_DECL (c
);
5052 if (is_taskreg_ctx (ctx
))
5053 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
5054 tree v2
= create_tmp_var (sizetype
);
5055 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
5056 gimple_seq_add_stmt (ilist
, g
);
5057 if (ctx
->task_reductions
[0])
5058 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
5060 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
5061 tree v3
= create_tmp_var (sizetype
);
5062 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
5063 gimple_seq_add_stmt (ilist
, g
);
5064 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
5065 tskred_base
= create_tmp_var (ptr_type_node
);
5066 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
5067 gimple_seq_add_stmt (ilist
, g
);
5069 task_reduction_cnt
= 0;
5070 task_reduction_cntorig
= 0;
5071 task_reduction_other_cnt
= 0;
5072 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5074 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
5077 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5078 bool task_reduction_p
= false;
5079 bool task_reduction_needs_orig_p
= false;
5080 tree cond
= NULL_TREE
;
5081 tree allocator
, allocate_ptr
;
5085 case OMP_CLAUSE_PRIVATE
:
5086 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
5089 case OMP_CLAUSE_SHARED
:
5090 /* Ignore shared directives in teams construct inside
5091 of target construct. */
5092 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5093 && !is_host_teams_ctx (ctx
))
5095 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
5097 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
5098 || is_global_var (OMP_CLAUSE_DECL (c
)));
5101 case OMP_CLAUSE_FIRSTPRIVATE
:
5102 case OMP_CLAUSE_COPYIN
:
5104 case OMP_CLAUSE_LINEAR
:
5105 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
5106 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
5107 lastprivate_firstprivate
= true;
5109 case OMP_CLAUSE_REDUCTION
:
5110 case OMP_CLAUSE_IN_REDUCTION
:
5111 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
5112 || is_task_ctx (ctx
)
5113 || OMP_CLAUSE_REDUCTION_TASK (c
))
5115 task_reduction_p
= true;
5116 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
5118 task_reduction_other_cnt
++;
5123 task_reduction_cnt
++;
5124 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5126 var
= OMP_CLAUSE_DECL (c
);
5127 /* If var is a global variable that isn't privatized
5128 in outer contexts, we don't need to look up the
5129 original address, it is always the address of the
5130 global variable itself. */
5132 || omp_privatize_by_reference (var
)
5134 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
5136 task_reduction_needs_orig_p
= true;
5137 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5138 task_reduction_cntorig
++;
5142 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5143 reduction_omp_orig_ref
= true;
5145 case OMP_CLAUSE__REDUCTEMP_
:
5146 if (!is_taskreg_ctx (ctx
))
5149 case OMP_CLAUSE__LOOPTEMP_
:
5150 /* Handle _looptemp_/_reductemp_ clauses only on
5155 case OMP_CLAUSE_LASTPRIVATE
:
5156 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5158 lastprivate_firstprivate
= true;
5159 if (pass
!= 0 || is_taskloop_ctx (ctx
))
5162 /* Even without corresponding firstprivate, if
5163 decl is Fortran allocatable, it needs outer var
5166 && lang_hooks
.decls
.omp_private_outer_ref
5167 (OMP_CLAUSE_DECL (c
)))
5168 lastprivate_firstprivate
= true;
5170 case OMP_CLAUSE_ALIGNED
:
5173 var
= OMP_CLAUSE_DECL (c
);
5174 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
5175 && !is_global_var (var
))
5177 new_var
= maybe_lookup_decl (var
, ctx
);
5178 if (new_var
== NULL_TREE
)
5179 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5180 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
5181 tree alarg
= omp_clause_aligned_alignment (c
);
5182 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
5183 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
5184 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5185 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5186 gimplify_and_add (x
, ilist
);
5188 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
5189 && is_global_var (var
))
5191 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
5192 new_var
= lookup_decl (var
, ctx
);
5193 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5194 t
= build_fold_addr_expr_loc (clause_loc
, t
);
5195 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
5196 tree alarg
= omp_clause_aligned_alignment (c
);
5197 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
5198 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
5199 t
= fold_convert_loc (clause_loc
, ptype
, t
);
5200 x
= create_tmp_var (ptype
);
5201 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
5202 gimplify_and_add (t
, ilist
);
5203 t
= build_simple_mem_ref_loc (clause_loc
, x
);
5204 SET_DECL_VALUE_EXPR (new_var
, t
);
5205 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5208 case OMP_CLAUSE__CONDTEMP_
:
5209 if (is_parallel_ctx (ctx
)
5210 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
5217 if (task_reduction_p
!= (pass
>= 2))
5220 allocator
= NULL_TREE
;
5221 allocate_ptr
= NULL_TREE
;
5222 new_var
= var
= OMP_CLAUSE_DECL (c
);
5223 if ((c_kind
== OMP_CLAUSE_REDUCTION
5224 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5225 && TREE_CODE (var
) == MEM_REF
)
5227 var
= TREE_OPERAND (var
, 0);
5228 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
5229 var
= TREE_OPERAND (var
, 0);
5230 if (TREE_CODE (var
) == INDIRECT_REF
5231 || TREE_CODE (var
) == ADDR_EXPR
)
5232 var
= TREE_OPERAND (var
, 0);
5233 if (is_variable_sized (var
))
5235 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
5236 var
= DECL_VALUE_EXPR (var
);
5237 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
5238 var
= TREE_OPERAND (var
, 0);
5239 gcc_assert (DECL_P (var
));
5243 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
&& is_omp_target (ctx
->stmt
))
5245 splay_tree_key key
= (splay_tree_key
) &DECL_CONTEXT (var
);
5246 new_var
= (tree
) splay_tree_lookup (ctx
->field_map
, key
)->value
;
5248 else if (c_kind
!= OMP_CLAUSE_COPYIN
)
5249 new_var
= lookup_decl (var
, ctx
);
5251 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
5256 /* C/C++ array section reductions. */
5257 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5258 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5259 && var
!= OMP_CLAUSE_DECL (c
))
5264 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
5265 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
5267 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
5269 tree b
= TREE_OPERAND (orig_var
, 1);
5270 if (is_omp_target (ctx
->stmt
))
5273 b
= maybe_lookup_decl (b
, ctx
);
5276 b
= TREE_OPERAND (orig_var
, 1);
5277 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
5279 if (integer_zerop (bias
))
5283 bias
= fold_convert_loc (clause_loc
,
5284 TREE_TYPE (b
), bias
);
5285 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5286 TREE_TYPE (b
), b
, bias
);
5288 orig_var
= TREE_OPERAND (orig_var
, 0);
5292 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5293 if (is_global_var (out
)
5294 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
5295 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
5296 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
5299 else if (is_omp_target (ctx
->stmt
))
5303 bool by_ref
= use_pointer_for_field (var
, NULL
);
5304 x
= build_receiver_ref (var
, by_ref
, ctx
);
5305 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
5306 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
5308 x
= build_fold_addr_expr (x
);
5310 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
5311 x
= build_simple_mem_ref (x
);
5312 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
5314 if (var
== TREE_OPERAND (orig_var
, 0))
5315 x
= build_fold_addr_expr (x
);
5317 bias
= fold_convert (sizetype
, bias
);
5318 x
= fold_convert (ptr_type_node
, x
);
5319 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
5320 TREE_TYPE (x
), x
, bias
);
5321 unsigned cnt
= task_reduction_cnt
- 1;
5322 if (!task_reduction_needs_orig_p
)
5323 cnt
+= (task_reduction_cntorig_full
5324 - task_reduction_cntorig
);
5326 cnt
= task_reduction_cntorig
- 1;
5327 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5328 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5329 gimplify_assign (r
, x
, ilist
);
5333 if (TREE_CODE (orig_var
) == INDIRECT_REF
5334 || TREE_CODE (orig_var
) == ADDR_EXPR
)
5335 orig_var
= TREE_OPERAND (orig_var
, 0);
5336 tree d
= OMP_CLAUSE_DECL (c
);
5337 tree type
= TREE_TYPE (d
);
5338 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
5339 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
5341 const char *name
= get_name (orig_var
);
5342 if (pass
!= 3 && !TREE_CONSTANT (v
))
5345 if (is_omp_target (ctx
->stmt
))
5348 t
= maybe_lookup_decl (v
, ctx
);
5352 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5353 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
5354 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5356 build_int_cst (TREE_TYPE (v
), 1));
5357 sz
= fold_build2_loc (clause_loc
, MULT_EXPR
,
5359 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5363 tree xv
= create_tmp_var (ptr_type_node
);
5364 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5366 unsigned cnt
= task_reduction_cnt
- 1;
5367 if (!task_reduction_needs_orig_p
)
5368 cnt
+= (task_reduction_cntorig_full
5369 - task_reduction_cntorig
);
5371 cnt
= task_reduction_cntorig
- 1;
5372 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5373 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5375 gimple
*g
= gimple_build_assign (xv
, x
);
5376 gimple_seq_add_stmt (ilist
, g
);
5380 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5382 if (ctx
->task_reductions
[1 + idx
])
5383 off
= fold_convert (sizetype
,
5384 ctx
->task_reductions
[1 + idx
]);
5386 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5388 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
5390 gimple_seq_add_stmt (ilist
, g
);
5392 x
= fold_convert (build_pointer_type (boolean_type_node
),
5394 if (TREE_CONSTANT (v
))
5395 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
5396 TYPE_SIZE_UNIT (type
));
5400 if (is_omp_target (ctx
->stmt
))
5403 t
= maybe_lookup_decl (v
, ctx
);
5407 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5408 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
5410 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5412 build_int_cst (TREE_TYPE (v
), 1));
5413 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
5415 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5416 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5418 cond
= create_tmp_var (TREE_TYPE (x
));
5419 gimplify_assign (cond
, x
, ilist
);
5422 else if (lower_private_allocate (var
, type
, allocator
,
5423 allocate_ptr
, ilist
, ctx
,
5426 ? TYPE_SIZE_UNIT (type
)
5429 else if (TREE_CONSTANT (v
))
5431 x
= create_tmp_var_raw (type
, name
);
5432 gimple_add_tmp_var (x
);
5433 TREE_ADDRESSABLE (x
) = 1;
5434 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5439 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5440 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
5441 x
= build_call_expr_loc (clause_loc
, atmp
, 2, sz
, al
);
5444 tree ptype
= build_pointer_type (TREE_TYPE (type
));
5445 x
= fold_convert_loc (clause_loc
, ptype
, x
);
5446 tree y
= create_tmp_var (ptype
, name
);
5447 gimplify_assign (y
, x
, ilist
);
5451 if (!integer_zerop (bias
))
5453 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
5455 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
5457 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
5458 pointer_sized_int_node
, yb
, bias
);
5459 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
5460 yb
= create_tmp_var (ptype
, name
);
5461 gimplify_assign (yb
, x
, ilist
);
5465 d
= TREE_OPERAND (d
, 0);
5466 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
5467 d
= TREE_OPERAND (d
, 0);
5468 if (TREE_CODE (d
) == ADDR_EXPR
)
5470 if (orig_var
!= var
)
5472 gcc_assert (is_variable_sized (orig_var
));
5473 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
5475 gimplify_assign (new_var
, x
, ilist
);
5476 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
5477 tree t
= build_fold_indirect_ref (new_var
);
5478 DECL_IGNORED_P (new_var
) = 0;
5479 TREE_THIS_NOTRAP (t
) = 1;
5480 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
5481 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
5485 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
5486 build_int_cst (ptype
, 0));
5487 SET_DECL_VALUE_EXPR (new_var
, x
);
5488 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5493 gcc_assert (orig_var
== var
);
5494 if (TREE_CODE (d
) == INDIRECT_REF
)
5496 x
= create_tmp_var (ptype
, name
);
5497 TREE_ADDRESSABLE (x
) = 1;
5498 gimplify_assign (x
, yb
, ilist
);
5499 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5501 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5502 gimplify_assign (new_var
, x
, ilist
);
5504 /* GOMP_taskgroup_reduction_register memsets the whole
5505 array to zero. If the initializer is zero, we don't
5506 need to initialize it again, just mark it as ever
5507 used unconditionally, i.e. cond = true. */
5509 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
5510 && initializer_zerop (omp_reduction_init (c
,
5513 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
5515 gimple_seq_add_stmt (ilist
, g
);
5518 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5522 if (!is_parallel_ctx (ctx
))
5524 tree condv
= create_tmp_var (boolean_type_node
);
5525 g
= gimple_build_assign (condv
,
5526 build_simple_mem_ref (cond
));
5527 gimple_seq_add_stmt (ilist
, g
);
5528 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
5529 g
= gimple_build_cond (NE_EXPR
, condv
,
5530 boolean_false_node
, end
, lab1
);
5531 gimple_seq_add_stmt (ilist
, g
);
5532 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
5534 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5536 gimple_seq_add_stmt (ilist
, g
);
5539 tree y1
= create_tmp_var (ptype
);
5540 gimplify_assign (y1
, y
, ilist
);
5541 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
5542 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
5543 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
5544 if (task_reduction_needs_orig_p
)
5546 y3
= create_tmp_var (ptype
);
5548 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5549 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5550 size_int (task_reduction_cnt_full
5551 + task_reduction_cntorig
- 1),
5552 NULL_TREE
, NULL_TREE
);
5555 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5556 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
5559 gimplify_assign (y3
, ref
, ilist
);
5561 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
5565 y2
= create_tmp_var (ptype
);
5566 gimplify_assign (y2
, y
, ilist
);
5568 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5570 tree ref
= build_outer_var_ref (var
, ctx
);
5571 /* For ref build_outer_var_ref already performs this. */
5572 if (TREE_CODE (d
) == INDIRECT_REF
)
5573 gcc_assert (omp_privatize_by_reference (var
));
5574 else if (TREE_CODE (d
) == ADDR_EXPR
)
5575 ref
= build_fold_addr_expr (ref
);
5576 else if (omp_privatize_by_reference (var
))
5577 ref
= build_fold_addr_expr (ref
);
5578 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
5579 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
5580 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5582 y3
= create_tmp_var (ptype
);
5583 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
5587 y4
= create_tmp_var (ptype
);
5588 gimplify_assign (y4
, ref
, dlist
);
5592 tree i
= create_tmp_var (TREE_TYPE (v
));
5593 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
5594 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5595 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
5598 i2
= create_tmp_var (TREE_TYPE (v
));
5599 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
5600 body2
= create_artificial_label (UNKNOWN_LOCATION
);
5601 end2
= create_artificial_label (UNKNOWN_LOCATION
);
5602 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
5604 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5606 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5607 tree decl_placeholder
5608 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
5609 SET_DECL_VALUE_EXPR (decl_placeholder
,
5610 build_simple_mem_ref (y1
));
5611 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
5612 SET_DECL_VALUE_EXPR (placeholder
,
5613 y3
? build_simple_mem_ref (y3
)
5615 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5616 x
= lang_hooks
.decls
.omp_clause_default_ctor
5617 (c
, build_simple_mem_ref (y1
),
5618 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
5620 gimplify_and_add (x
, ilist
);
5621 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5623 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5624 lower_omp (&tseq
, ctx
);
5625 gimple_seq_add_seq (ilist
, tseq
);
5627 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5630 SET_DECL_VALUE_EXPR (decl_placeholder
,
5631 build_simple_mem_ref (y2
));
5632 SET_DECL_VALUE_EXPR (placeholder
,
5633 build_simple_mem_ref (y4
));
5634 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5635 lower_omp (&tseq
, ctx
);
5636 gimple_seq_add_seq (dlist
, tseq
);
5637 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5639 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5640 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
5643 x
= lang_hooks
.decls
.omp_clause_dtor
5644 (c
, build_simple_mem_ref (y2
));
5646 gimplify_and_add (x
, dlist
);
5651 x
= omp_reduction_init (c
, TREE_TYPE (type
));
5652 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5654 /* reduction(-:var) sums up the partial results, so it
5655 acts identically to reduction(+:var). */
5656 if (code
== MINUS_EXPR
)
5659 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
5662 x
= build2 (code
, TREE_TYPE (type
),
5663 build_simple_mem_ref (y4
),
5664 build_simple_mem_ref (y2
));
5665 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
5669 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
5670 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5671 gimple_seq_add_stmt (ilist
, g
);
5674 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
5675 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5676 gimple_seq_add_stmt (ilist
, g
);
5678 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
5679 build_int_cst (TREE_TYPE (i
), 1));
5680 gimple_seq_add_stmt (ilist
, g
);
5681 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
5682 gimple_seq_add_stmt (ilist
, g
);
5683 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
5686 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
5687 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5688 gimple_seq_add_stmt (dlist
, g
);
5691 g
= gimple_build_assign
5692 (y4
, POINTER_PLUS_EXPR
, y4
,
5693 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5694 gimple_seq_add_stmt (dlist
, g
);
5696 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
5697 build_int_cst (TREE_TYPE (i2
), 1));
5698 gimple_seq_add_stmt (dlist
, g
);
5699 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
5700 gimple_seq_add_stmt (dlist
, g
);
5701 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
5705 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
5706 g
= gimple_build_call (f
, 2, allocate_ptr
, allocator
);
5707 gimple_seq_add_stmt (dlist
, g
);
5713 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5714 if (is_global_var (out
))
5716 else if (is_omp_target (ctx
->stmt
))
5720 bool by_ref
= use_pointer_for_field (var
, ctx
);
5721 x
= build_receiver_ref (var
, by_ref
, ctx
);
5723 if (!omp_privatize_by_reference (var
))
5724 x
= build_fold_addr_expr (x
);
5725 x
= fold_convert (ptr_type_node
, x
);
5726 unsigned cnt
= task_reduction_cnt
- 1;
5727 if (!task_reduction_needs_orig_p
)
5728 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
5730 cnt
= task_reduction_cntorig
- 1;
5731 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5732 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5733 gimplify_assign (r
, x
, ilist
);
5738 tree type
= TREE_TYPE (new_var
);
5739 if (!omp_privatize_by_reference (var
))
5740 type
= build_pointer_type (type
);
5741 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5743 unsigned cnt
= task_reduction_cnt
- 1;
5744 if (!task_reduction_needs_orig_p
)
5745 cnt
+= (task_reduction_cntorig_full
5746 - task_reduction_cntorig
);
5748 cnt
= task_reduction_cntorig
- 1;
5749 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5750 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5754 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5756 if (ctx
->task_reductions
[1 + idx
])
5757 off
= fold_convert (sizetype
,
5758 ctx
->task_reductions
[1 + idx
]);
5760 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5762 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
5765 x
= fold_convert (type
, x
);
5767 if (omp_privatize_by_reference (var
))
5769 gimplify_assign (new_var
, x
, ilist
);
5771 new_var
= build_simple_mem_ref (new_var
);
5775 t
= create_tmp_var (type
);
5776 gimplify_assign (t
, x
, ilist
);
5777 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
5778 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5780 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
5781 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
5782 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5783 cond
= create_tmp_var (TREE_TYPE (t
));
5784 gimplify_assign (cond
, t
, ilist
);
5786 else if (is_variable_sized (var
))
5788 /* For variable sized types, we need to allocate the
5789 actual storage here. Call alloca and store the
5790 result in the pointer decl that we created elsewhere. */
5794 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
5798 ptr
= DECL_VALUE_EXPR (new_var
);
5799 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
5800 ptr
= TREE_OPERAND (ptr
, 0);
5801 gcc_assert (DECL_P (ptr
));
5802 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
5804 if (lower_private_allocate (var
, new_var
, allocator
,
5805 allocate_ptr
, ilist
, ctx
,
5810 /* void *tmp = __builtin_alloca */
5812 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5814 = gimple_build_call (atmp
, 2, x
,
5815 size_int (DECL_ALIGN (var
)));
5816 cfun
->calls_alloca
= 1;
5817 tmp
= create_tmp_var_raw (ptr_type_node
);
5818 gimple_add_tmp_var (tmp
);
5819 gimple_call_set_lhs (stmt
, tmp
);
5821 gimple_seq_add_stmt (ilist
, stmt
);
5824 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
5825 gimplify_assign (ptr
, x
, ilist
);
5828 else if (omp_privatize_by_reference (var
)
5829 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
5830 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
5832 /* For references that are being privatized for Fortran,
5833 allocate new backing storage for the new pointer
5834 variable. This allows us to avoid changing all the
5835 code that expects a pointer to something that expects
5836 a direct variable. */
5840 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
5841 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
5843 x
= build_receiver_ref (var
, false, ctx
);
5844 if (ctx
->allocate_map
)
5845 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
5847 allocator
= *allocatep
;
5848 if (TREE_CODE (allocator
) == TREE_LIST
)
5849 allocator
= TREE_PURPOSE (allocator
);
5850 if (TREE_CODE (allocator
) != INTEGER_CST
)
5851 allocator
= build_outer_var_ref (allocator
, ctx
);
5852 allocator
= fold_convert (pointer_sized_int_node
,
5854 allocate_ptr
= unshare_expr (x
);
5856 if (allocator
== NULL_TREE
)
5857 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5859 else if (lower_private_allocate (var
, new_var
, allocator
,
5861 ilist
, ctx
, true, x
))
5863 else if (TREE_CONSTANT (x
))
5865 /* For reduction in SIMD loop, defer adding the
5866 initialization of the reference, because if we decide
5867 to use SIMD array for it, the initilization could cause
5868 expansion ICE. Ditto for other privatization clauses. */
5873 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
5875 gimple_add_tmp_var (x
);
5876 TREE_ADDRESSABLE (x
) = 1;
5877 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5883 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5884 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
5885 tree al
= size_int (TYPE_ALIGN (rtype
));
5886 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
5891 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5892 gimplify_assign (new_var
, x
, ilist
);
5895 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5897 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5898 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5899 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5907 switch (OMP_CLAUSE_CODE (c
))
5909 case OMP_CLAUSE_SHARED
:
5910 /* Ignore shared directives in teams construct inside
5911 target construct. */
5912 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5913 && !is_host_teams_ctx (ctx
))
5915 /* Shared global vars are just accessed directly. */
5916 if (is_global_var (new_var
))
5918 /* For taskloop firstprivate/lastprivate, represented
5919 as firstprivate and shared clause on the task, new_var
5920 is the firstprivate var. */
5921 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5923 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5924 needs to be delayed until after fixup_child_record_type so
5925 that we get the correct type during the dereference. */
5926 by_ref
= use_pointer_for_field (var
, ctx
);
5927 x
= build_receiver_ref (var
, by_ref
, ctx
);
5928 SET_DECL_VALUE_EXPR (new_var
, x
);
5929 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5931 /* ??? If VAR is not passed by reference, and the variable
5932 hasn't been initialized yet, then we'll get a warning for
5933 the store into the omp_data_s structure. Ideally, we'd be
5934 able to notice this and not store anything at all, but
5935 we're generating code too early. Suppress the warning. */
5937 suppress_warning (var
, OPT_Wuninitialized
);
5940 case OMP_CLAUSE__CONDTEMP_
:
5941 if (is_parallel_ctx (ctx
))
5943 x
= build_receiver_ref (var
, false, ctx
);
5944 SET_DECL_VALUE_EXPR (new_var
, x
);
5945 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5947 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
5949 x
= build_zero_cst (TREE_TYPE (var
));
5954 case OMP_CLAUSE_LASTPRIVATE
:
5955 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5959 case OMP_CLAUSE_PRIVATE
:
5960 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
5961 x
= build_outer_var_ref (var
, ctx
);
5962 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5964 if (is_task_ctx (ctx
))
5965 x
= build_receiver_ref (var
, false, ctx
);
5967 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
5975 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
5976 ilist
, ctx
, false, NULL_TREE
);
5977 nx
= unshare_expr (new_var
);
5979 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5980 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
5983 nx
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, nx
, x
);
5985 nx
= lang_hooks
.decls
.omp_clause_default_ctor (c
, nx
, x
);
5988 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5989 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
5990 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5991 && (gimple_omp_for_collapse (ctx
->stmt
) != 1
5992 || (gimple_omp_for_index (ctx
->stmt
, 0)
5994 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
5995 || omp_privatize_by_reference (var
))
5996 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5999 if (omp_privatize_by_reference (var
))
6001 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6002 tree new_vard
= TREE_OPERAND (new_var
, 0);
6003 gcc_assert (DECL_P (new_vard
));
6004 SET_DECL_VALUE_EXPR (new_vard
,
6005 build_fold_addr_expr (lvar
));
6006 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6011 tree iv
= unshare_expr (ivar
);
6013 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
,
6016 x
= lang_hooks
.decls
.omp_clause_default_ctor (c
,
6020 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
6022 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
6023 unshare_expr (ivar
), x
);
6027 gimplify_and_add (x
, &llist
[0]);
6028 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6029 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6034 gcc_assert (TREE_CODE (v
) == MEM_REF
);
6035 v
= TREE_OPERAND (v
, 0);
6036 gcc_assert (DECL_P (v
));
6038 v
= *ctx
->lastprivate_conditional_map
->get (v
);
6039 tree t
= create_tmp_var (TREE_TYPE (v
));
6040 tree z
= build_zero_cst (TREE_TYPE (v
));
6042 = build_outer_var_ref (var
, ctx
,
6043 OMP_CLAUSE_LASTPRIVATE
);
6044 gimple_seq_add_stmt (dlist
,
6045 gimple_build_assign (t
, z
));
6046 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
6047 tree civar
= DECL_VALUE_EXPR (v
);
6048 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
6049 civar
= unshare_expr (civar
);
6050 TREE_OPERAND (civar
, 1) = sctx
.idx
;
6051 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
6052 unshare_expr (civar
));
6053 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
6054 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
6055 orig_v
, unshare_expr (ivar
)));
6056 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
6058 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
6060 gimple_seq tseq
= NULL
;
6061 gimplify_and_add (x
, &tseq
);
6063 lower_omp (&tseq
, ctx
->outer
);
6064 gimple_seq_add_seq (&llist
[1], tseq
);
6066 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6067 && ctx
->for_simd_scan_phase
)
6069 x
= unshare_expr (ivar
);
6071 = build_outer_var_ref (var
, ctx
,
6072 OMP_CLAUSE_LASTPRIVATE
);
6073 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
6075 gimplify_and_add (x
, &llist
[0]);
6079 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6081 gimplify_and_add (y
, &llist
[1]);
6085 if (omp_privatize_by_reference (var
))
6087 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6088 tree new_vard
= TREE_OPERAND (new_var
, 0);
6089 gcc_assert (DECL_P (new_vard
));
6090 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6091 x
= TYPE_SIZE_UNIT (type
);
6092 if (TREE_CONSTANT (x
))
6094 x
= create_tmp_var_raw (type
, get_name (var
));
6095 gimple_add_tmp_var (x
);
6096 TREE_ADDRESSABLE (x
) = 1;
6097 x
= build_fold_addr_expr_loc (clause_loc
, x
);
6098 x
= fold_convert_loc (clause_loc
,
6099 TREE_TYPE (new_vard
), x
);
6100 gimplify_assign (new_vard
, x
, ilist
);
6105 gimplify_and_add (nx
, ilist
);
6106 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6108 && ctx
->for_simd_scan_phase
)
6110 tree orig_v
= build_outer_var_ref (var
, ctx
,
6111 OMP_CLAUSE_LASTPRIVATE
);
6112 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
6114 gimplify_and_add (x
, ilist
);
6119 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
6121 gimplify_and_add (x
, dlist
);
6124 if (!is_gimple_val (allocator
))
6126 tree avar
= create_tmp_var (TREE_TYPE (allocator
));
6127 gimplify_assign (avar
, allocator
, dlist
);
6130 if (!is_gimple_val (allocate_ptr
))
6132 tree apvar
= create_tmp_var (TREE_TYPE (allocate_ptr
));
6133 gimplify_assign (apvar
, allocate_ptr
, dlist
);
6134 allocate_ptr
= apvar
;
6136 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
6138 = gimple_build_call (f
, 2, allocate_ptr
, allocator
);
6139 gimple_seq_add_stmt (dlist
, g
);
6143 case OMP_CLAUSE_LINEAR
:
6144 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
6145 goto do_firstprivate
;
6146 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
6149 x
= build_outer_var_ref (var
, ctx
);
6152 case OMP_CLAUSE_FIRSTPRIVATE
:
6153 if (is_task_ctx (ctx
))
6155 if ((omp_privatize_by_reference (var
)
6156 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
6157 || is_variable_sized (var
))
6159 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
6161 || use_pointer_for_field (var
, NULL
))
6163 x
= build_receiver_ref (var
, false, ctx
);
6164 if (ctx
->allocate_map
)
6165 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
6167 allocator
= *allocatep
;
6168 if (TREE_CODE (allocator
) == TREE_LIST
)
6169 allocator
= TREE_PURPOSE (allocator
);
6170 if (TREE_CODE (allocator
) != INTEGER_CST
)
6171 allocator
= build_outer_var_ref (allocator
, ctx
);
6172 allocator
= fold_convert (pointer_sized_int_node
,
6174 allocate_ptr
= unshare_expr (x
);
6175 x
= build_simple_mem_ref (x
);
6176 TREE_THIS_NOTRAP (x
) = 1;
6178 SET_DECL_VALUE_EXPR (new_var
, x
);
6179 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
6183 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
6184 && omp_privatize_by_reference (var
))
6186 x
= build_outer_var_ref (var
, ctx
);
6187 gcc_assert (TREE_CODE (x
) == MEM_REF
6188 && integer_zerop (TREE_OPERAND (x
, 1)));
6189 x
= TREE_OPERAND (x
, 0);
6190 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6191 (c
, unshare_expr (new_var
), x
);
6192 gimplify_and_add (x
, ilist
);
6196 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
6197 ilist
, ctx
, false, NULL_TREE
);
6198 x
= build_outer_var_ref (var
, ctx
);
6201 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6202 && gimple_omp_for_combined_into_p (ctx
->stmt
))
6204 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6206 t
= build_outer_var_ref (t
, ctx
);
6207 tree stept
= TREE_TYPE (t
);
6208 tree ct
= omp_find_clause (clauses
,
6209 OMP_CLAUSE__LOOPTEMP_
);
6211 tree l
= OMP_CLAUSE_DECL (ct
);
6212 tree n1
= fd
->loop
.n1
;
6213 tree step
= fd
->loop
.step
;
6214 tree itype
= TREE_TYPE (l
);
6215 if (POINTER_TYPE_P (itype
))
6216 itype
= signed_type_for (itype
);
6217 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
6218 if (TYPE_UNSIGNED (itype
)
6219 && fd
->loop
.cond_code
== GT_EXPR
)
6220 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
6221 fold_build1 (NEGATE_EXPR
, itype
, l
),
6222 fold_build1 (NEGATE_EXPR
,
6225 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
6226 t
= fold_build2 (MULT_EXPR
, stept
,
6227 fold_convert (stept
, l
), t
);
6229 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
6231 if (omp_privatize_by_reference (var
))
6233 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6234 tree new_vard
= TREE_OPERAND (new_var
, 0);
6235 gcc_assert (DECL_P (new_vard
));
6236 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6237 nx
= TYPE_SIZE_UNIT (type
);
6238 if (TREE_CONSTANT (nx
))
6240 nx
= create_tmp_var_raw (type
,
6242 gimple_add_tmp_var (nx
);
6243 TREE_ADDRESSABLE (nx
) = 1;
6244 nx
= build_fold_addr_expr_loc (clause_loc
,
6246 nx
= fold_convert_loc (clause_loc
,
6247 TREE_TYPE (new_vard
),
6249 gimplify_assign (new_vard
, nx
, ilist
);
6253 x
= lang_hooks
.decls
.omp_clause_linear_ctor
6255 gimplify_and_add (x
, ilist
);
6259 if (POINTER_TYPE_P (TREE_TYPE (x
)))
6260 x
= fold_build_pointer_plus (x
, t
);
6262 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
,
6263 fold_convert (TREE_TYPE (x
), t
));
6266 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
6267 || TREE_ADDRESSABLE (new_var
)
6268 || omp_privatize_by_reference (var
))
6269 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6272 if (omp_privatize_by_reference (var
))
6274 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6275 tree new_vard
= TREE_OPERAND (new_var
, 0);
6276 gcc_assert (DECL_P (new_vard
));
6277 SET_DECL_VALUE_EXPR (new_vard
,
6278 build_fold_addr_expr (lvar
));
6279 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6281 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
6283 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
6284 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
6285 gimplify_and_add (x
, ilist
);
6286 gimple_stmt_iterator gsi
6287 = gsi_start (*gimple_omp_body_ptr (ctx
->stmt
));
6289 = gimple_build_assign (unshare_expr (lvar
), iv
);
6290 gsi_insert_before_without_update (&gsi
, g
,
6292 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6293 enum tree_code code
= PLUS_EXPR
;
6294 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
6295 code
= POINTER_PLUS_EXPR
;
6296 g
= gimple_build_assign (iv
, code
, iv
, t
);
6297 gsi_insert_before_without_update (&gsi
, g
,
6301 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6302 (c
, unshare_expr (ivar
), x
);
6303 gimplify_and_add (x
, &llist
[0]);
6304 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6306 gimplify_and_add (x
, &llist
[1]);
6309 if (omp_privatize_by_reference (var
))
6311 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6312 tree new_vard
= TREE_OPERAND (new_var
, 0);
6313 gcc_assert (DECL_P (new_vard
));
6314 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6315 nx
= TYPE_SIZE_UNIT (type
);
6316 if (TREE_CONSTANT (nx
))
6318 nx
= create_tmp_var_raw (type
, get_name (var
));
6319 gimple_add_tmp_var (nx
);
6320 TREE_ADDRESSABLE (nx
) = 1;
6321 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
6322 nx
= fold_convert_loc (clause_loc
,
6323 TREE_TYPE (new_vard
), nx
);
6324 gimplify_assign (new_vard
, nx
, ilist
);
6328 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6329 (c
, unshare_expr (new_var
), x
);
6330 gimplify_and_add (x
, ilist
);
6333 case OMP_CLAUSE__LOOPTEMP_
:
6334 case OMP_CLAUSE__REDUCTEMP_
:
6335 gcc_assert (is_taskreg_ctx (ctx
));
6336 x
= build_outer_var_ref (var
, ctx
);
6337 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
6338 gimplify_and_add (x
, ilist
);
6341 case OMP_CLAUSE_COPYIN
:
6342 by_ref
= use_pointer_for_field (var
, NULL
);
6343 x
= build_receiver_ref (var
, by_ref
, ctx
);
6344 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
6345 append_to_statement_list (x
, ©in_seq
);
6346 copyin_by_ref
|= by_ref
;
6349 case OMP_CLAUSE_REDUCTION
:
6350 case OMP_CLAUSE_IN_REDUCTION
:
6351 /* OpenACC reductions are initialized using the
6352 GOACC_REDUCTION internal function. */
6353 if (is_gimple_omp_oacc (ctx
->stmt
))
6355 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6357 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6359 tree ptype
= TREE_TYPE (placeholder
);
6362 x
= error_mark_node
;
6363 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
6364 && !task_reduction_needs_orig_p
)
6366 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
6368 tree pptype
= build_pointer_type (ptype
);
6369 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
6370 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
6371 size_int (task_reduction_cnt_full
6372 + task_reduction_cntorig
- 1),
6373 NULL_TREE
, NULL_TREE
);
6377 = *ctx
->task_reduction_map
->get (c
);
6378 x
= task_reduction_read (ilist
, tskred_temp
,
6379 pptype
, 7 + 3 * idx
);
6381 x
= fold_convert (pptype
, x
);
6382 x
= build_simple_mem_ref (x
);
6387 lower_private_allocate (var
, new_var
, allocator
,
6388 allocate_ptr
, ilist
, ctx
, false,
6390 x
= build_outer_var_ref (var
, ctx
);
6392 if (omp_privatize_by_reference (var
)
6393 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
6394 x
= build_fold_addr_expr_loc (clause_loc
, x
);
6396 SET_DECL_VALUE_EXPR (placeholder
, x
);
6397 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6398 tree new_vard
= new_var
;
6399 if (omp_privatize_by_reference (var
))
6401 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6402 new_vard
= TREE_OPERAND (new_var
, 0);
6403 gcc_assert (DECL_P (new_vard
));
6405 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6407 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6408 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6411 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6415 if (new_vard
== new_var
)
6417 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
6418 SET_DECL_VALUE_EXPR (new_var
, ivar
);
6422 SET_DECL_VALUE_EXPR (new_vard
,
6423 build_fold_addr_expr (ivar
));
6424 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6426 x
= lang_hooks
.decls
.omp_clause_default_ctor
6427 (c
, unshare_expr (ivar
),
6428 build_outer_var_ref (var
, ctx
));
6429 if (rvarp
&& ctx
->for_simd_scan_phase
)
6432 gimplify_and_add (x
, &llist
[0]);
6433 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6435 gimplify_and_add (x
, &llist
[1]);
6442 gimplify_and_add (x
, &llist
[0]);
6444 tree ivar2
= unshare_expr (lvar
);
6445 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6446 x
= lang_hooks
.decls
.omp_clause_default_ctor
6447 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
6448 gimplify_and_add (x
, &llist
[0]);
6452 x
= lang_hooks
.decls
.omp_clause_default_ctor
6453 (c
, unshare_expr (rvar2
),
6454 build_outer_var_ref (var
, ctx
));
6455 gimplify_and_add (x
, &llist
[0]);
6458 /* For types that need construction, add another
6459 private var which will be default constructed
6460 and optionally initialized with
6461 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6462 loop we want to assign this value instead of
6463 constructing and destructing it in each
6465 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
6466 gimple_add_tmp_var (nv
);
6467 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
6471 x
= lang_hooks
.decls
.omp_clause_default_ctor
6472 (c
, nv
, build_outer_var_ref (var
, ctx
));
6473 gimplify_and_add (x
, ilist
);
6475 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6477 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6478 x
= DECL_VALUE_EXPR (new_vard
);
6480 if (new_vard
!= new_var
)
6481 vexpr
= build_fold_addr_expr (nv
);
6482 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6483 lower_omp (&tseq
, ctx
);
6484 SET_DECL_VALUE_EXPR (new_vard
, x
);
6485 gimple_seq_add_seq (ilist
, tseq
);
6486 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6489 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6491 gimplify_and_add (x
, dlist
);
6494 tree ref
= build_outer_var_ref (var
, ctx
);
6495 x
= unshare_expr (ivar
);
6496 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
6498 gimplify_and_add (x
, &llist
[0]);
6500 ref
= build_outer_var_ref (var
, ctx
);
6501 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
6503 gimplify_and_add (x
, &llist
[3]);
6505 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6506 if (new_vard
== new_var
)
6507 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6509 SET_DECL_VALUE_EXPR (new_vard
,
6510 build_fold_addr_expr (lvar
));
6512 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6514 gimplify_and_add (x
, &llist
[1]);
6516 tree ivar2
= unshare_expr (lvar
);
6517 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6518 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
6520 gimplify_and_add (x
, &llist
[1]);
6524 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
6526 gimplify_and_add (x
, &llist
[1]);
6531 gimplify_and_add (x
, &llist
[0]);
6532 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6534 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6535 lower_omp (&tseq
, ctx
);
6536 gimple_seq_add_seq (&llist
[0], tseq
);
6538 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6539 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6540 lower_omp (&tseq
, ctx
);
6541 gimple_seq_add_seq (&llist
[1], tseq
);
6542 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6543 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6544 if (new_vard
== new_var
)
6545 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6547 SET_DECL_VALUE_EXPR (new_vard
,
6548 build_fold_addr_expr (lvar
));
6549 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6551 gimplify_and_add (x
, &llist
[1]);
6554 /* If this is a reference to constant size reduction var
6555 with placeholder, we haven't emitted the initializer
6556 for it because it is undesirable if SIMD arrays are used.
6557 But if they aren't used, we need to emit the deferred
6558 initialization now. */
6559 else if (omp_privatize_by_reference (var
) && is_simd
)
6560 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6562 tree lab2
= NULL_TREE
;
6566 if (!is_parallel_ctx (ctx
))
6568 tree condv
= create_tmp_var (boolean_type_node
);
6569 tree m
= build_simple_mem_ref (cond
);
6570 g
= gimple_build_assign (condv
, m
);
6571 gimple_seq_add_stmt (ilist
, g
);
6573 = create_artificial_label (UNKNOWN_LOCATION
);
6574 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6575 g
= gimple_build_cond (NE_EXPR
, condv
,
6578 gimple_seq_add_stmt (ilist
, g
);
6579 gimple_seq_add_stmt (ilist
,
6580 gimple_build_label (lab1
));
6582 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6584 gimple_seq_add_stmt (ilist
, g
);
6586 x
= lang_hooks
.decls
.omp_clause_default_ctor
6587 (c
, unshare_expr (new_var
),
6589 : build_outer_var_ref (var
, ctx
));
6591 gimplify_and_add (x
, ilist
);
6593 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6594 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6596 if (ctx
->for_simd_scan_phase
)
6599 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
6601 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
6602 gimple_add_tmp_var (nv
);
6603 ctx
->cb
.decl_map
->put (new_vard
, nv
);
6604 x
= lang_hooks
.decls
.omp_clause_default_ctor
6605 (c
, nv
, build_outer_var_ref (var
, ctx
));
6607 gimplify_and_add (x
, ilist
);
6608 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6610 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6612 if (new_vard
!= new_var
)
6613 vexpr
= build_fold_addr_expr (nv
);
6614 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6615 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6616 lower_omp (&tseq
, ctx
);
6617 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
6618 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
6619 gimple_seq_add_seq (ilist
, tseq
);
6621 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6622 if (is_simd
&& ctx
->scan_exclusive
)
6625 = create_tmp_var_raw (TREE_TYPE (new_var
));
6626 gimple_add_tmp_var (nv2
);
6627 ctx
->cb
.decl_map
->put (nv
, nv2
);
6628 x
= lang_hooks
.decls
.omp_clause_default_ctor
6629 (c
, nv2
, build_outer_var_ref (var
, ctx
));
6630 gimplify_and_add (x
, ilist
);
6631 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6633 gimplify_and_add (x
, dlist
);
6635 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6637 gimplify_and_add (x
, dlist
);
6640 && ctx
->scan_exclusive
6641 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
6643 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
6644 gimple_add_tmp_var (nv2
);
6645 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
6646 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6648 gimplify_and_add (x
, dlist
);
6650 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6654 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6656 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6657 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
6658 && is_omp_target (ctx
->stmt
))
6660 tree d
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
6661 tree oldv
= NULL_TREE
;
6663 if (DECL_HAS_VALUE_EXPR_P (d
))
6664 oldv
= DECL_VALUE_EXPR (d
);
6665 SET_DECL_VALUE_EXPR (d
, new_vard
);
6666 DECL_HAS_VALUE_EXPR_P (d
) = 1;
6667 lower_omp (&tseq
, ctx
);
6669 SET_DECL_VALUE_EXPR (d
, oldv
);
6672 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
6673 DECL_HAS_VALUE_EXPR_P (d
) = 0;
6677 lower_omp (&tseq
, ctx
);
6678 gimple_seq_add_seq (ilist
, tseq
);
6680 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6683 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6684 lower_omp (&tseq
, ctx
);
6685 gimple_seq_add_seq (dlist
, tseq
);
6686 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6688 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6692 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6699 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
6700 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
6701 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6706 tree lab2
= NULL_TREE
;
6707 /* GOMP_taskgroup_reduction_register memsets the whole
6708 array to zero. If the initializer is zero, we don't
6709 need to initialize it again, just mark it as ever
6710 used unconditionally, i.e. cond = true. */
6711 if (initializer_zerop (x
))
6713 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6715 gimple_seq_add_stmt (ilist
, g
);
6720 if (!cond) { cond = true; new_var = x; } */
6721 if (!is_parallel_ctx (ctx
))
6723 tree condv
= create_tmp_var (boolean_type_node
);
6724 tree m
= build_simple_mem_ref (cond
);
6725 g
= gimple_build_assign (condv
, m
);
6726 gimple_seq_add_stmt (ilist
, g
);
6728 = create_artificial_label (UNKNOWN_LOCATION
);
6729 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6730 g
= gimple_build_cond (NE_EXPR
, condv
,
6733 gimple_seq_add_stmt (ilist
, g
);
6734 gimple_seq_add_stmt (ilist
,
6735 gimple_build_label (lab1
));
6737 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6739 gimple_seq_add_stmt (ilist
, g
);
6740 gimplify_assign (new_var
, x
, ilist
);
6742 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6746 /* reduction(-:var) sums up the partial results, so it
6747 acts identically to reduction(+:var). */
6748 if (code
== MINUS_EXPR
)
6752 = (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
);
6753 tree new_vard
= new_var
;
6754 if (is_simd
&& omp_privatize_by_reference (var
))
6756 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6757 new_vard
= TREE_OPERAND (new_var
, 0);
6758 gcc_assert (DECL_P (new_vard
));
6760 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6762 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6763 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6766 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6770 if (new_vard
!= new_var
)
6772 SET_DECL_VALUE_EXPR (new_vard
,
6773 build_fold_addr_expr (lvar
));
6774 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6777 tree ref
= build_outer_var_ref (var
, ctx
);
6781 if (ctx
->for_simd_scan_phase
)
6783 gimplify_assign (ivar
, ref
, &llist
[0]);
6784 ref
= build_outer_var_ref (var
, ctx
);
6785 gimplify_assign (ref
, rvar
, &llist
[3]);
6789 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
6794 simt_lane
= create_tmp_var (unsigned_type_node
);
6795 x
= build_call_expr_internal_loc
6796 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
6797 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
6798 /* Make sure x is evaluated unconditionally. */
6799 tree bfly_var
= create_tmp_var (TREE_TYPE (ivar
));
6800 gimplify_assign (bfly_var
, x
, &llist
[2]);
6801 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, bfly_var
);
6802 gimplify_assign (ivar
, x
, &llist
[2]);
6808 tree zero
= build_zero_cst (TREE_TYPE (ivar
));
6809 ivar2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6810 boolean_type_node
, ivar
,
6812 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6813 boolean_type_node
, ref
,
6816 x
= build2 (code
, TREE_TYPE (ref
), ref2
, ivar2
);
6818 x
= fold_convert (TREE_TYPE (ref
), x
);
6819 ref
= build_outer_var_ref (var
, ctx
);
6820 gimplify_assign (ref
, x
, &llist
[1]);
6825 lower_private_allocate (var
, new_var
, allocator
,
6826 allocate_ptr
, ilist
, ctx
,
6828 if (omp_privatize_by_reference (var
) && is_simd
)
6829 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6830 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6831 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6833 gimplify_assign (new_var
, x
, ilist
);
6836 tree ref
= build_outer_var_ref (var
, ctx
);
6837 tree new_var2
= new_var
;
6841 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
6843 = fold_build2_loc (clause_loc
, NE_EXPR
,
6844 boolean_type_node
, new_var
,
6846 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6847 boolean_type_node
, ref
,
6850 x
= build2 (code
, TREE_TYPE (ref2
), ref2
, new_var2
);
6852 x
= fold_convert (TREE_TYPE (new_var
), x
);
6853 ref
= build_outer_var_ref (var
, ctx
);
6854 gimplify_assign (ref
, x
, dlist
);
6869 tree clobber
= build_clobber (TREE_TYPE (tskred_avar
));
6870 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
6873 if (known_eq (sctx
.max_vf
, 1U))
6875 sctx
.is_simt
= false;
6876 if (ctx
->lastprivate_conditional_map
)
6878 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
6880 /* Signal to lower_omp_1 that it should use parent context. */
6881 ctx
->combined_into_simd_safelen1
= true;
6882 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6883 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6884 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6886 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6887 omp_context
*outer
= ctx
->outer
;
6888 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
6889 outer
= outer
->outer
;
6890 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
6891 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
6892 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
6898 /* When not vectorized, treat lastprivate(conditional:) like
6899 normal lastprivate, as there will be just one simd lane
6900 writing the privatized variable. */
6901 delete ctx
->lastprivate_conditional_map
;
6902 ctx
->lastprivate_conditional_map
= NULL
;
6907 if (nonconst_simd_if
)
6909 if (sctx
.lane
== NULL_TREE
)
6911 sctx
.idx
= create_tmp_var (unsigned_type_node
);
6912 sctx
.lane
= create_tmp_var (unsigned_type_node
);
6914 /* FIXME: For now. */
6915 sctx
.is_simt
= false;
6918 if (sctx
.lane
|| sctx
.is_simt
)
6920 uid
= create_tmp_var (ptr_type_node
, "simduid");
6921 /* Don't want uninit warnings on simduid, it is always uninitialized,
6922 but we use it not for the value, but for the DECL_UID only. */
6923 suppress_warning (uid
, OPT_Wuninitialized
);
6924 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
6925 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
6926 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6927 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6929 /* Emit calls denoting privatized variables and initializing a pointer to
6930 structure that holds private variables as fields after ompdevlow pass. */
6933 sctx
.simt_eargs
[0] = uid
;
6935 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
6936 gimple_call_set_lhs (g
, uid
);
6937 gimple_seq_add_stmt (ilist
, g
);
6938 sctx
.simt_eargs
.release ();
6940 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
6941 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
6942 gimple_call_set_lhs (g
, simtrec
);
6943 gimple_seq_add_stmt (ilist
, g
);
6947 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
6948 2 + (nonconst_simd_if
!= NULL
),
6949 uid
, integer_zero_node
,
6951 gimple_call_set_lhs (g
, sctx
.lane
);
6952 gimple_stmt_iterator gsi
= gsi_start (*gimple_omp_body_ptr (ctx
->stmt
));
6953 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
6954 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
6955 build_int_cst (unsigned_type_node
, 0));
6956 gimple_seq_add_stmt (ilist
, g
);
6959 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6961 gimple_call_set_lhs (g
, sctx
.lastlane
);
6962 gimple_seq_add_stmt (dlist
, g
);
6963 gimple_seq_add_seq (dlist
, llist
[3]);
6965 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6968 tree simt_vf
= create_tmp_var (unsigned_type_node
);
6969 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
6970 gimple_call_set_lhs (g
, simt_vf
);
6971 gimple_seq_add_stmt (dlist
, g
);
6973 tree t
= build_int_cst (unsigned_type_node
, 1);
6974 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
6975 gimple_seq_add_stmt (dlist
, g
);
6977 t
= build_int_cst (unsigned_type_node
, 0);
6978 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6979 gimple_seq_add_stmt (dlist
, g
);
6981 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6982 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6983 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6984 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
6985 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
6987 gimple_seq_add_seq (dlist
, llist
[2]);
6989 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
6990 gimple_seq_add_stmt (dlist
, g
);
6992 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
6993 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
6994 gimple_seq_add_stmt (dlist
, g
);
6996 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
6998 for (int i
= 0; i
< 2; i
++)
7001 tree vf
= create_tmp_var (unsigned_type_node
);
7002 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
7003 gimple_call_set_lhs (g
, vf
);
7004 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
7005 gimple_seq_add_stmt (seq
, g
);
7006 tree t
= build_int_cst (unsigned_type_node
, 0);
7007 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
7008 gimple_seq_add_stmt (seq
, g
);
7009 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7010 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
7011 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7012 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
7013 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
7014 gimple_seq_add_seq (seq
, llist
[i
]);
7015 t
= build_int_cst (unsigned_type_node
, 1);
7016 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
7017 gimple_seq_add_stmt (seq
, g
);
7018 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
7019 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
7020 gimple_seq_add_stmt (seq
, g
);
7021 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
7026 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
7028 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
7029 gimple_seq_add_stmt (dlist
, g
);
7032 /* The copyin sequence is not to be executed by the main thread, since
7033 that would result in self-copies. Perhaps not visible to scalars,
7034 but it certainly is to C++ operator=. */
7037 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
7039 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
7040 build_int_cst (TREE_TYPE (x
), 0));
7041 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
7042 gimplify_and_add (x
, ilist
);
7045 /* If any copyin variable is passed by reference, we must ensure the
7046 master thread doesn't modify it before it is copied over in all
7047 threads. Similarly for variables in both firstprivate and
7048 lastprivate clauses we need to ensure the lastprivate copying
7049 happens after firstprivate copying in all threads. And similarly
7050 for UDRs if initializer expression refers to omp_orig. */
7051 if (copyin_by_ref
|| lastprivate_firstprivate
7052 || (reduction_omp_orig_ref
7053 && !ctx
->scan_inclusive
7054 && !ctx
->scan_exclusive
))
7056 /* Don't add any barrier for #pragma omp simd or
7057 #pragma omp distribute. */
7058 if (!is_task_ctx (ctx
)
7059 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
7060 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
7061 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
7064 /* If max_vf is non-zero, then we can use only a vectorization factor
7065 up to the max_vf we chose. So stick it into the safelen clause. */
7066 if (maybe_ne (sctx
.max_vf
, 0U))
7068 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
7069 OMP_CLAUSE_SAFELEN
);
7070 poly_uint64 safe_len
;
7072 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
7073 && maybe_gt (safe_len
, sctx
.max_vf
)))
7075 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
7076 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
7078 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
7079 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
7084 /* Create temporary variables for lastprivate(conditional:) implementation
7085 in context CTX with CLAUSES. */
7088 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
7090 tree iter_type
= NULL_TREE
;
7091 tree cond_ptr
= NULL_TREE
;
7092 tree iter_var
= NULL_TREE
;
7093 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7094 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
7095 tree next
= *clauses
;
7096 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7097 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7098 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
7102 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
7104 if (iter_type
== NULL_TREE
)
7106 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
7107 iter_var
= create_tmp_var_raw (iter_type
);
7108 DECL_CONTEXT (iter_var
) = current_function_decl
;
7109 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
7110 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
7111 ctx
->block_vars
= iter_var
;
7113 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
7114 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
7115 OMP_CLAUSE_DECL (c3
) = iter_var
;
7116 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
7118 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
7120 next
= OMP_CLAUSE_CHAIN (cc
);
7121 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7122 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
7123 ctx
->lastprivate_conditional_map
->put (o
, v
);
7126 if (iter_type
== NULL
)
7128 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
7130 struct omp_for_data fd
;
7131 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
7133 iter_type
= unsigned_type_for (fd
.iter_type
);
7135 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
7136 iter_type
= unsigned_type_node
;
7137 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
7141 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
7142 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
7146 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
7147 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
7148 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
7149 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
7150 ctx
->block_vars
= cond_ptr
;
7151 c2
= build_omp_clause (UNKNOWN_LOCATION
,
7152 OMP_CLAUSE__CONDTEMP_
);
7153 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
7154 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
7157 iter_var
= create_tmp_var_raw (iter_type
);
7158 DECL_CONTEXT (iter_var
) = current_function_decl
;
7159 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
7160 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
7161 ctx
->block_vars
= iter_var
;
7163 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
7164 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
7165 OMP_CLAUSE_DECL (c3
) = iter_var
;
7166 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
7167 OMP_CLAUSE_CHAIN (c2
) = c3
;
7168 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
7170 tree v
= create_tmp_var_raw (iter_type
);
7171 DECL_CONTEXT (v
) = current_function_decl
;
7172 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
7173 DECL_CHAIN (v
) = ctx
->block_vars
;
7174 ctx
->block_vars
= v
;
7175 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7176 ctx
->lastprivate_conditional_map
->put (o
, v
);
/* NOTE(review): extraction damage — statements split across lines, interior
   original lines elided (embedded numbering jumps).  Code bytes preserved
   exactly; comments only.  Restore from upstream GCC omp-low.cc before
   changing code.  */
7181 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7182    both parallel and workshare constructs. PREDICATE may be NULL if it's
7183    always true. BODY_P is the sequence to insert early initialization
7184    if needed, STMT_LIST is where the non-conditional lastprivate handling
7185    goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7189 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
7190 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
7193   tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
7194   bool par_clauses
= false;
/* SIMT/SIMD bookkeeping, filled lazily below.  */
7195   tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
7196   unsigned HOST_WIDE_INT conditional_off
= 0;
7197   gimple_seq post_stmt_list
= NULL
;
7199   /* Early exit if there are no lastprivate or linear clauses. */
7200   for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
7201     if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
7202 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
7203 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
7205   if (clauses
== NULL
)
7207       /* If this was a workshare clause, see if it had been combined
7208 with its parallel. In that case, look for the clauses on the
7209 parallel statement itself. */
7210       if (is_parallel_ctx (ctx
))
7214       if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
7217       clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7218 OMP_CLAUSE_LASTPRIVATE
);
7219       if (clauses
== NULL
)
/* MAYBE_SIMT: for a simd loop, check the _simt_/_simduid_ internal
   clauses left by earlier lowering.  */
7224   bool maybe_simt
= false;
7225   if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7226 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7228       maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
7229       simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
7231 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
/* If PREDICATE is given, guard the whole copy-out with
   "if (predicate) goto label_true; else goto label;".  */
7237       tree label_true
, arm1
, arm2
;
7238       enum tree_code pred_code
= TREE_CODE (predicate
);
7240       label
= create_artificial_label (UNKNOWN_LOCATION
);
7241       label_true
= create_artificial_label (UNKNOWN_LOCATION
);
7242       if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
7244 arm1
= TREE_OPERAND (predicate
, 0);
7245 arm2
= TREE_OPERAND (predicate
, 1);
7246 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7247 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
/* Non-comparison predicate: compare against false with NE_EXPR.  */
7252 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7253 arm2
= boolean_false_node
;
7254 pred_code
= NE_EXPR
;
/* SIMT: vote across lanes whether any lane's predicate holds
   (IFN_GOMP_SIMT_VOTE_ANY), then branch on the vote result.  */
7258 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
7259 c
= fold_convert (integer_type_node
, c
);
7260 simtcond
= create_tmp_var (integer_type_node
);
7261 gimplify_assign (simtcond
, c
, stmt_list
);
7262 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
7264 c
= create_tmp_var (integer_type_node
);
7265 gimple_call_set_lhs (g
, c
);
7266 gimple_seq_add_stmt (stmt_list
, g
);
7267 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
7271 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
7272       gimple_seq_add_stmt (stmt_list
, stmt
);
7273       gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
/* Main clause loop.  C is advanced at the bottom; may restart on the
   enclosing parallel's clauses (see "7440" below).  */
7276   tree cond_ptr
= NULL_TREE
;
7277   for (c
= clauses
; c
;)
7280       location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7281       gimple_seq
*this_stmt_list
= stmt_list
;
7282       tree lab2
= NULL_TREE
;
/* lastprivate(conditional:) not collapsed to safelen(1): emit into the
   critical-section sequence CSTMT_LIST and guard the store with a
   "my iteration counter > stored counter" check.  */
7284       if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7285 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
7286 && ctx
->lastprivate_conditional_map
7287 && !ctx
->combined_into_simd_safelen1
)
7289 gcc_assert (body_p
);
7292 if (cond_ptr
== NULL_TREE
)
7294 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
7295 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
7297 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
7298 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7299 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
/* Early init: zero the per-variable counter in BODY_P.  */
7300 gimplify_assign (v
, build_zero_cst (type
), body_p
);
7301 this_stmt_list
= cstmt_list
;
/* MEM: slot in the shared condtemp buffer — MEM_REF at a running byte
   offset when COND_PTR is a pointer, else an ARRAY_REF at a running
   index.  */
7303 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
7305 mem
= build2 (MEM_REF
, type
, cond_ptr
,
7306 build_int_cst (TREE_TYPE (cond_ptr
),
7308 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
7311 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
7312 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
7313 tree mem2
= copy_node (mem
);
7314 gimple_seq seq
= NULL
;
7315 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
7316 gimple_seq_add_seq (this_stmt_list
, seq
);
7317 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
7318 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
/* if (v > mem) { mem = v; <copy-out below>; } — LAB2 closes it.  */
7319 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
7320 gimple_seq_add_stmt (this_stmt_list
, g
);
7321 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
7322 gimplify_assign (mem2
, v
, this_stmt_list
);
/* safelen(1)-combined conditional lastprivate: defer copy-out to
   POST_STMT_LIST (emitted after the main list at the end).  */
7325 && ctx
->combined_into_simd_safelen1
7326 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7327 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
7328 && ctx
->lastprivate_conditional_map
)
7329 this_stmt_list
= &post_stmt_list
;
7331       if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7332 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7333 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
7335 var
= OMP_CLAUSE_DECL (c
);
/* taskloop firstprivate+lastprivate: the private copy lives in the
   enclosing task context.  */
7336 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7337 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
7338 && is_taskloop_ctx (ctx
))
7340 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
7341 new_var
= lookup_decl (var
, ctx
->outer
);
7345 new_var
= lookup_decl (var
, ctx
);
7346 /* Avoid uninitialized warnings for lastprivate and
7347 for linear iterators. */
7349 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7350 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
7351 suppress_warning (new_var
, OPT_Wuninitialized
);
/* SIMD array case: the privatized var is an "omp simd array" element;
   read the element at the IFN_GOMP_SIMD_LAST_LANE index instead.  */
7354 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
7356 tree val
= DECL_VALUE_EXPR (new_var
);
7357 if (TREE_CODE (val
) == ARRAY_REF
7358 && VAR_P (TREE_OPERAND (val
, 0))
7359 && lookup_attribute ("omp simd array",
7360 DECL_ATTRIBUTES (TREE_OPERAND (val
,
7363 if (lastlane
== NULL
)
7365 lastlane
= create_tmp_var (unsigned_type_node
);
7367 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
7369 TREE_OPERAND (val
, 1));
7370 gimple_call_set_lhs (g
, lastlane
);
7371 gimple_seq_add_stmt (this_stmt_list
, g
);
7373 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
7374 TREE_OPERAND (val
, 0), lastlane
,
7375 NULL_TREE
, NULL_TREE
);
7376 TREE_THIS_NOTRAP (new_var
) = 1;
/* SIMT case: fetch the value from the last active lane via
   IFN_GOMP_SIMT_LAST_LANE + IFN_GOMP_SIMT_XCHG_IDX.  */
7379 else if (maybe_simt
)
7381 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
7382 ? DECL_VALUE_EXPR (new_var
)
7384 if (simtlast
== NULL
)
7386 simtlast
= create_tmp_var (unsigned_type_node
);
7387 gcall
*g
= gimple_build_call_internal
7388 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
7389 gimple_call_set_lhs (g
, simtlast
);
7390 gimple_seq_add_stmt (this_stmt_list
, g
);
7392 x
= build_call_expr_internal_loc
7393 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
7394 TREE_TYPE (val
), 2, val
, simtlast
);
7395 new_var
= unshare_expr (new_var
);
7396 gimplify_assign (new_var
, x
, this_stmt_list
);
7397 new_var
= unshare_expr (new_var
);
/* Lower and splice any deferred clause sub-sequences (lastprivate
   GIMPLE_SEQ, linear GIMPLE_SEQ), clearing them afterwards.  */
7400 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7401 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
7403 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
7404 gimple_seq_add_seq (this_stmt_list
,
7405 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
7406 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
7408 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7409 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
7411 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
7412 gimple_seq_add_seq (this_stmt_list
,
7413 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
7414 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
/* taskloop loop-IV lastprivate of a global var: special handling
   (elided here — see upstream around original line 7420).  */
7418 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7419 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)
7420 && is_taskloop_ctx (ctx
))
7422 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
7424 if (is_global_var (ovar
))
/* Actual copy-out: outer ref = private copy, via the langhook's
   clause assignment operator (handles C++ operator=).  */
7428 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
7429 if (omp_privatize_by_reference (var
))
7430 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7431 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
7432 gimplify_and_add (x
, this_stmt_list
);
7435 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
7439       c
= OMP_CLAUSE_CHAIN (c
);
7440       if (c
== NULL
&& !par_clauses
)
7442 /* If this was a workshare clause, see if it had been combined
7443 with its parallel. In that case, continue looking for the
7444 clauses also on the parallel statement itself. */
7445 if (is_parallel_ctx (ctx
))
7449 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
7452 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7453 OMP_CLAUSE_LASTPRIVATE
);
/* Close the predicate guard and flush deferred conditional copy-outs.  */
7459     gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
7460   gimple_seq_add_seq (stmt_list
, post_stmt_list
);
/* NOTE(review): extraction damage — statements split across lines and
   interior original lines elided (embedded numbering jumps, e.g. the
   switch head near 7527 and several assignment targets like setup_call/
   init_call are missing).  Code bytes preserved exactly; comments only.
   Restore from upstream GCC omp-low.cc before changing code.  */
7463 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7464    (which might be a placeholder). INNER is true if this is an inner
7465    axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7466    join markers. Generate the before-loop forking sequence in
7467    FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7468    general form of these sequences is
7470 GOACC_REDUCTION_SETUP
7472 GOACC_REDUCTION_INIT
7474 GOACC_REDUCTION_FINI
7476 GOACC_REDUCTION_TEARDOWN. */
7479 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
7480 gcall
*fork
, gcall
*private_marker
, gcall
*join
,
7481 gimple_seq
*fork_seq
, gimple_seq
*join_seq
,
/* Four sub-sequences, stitched around FORK/JOIN at the end.  */
7484   gimple_seq before_fork
= NULL
;
7485   gimple_seq after_fork
= NULL
;
7486   gimple_seq before_join
= NULL
;
7487   gimple_seq after_join
= NULL
;
7488   tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
7489     setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
/* Running byte offset into the shared reduction buffer.  */
7490   unsigned offset
= 0;
7492   for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7493     if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
7495 /* No 'reduction' clauses on OpenACC 'kernels'. */
7496 gcc_checking_assert (!is_oacc_kernels (ctx
));
7497 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7498 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
7500 tree orig
= OMP_CLAUSE_DECL (c
);
7501 tree var
= maybe_lookup_decl (orig
, ctx
);
7502 tree ref_to_res
= NULL_TREE
;
7503 tree incoming
, outgoing
, v1
, v2
, v3
;
7504 bool is_private
= false;
/* Canonicalize the reduction code: '-' behaves as '+'; &&/|| map to
   bitwise forms for the runtime encoding.  */
7506 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
7507 if (rcode
== MINUS_EXPR
)
7509 else if (rcode
== TRUTH_ANDIF_EXPR
)
7510 rcode
= BIT_AND_EXPR
;
7511 else if (rcode
== TRUTH_ORIF_EXPR
)
7512 rcode
= BIT_IOR_EXPR
;
/* OP: the reduction code as an unsigned constant, passed to the
   IFN_GOACC_REDUCTION internal calls below.  */
7513 tree op
= build_int_cst (unsigned_type_node
, rcode
);
7518 incoming
= outgoing
= var
;
7522 /* See if an outer construct also reduces this variable. */
7523 omp_context
*outer
= ctx
;
7525 while (omp_context
*probe
= outer
->outer
)
7527 enum gimple_code type
= gimple_code (probe
->stmt
);
7532 case GIMPLE_OMP_FOR
:
7533 cls
= gimple_omp_for_clauses (probe
->stmt
);
7536 case GIMPLE_OMP_TARGET
:
7537 /* No 'reduction' clauses inside OpenACC 'kernels'
7539 gcc_checking_assert (!is_oacc_kernels (probe
));
7541 if (!is_gimple_omp_offloaded (probe
->stmt
))
7544 cls
= gimple_omp_target_clauses (probe
->stmt
);
/* Scan the probed construct's clauses: an outer reduction of the
   same decl supplies incoming/outgoing; an outer (first)private
   marks this reduction private.  */
7552 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
7553 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
7554 && orig
== OMP_CLAUSE_DECL (cls
))
7556 incoming
= outgoing
= lookup_decl (orig
, probe
);
7557 goto has_outer_reduction
;
7559 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
7560 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
7561 && orig
== OMP_CLAUSE_DECL (cls
))
7569 /* This is the outermost construct with this reduction,
7570 see if there's a mapping for it. */
7571 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
7572 && maybe_lookup_field (orig
, outer
) && !is_private
)
/* Mapped on the offload region: the receiver ref is where the
   final result lands; the incoming value is the reduction's
   neutral element.  */
7574 ref_to_res
= build_receiver_ref (orig
, false, outer
);
7575 if (omp_privatize_by_reference (orig
))
7576 ref_to_res
= build_simple_mem_ref (ref_to_res
);
7578 tree type
= TREE_TYPE (var
);
7579 if (POINTER_TYPE_P (type
))
7580 type
= TREE_TYPE (type
);
7583 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
7587 /* Try to look at enclosing contexts for reduction var,
7588 use original if no mapping found. */
7590 omp_context
*c
= ctx
->outer
;
7593 t
= maybe_lookup_decl (orig
, c
);
7596 incoming
= outgoing
= (t
? t
: orig
);
7599 has_outer_reduction
:;
7603 ref_to_res
= integer_zero_node
;
/* By-reference reduction var: materialize three pointer temps
   V1/V2/V3 (one per call site) and then strip to mem refs.  */
7605 if (omp_privatize_by_reference (orig
))
7607 tree type
= TREE_TYPE (var
);
7608 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
7612 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
7613 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
7616 v1
= create_tmp_var (type
, id
);
7617 v2
= create_tmp_var (type
, id
);
7618 v3
= create_tmp_var (type
, id
);
7620 gimplify_assign (v1
, var
, fork_seq
);
7621 gimplify_assign (v2
, var
, fork_seq
);
7622 gimplify_assign (v3
, var
, fork_seq
);
7624 var
= build_simple_mem_ref (var
);
7625 v1
= build_simple_mem_ref (v1
);
7626 v2
= build_simple_mem_ref (v2
);
7627 v3
= build_simple_mem_ref (v3
);
7628 outgoing
= build_simple_mem_ref (outgoing
);
7630 if (!TREE_CONSTANT (incoming
))
7631 incoming
= build_simple_mem_ref (incoming
);
7634 /* Note that 'var' might be a mem ref. */
7637 /* Determine position in reduction buffer, which may be used
7638 by target. The parser has ensured that this is not a
7639 variable-sized type. */
7640 fixed_size_mode mode
7641 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
/* Align OFFSET up to the mode's alignment, record the slot, and
   advance past the mode's size.  */
7642 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
7643 offset
= (offset
+ align
- 1) & ~(align
- 1);
7644 tree off
= build_int_cst (sizetype
, offset
);
7645 offset
+= GET_MODE_SIZE (mode
);
/* First clause only (guard elided): build the four IFN_GOACC_REDUCTION
   sub-operation selector constants.  */
7649 init_code
= build_int_cst (integer_type_node
,
7650 IFN_GOACC_REDUCTION_INIT
);
7651 fini_code
= build_int_cst (integer_type_node
,
7652 IFN_GOACC_REDUCTION_FINI
);
7653 setup_code
= build_int_cst (integer_type_node
,
7654 IFN_GOACC_REDUCTION_SETUP
);
7655 teardown_code
= build_int_cst (integer_type_node
,
7656 IFN_GOACC_REDUCTION_TEARDOWN
);
/* Four IFN_GOACC_REDUCTION calls (assignment targets setup_call/
   init_call/fini_call/teardown_call elided by extraction), each taking
   (code, ref_to_res, local, level, op, offset).  */
7660 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7661 TREE_TYPE (var
), 6, setup_code
,
7662 unshare_expr (ref_to_res
),
7663 unshare_expr (incoming
),
7666 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7667 TREE_TYPE (var
), 6, init_code
,
7668 unshare_expr (ref_to_res
),
7669 unshare_expr (v1
), level
, op
, off
);
7671 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7672 TREE_TYPE (var
), 6, fini_code
,
7673 unshare_expr (ref_to_res
),
7674 unshare_expr (v2
), level
, op
, off
);
7676 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7677 TREE_TYPE (var
), 6, teardown_code
,
7678 ref_to_res
, unshare_expr (v3
),
/* SETUP lands before the fork, INIT after it; FINI before the join,
   TEARDOWN (whose result is the outgoing value) after it.  */
7681 gimplify_assign (unshare_expr (v1
), setup_call
, &before_fork
);
7682 gimplify_assign (unshare_expr (v2
), init_call
, &after_fork
);
7683 gimplify_assign (unshare_expr (v3
), fini_call
, &before_join
);
7684 gimplify_assign (unshare_expr (outgoing
), teardown_call
, &after_join
);
7687   /* Now stitch things together. */
7688   gimple_seq_add_seq (fork_seq
, before_fork
);
7690     gimple_seq_add_stmt (fork_seq
, private_marker
);
7692     gimple_seq_add_stmt (fork_seq
, fork
);
7693   gimple_seq_add_seq (fork_seq
, after_fork
);
7695   gimple_seq_add_seq (join_seq
, before_join
);
7697     gimple_seq_add_stmt (join_seq
, join
);
7698   gimple_seq_add_seq (join_seq
, after_join
);
7701 /* Generate code to implement the REDUCTION clauses, append it
7702 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7703 that should be emitted also inside of the critical section,
7704 in that case clear *CLIST afterwards, otherwise leave it as is
7705 and let the caller emit it itself. */
7708 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
7709 gimple_seq
*clist
, omp_context
*ctx
)
7711 gimple_seq sub_seq
= NULL
;
7716 /* OpenACC loop reductions are handled elsewhere. */
7717 if (is_gimple_omp_oacc (ctx
->stmt
))
7720 /* SIMD reductions are handled in lower_rec_input_clauses. */
7721 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7722 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7725 /* inscan reductions are handled elsewhere. */
7726 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
7729 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7730 update in that case, otherwise use a lock. */
7731 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
7732 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7733 && !OMP_CLAUSE_REDUCTION_TASK (c
))
7735 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
7736 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7738 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7748 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7750 tree var
, ref
, new_var
, orig_var
;
7751 enum tree_code code
;
7752 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7754 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7755 || OMP_CLAUSE_REDUCTION_TASK (c
))
7758 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
7759 orig_var
= var
= OMP_CLAUSE_DECL (c
);
7760 if (TREE_CODE (var
) == MEM_REF
)
7762 var
= TREE_OPERAND (var
, 0);
7763 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
7764 var
= TREE_OPERAND (var
, 0);
7765 if (TREE_CODE (var
) == ADDR_EXPR
)
7766 var
= TREE_OPERAND (var
, 0);
7769 /* If this is a pointer or referenced based array
7770 section, the var could be private in the outer
7771 context e.g. on orphaned loop construct. Pretend this
7772 is private variable's outer reference. */
7773 ccode
= OMP_CLAUSE_PRIVATE
;
7774 if (TREE_CODE (var
) == INDIRECT_REF
)
7775 var
= TREE_OPERAND (var
, 0);
7778 if (is_variable_sized (var
))
7780 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
7781 var
= DECL_VALUE_EXPR (var
);
7782 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
7783 var
= TREE_OPERAND (var
, 0);
7784 gcc_assert (DECL_P (var
));
7787 new_var
= lookup_decl (var
, ctx
);
7788 if (var
== OMP_CLAUSE_DECL (c
)
7789 && omp_privatize_by_reference (var
))
7790 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7791 ref
= build_outer_var_ref (var
, ctx
, ccode
);
7792 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
7794 /* reduction(-:var) sums up the partial results, so it acts
7795 identically to reduction(+:var). */
7796 if (code
== MINUS_EXPR
)
7799 bool is_truth_op
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
);
7802 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
7804 addr
= save_expr (addr
);
7805 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
7806 tree new_var2
= new_var
;
7810 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7811 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7812 boolean_type_node
, new_var
, zero
);
7813 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, boolean_type_node
,
7816 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (new_var2
), ref2
,
7819 x
= fold_convert (TREE_TYPE (new_var
), x
);
7820 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
7821 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
7822 gimplify_and_add (x
, stmt_seqp
);
7825 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7827 tree d
= OMP_CLAUSE_DECL (c
);
7828 tree type
= TREE_TYPE (d
);
7829 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7830 tree i
= create_tmp_var (TREE_TYPE (v
));
7831 tree ptype
= build_pointer_type (TREE_TYPE (type
));
7832 tree bias
= TREE_OPERAND (d
, 1);
7833 d
= TREE_OPERAND (d
, 0);
7834 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
7836 tree b
= TREE_OPERAND (d
, 1);
7837 b
= maybe_lookup_decl (b
, ctx
);
7840 b
= TREE_OPERAND (d
, 1);
7841 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
7843 if (integer_zerop (bias
))
7847 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
7848 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
7849 TREE_TYPE (b
), b
, bias
);
7851 d
= TREE_OPERAND (d
, 0);
7853 /* For ref build_outer_var_ref already performs this, so
7854 only new_var needs a dereference. */
7855 if (TREE_CODE (d
) == INDIRECT_REF
)
7857 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7858 gcc_assert (omp_privatize_by_reference (var
)
7859 && var
== orig_var
);
7861 else if (TREE_CODE (d
) == ADDR_EXPR
)
7863 if (orig_var
== var
)
7865 new_var
= build_fold_addr_expr (new_var
);
7866 ref
= build_fold_addr_expr (ref
);
7871 gcc_assert (orig_var
== var
);
7872 if (omp_privatize_by_reference (var
))
7873 ref
= build_fold_addr_expr (ref
);
7877 tree t
= maybe_lookup_decl (v
, ctx
);
7881 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
7882 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
7884 if (!integer_zerop (bias
))
7886 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
7887 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7888 TREE_TYPE (new_var
), new_var
,
7889 unshare_expr (bias
));
7890 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7891 TREE_TYPE (ref
), ref
, bias
);
7893 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
7894 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
7895 tree m
= create_tmp_var (ptype
);
7896 gimplify_assign (m
, new_var
, stmt_seqp
);
7898 m
= create_tmp_var (ptype
);
7899 gimplify_assign (m
, ref
, stmt_seqp
);
7901 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
7902 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7903 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7904 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
7905 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7906 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
7907 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7909 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7910 tree decl_placeholder
7911 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
7912 SET_DECL_VALUE_EXPR (placeholder
, out
);
7913 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7914 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
7915 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
7916 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7917 gimple_seq_add_seq (&sub_seq
,
7918 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7919 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7920 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7921 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
7929 tree zero
= build_zero_cst (TREE_TYPE (out
));
7930 out2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7931 boolean_type_node
, out
, zero
);
7932 priv2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7933 boolean_type_node
, priv
, zero
);
7935 x
= build2 (code
, TREE_TYPE (out2
), out2
, priv2
);
7937 x
= fold_convert (TREE_TYPE (out
), x
);
7938 out
= unshare_expr (out
);
7939 gimplify_assign (out
, x
, &sub_seq
);
7941 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
7942 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7943 gimple_seq_add_stmt (&sub_seq
, g
);
7944 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
7945 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7946 gimple_seq_add_stmt (&sub_seq
, g
);
7947 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
7948 build_int_cst (TREE_TYPE (i
), 1));
7949 gimple_seq_add_stmt (&sub_seq
, g
);
7950 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
7951 gimple_seq_add_stmt (&sub_seq
, g
);
7952 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
7954 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7956 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7958 if (omp_privatize_by_reference (var
)
7959 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7961 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
7962 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7963 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7964 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7965 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7966 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7967 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7971 tree new_var2
= new_var
;
7975 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7976 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7977 boolean_type_node
, new_var
, zero
);
7978 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, boolean_type_node
,
7981 x
= build2 (code
, TREE_TYPE (ref
), ref2
, new_var2
);
7983 x
= fold_convert (TREE_TYPE (new_var
), x
);
7984 ref
= build_outer_var_ref (var
, ctx
);
7985 gimplify_assign (ref
, x
, &sub_seq
);
7989 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
7991 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7993 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
7997 gimple_seq_add_seq (stmt_seqp
, *clist
);
8001 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
8003 gimple_seq_add_stmt (stmt_seqp
, stmt
);
8007 /* Generate code to implement the COPYPRIVATE clauses. */
8010 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
8015 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8017 tree var
, new_var
, ref
, x
;
8019 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8021 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
8024 var
= OMP_CLAUSE_DECL (c
);
8025 by_ref
= use_pointer_for_field (var
, NULL
);
8027 ref
= build_sender_ref (var
, ctx
);
8028 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
8031 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
8032 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
8034 gimplify_assign (ref
, x
, slist
);
8036 ref
= build_receiver_ref (var
, false, ctx
);
8039 ref
= fold_convert_loc (clause_loc
,
8040 build_pointer_type (TREE_TYPE (new_var
)),
8042 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
8044 if (omp_privatize_by_reference (var
))
8046 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
8047 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
8048 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
8050 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
8051 gimplify_and_add (x
, rlist
);
8056 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
8057 and REDUCTION from the sender (aka parent) side. */
8060 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
8064 int ignored_looptemp
= 0;
8065 bool is_taskloop
= false;
8067 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
8068 by GOMP_taskloop. */
8069 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
8071 ignored_looptemp
= 2;
8075 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8077 tree val
, ref
, x
, var
;
8078 bool by_ref
, do_in
= false, do_out
= false;
8079 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8081 switch (OMP_CLAUSE_CODE (c
))
8083 case OMP_CLAUSE_PRIVATE
:
8084 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
8087 case OMP_CLAUSE_FIRSTPRIVATE
:
8088 case OMP_CLAUSE_COPYIN
:
8089 case OMP_CLAUSE_LASTPRIVATE
:
8090 case OMP_CLAUSE_IN_REDUCTION
:
8091 case OMP_CLAUSE__REDUCTEMP_
:
8093 case OMP_CLAUSE_REDUCTION
:
8094 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
8097 case OMP_CLAUSE_SHARED
:
8098 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
8101 case OMP_CLAUSE__LOOPTEMP_
:
8102 if (ignored_looptemp
)
8112 val
= OMP_CLAUSE_DECL (c
);
8113 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
8114 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
8115 && TREE_CODE (val
) == MEM_REF
)
8117 val
= TREE_OPERAND (val
, 0);
8118 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
8119 val
= TREE_OPERAND (val
, 0);
8120 if (TREE_CODE (val
) == INDIRECT_REF
8121 || TREE_CODE (val
) == ADDR_EXPR
)
8122 val
= TREE_OPERAND (val
, 0);
8123 if (is_variable_sized (val
))
8127 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8128 outer taskloop region. */
8129 omp_context
*ctx_for_o
= ctx
;
8131 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
8132 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
8133 ctx_for_o
= ctx
->outer
;
8135 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
8137 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
8138 && is_global_var (var
)
8139 && (val
== OMP_CLAUSE_DECL (c
)
8140 || !is_task_ctx (ctx
)
8141 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
8142 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
8143 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
8144 != POINTER_TYPE
)))))
8147 t
= omp_member_access_dummy_var (var
);
8150 var
= DECL_VALUE_EXPR (var
);
8151 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
8153 var
= unshare_and_remap (var
, t
, o
);
8155 var
= unshare_expr (var
);
8158 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
8160 /* Handle taskloop firstprivate/lastprivate, where the
8161 lastprivate on GIMPLE_OMP_TASK is represented as
8162 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8163 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
8164 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
8165 if (use_pointer_for_field (val
, ctx
))
8166 var
= build_fold_addr_expr (var
);
8167 gimplify_assign (x
, var
, ilist
);
8168 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
8172 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
8173 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
8174 || val
== OMP_CLAUSE_DECL (c
))
8175 && is_variable_sized (val
))
8177 by_ref
= use_pointer_for_field (val
, NULL
);
8179 switch (OMP_CLAUSE_CODE (c
))
8181 case OMP_CLAUSE_FIRSTPRIVATE
:
8182 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
8184 && is_task_ctx (ctx
))
8185 suppress_warning (var
);
8189 case OMP_CLAUSE_PRIVATE
:
8190 case OMP_CLAUSE_COPYIN
:
8191 case OMP_CLAUSE__LOOPTEMP_
:
8192 case OMP_CLAUSE__REDUCTEMP_
:
8196 case OMP_CLAUSE_LASTPRIVATE
:
8197 if (by_ref
|| omp_privatize_by_reference (val
))
8199 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
8206 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
8211 case OMP_CLAUSE_REDUCTION
:
8212 case OMP_CLAUSE_IN_REDUCTION
:
8214 if (val
== OMP_CLAUSE_DECL (c
))
8216 if (is_task_ctx (ctx
))
8217 by_ref
= use_pointer_for_field (val
, ctx
);
8219 do_out
= !(by_ref
|| omp_privatize_by_reference (val
));
8222 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
8231 ref
= build_sender_ref (val
, ctx
);
8232 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
8233 gimplify_assign (ref
, x
, ilist
);
8234 if (is_task_ctx (ctx
))
8235 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
8240 ref
= build_sender_ref (val
, ctx
);
8241 gimplify_assign (var
, ref
, olist
);
8246 /* Generate code to implement SHARED from the sender (aka parent)
8247 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8248 list things that got automatically shared. */
8251 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
8253 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
8255 if (ctx
->record_type
== NULL
)
8258 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
8259 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
8261 ovar
= DECL_ABSTRACT_ORIGIN (f
);
8262 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
8265 nvar
= maybe_lookup_decl (ovar
, ctx
);
8267 || !DECL_HAS_VALUE_EXPR_P (nvar
)
8268 || (ctx
->allocate_map
8269 && ctx
->allocate_map
->get (ovar
)))
8272 /* If CTX is a nested parallel directive. Find the immediately
8273 enclosing parallel or workshare construct that contains a
8274 mapping for OVAR. */
8275 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
8277 t
= omp_member_access_dummy_var (var
);
8280 var
= DECL_VALUE_EXPR (var
);
8281 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
8283 var
= unshare_and_remap (var
, t
, o
);
8285 var
= unshare_expr (var
);
8288 if (use_pointer_for_field (ovar
, ctx
))
8290 x
= build_sender_ref (ovar
, ctx
);
8291 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
8292 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
8294 gcc_assert (is_parallel_ctx (ctx
)
8295 && DECL_ARTIFICIAL (ovar
));
8296 /* _condtemp_ clause. */
8297 var
= build_constructor (TREE_TYPE (x
), NULL
);
8300 var
= build_fold_addr_expr (var
);
8301 gimplify_assign (x
, var
, ilist
);
8305 x
= build_sender_ref (ovar
, ctx
);
8306 gimplify_assign (x
, var
, ilist
);
8308 if (!TREE_READONLY (var
)
8309 /* We don't need to receive a new reference to a result
8310 or parm decl. In fact we may not store to it as we will
8311 invalidate any pending RSO and generate wrong gimple
8313 && !((TREE_CODE (var
) == RESULT_DECL
8314 || TREE_CODE (var
) == PARM_DECL
)
8315 && DECL_BY_REFERENCE (var
)))
8317 x
= build_sender_ref (ovar
, ctx
);
8318 gimplify_assign (var
, x
, olist
);
8324 /* Emit an OpenACC head marker call, encapulating the partitioning and
8325 other information that must be processed by the target compiler.
8326 Return the maximum number of dimensions the associated loop might
8327 be partitioned over. */
8330 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
8331 gimple_seq
*seq
, omp_context
*ctx
)
8333 unsigned levels
= 0;
8335 tree gang_static
= NULL_TREE
;
8336 auto_vec
<tree
, 5> args
;
8338 args
.quick_push (build_int_cst
8339 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
8340 args
.quick_push (ddvar
);
8341 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8343 switch (OMP_CLAUSE_CODE (c
))
8345 case OMP_CLAUSE_GANG
:
8346 tag
|= OLF_DIM_GANG
;
8347 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
8348 /* static:* is represented by -1, and we can ignore it, as
8349 scheduling is always static. */
8350 if (gang_static
&& integer_minus_onep (gang_static
))
8351 gang_static
= NULL_TREE
;
8355 case OMP_CLAUSE_WORKER
:
8356 tag
|= OLF_DIM_WORKER
;
8360 case OMP_CLAUSE_VECTOR
:
8361 tag
|= OLF_DIM_VECTOR
;
8365 case OMP_CLAUSE_SEQ
:
8369 case OMP_CLAUSE_AUTO
:
8373 case OMP_CLAUSE_INDEPENDENT
:
8374 tag
|= OLF_INDEPENDENT
;
8377 case OMP_CLAUSE_TILE
:
8381 case OMP_CLAUSE_REDUCTION
:
8382 tag
|= OLF_REDUCTION
;
8392 if (DECL_P (gang_static
))
8393 gang_static
= build_outer_var_ref (gang_static
, ctx
);
8394 tag
|= OLF_GANG_STATIC
;
8397 omp_context
*tgt
= enclosing_target_ctx (ctx
);
8398 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
8400 else if (is_oacc_kernels (tgt
))
8401 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8403 else if (is_oacc_kernels_decomposed_part (tgt
))
8408 /* In a parallel region, loops are implicitly INDEPENDENT. */
8409 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
8410 tag
|= OLF_INDEPENDENT
;
8412 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8413 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8414 if (tgt
&& is_oacc_kernels_decomposed_part (tgt
))
8416 gcc_assert (tag
& (OLF_SEQ
| OLF_INDEPENDENT
));
8417 gcc_assert (!(tag
& OLF_AUTO
));
8421 /* Tiling could use all 3 levels. */
8425 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8426 Ensure at least one level, or 2 for possible auto
8428 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
8429 << OLF_DIM_BASE
) | OLF_SEQ
));
8431 if (levels
< 1u + maybe_auto
)
8432 levels
= 1u + maybe_auto
;
8435 args
.quick_push (build_int_cst (integer_type_node
, levels
));
8436 args
.quick_push (build_int_cst (integer_type_node
, tag
));
8438 args
.quick_push (gang_static
);
8440 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
8441 gimple_set_location (call
, loc
);
8442 gimple_set_lhs (call
, ddvar
);
8443 gimple_seq_add_stmt (seq
, call
);
8448 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
8449 partitioning level of the enclosed region. */
8452 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
8453 tree tofollow
, gimple_seq
*seq
)
8455 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
8456 : IFN_UNIQUE_OACC_TAIL_MARK
);
8457 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
8458 int nargs
= 2 + (tofollow
!= NULL_TREE
);
8459 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
8460 marker
, ddvar
, tofollow
);
8461 gimple_set_location (call
, loc
);
8462 gimple_set_lhs (call
, ddvar
);
8463 gimple_seq_add_stmt (seq
, call
);
8466 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8467 the loop clauses, from which we extract reductions. Initialize
8471 lower_oacc_head_tail (location_t loc
, tree clauses
, gcall
*private_marker
,
8472 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
8475 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
8476 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
8478 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
8482 gimple_set_location (private_marker
, loc
);
8483 gimple_call_set_lhs (private_marker
, ddvar
);
8484 gimple_call_set_arg (private_marker
, 1, ddvar
);
8487 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
8488 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
8491 for (unsigned done
= 1; count
; count
--, done
++)
8493 gimple_seq fork_seq
= NULL
;
8494 gimple_seq join_seq
= NULL
;
8496 tree place
= build_int_cst (integer_type_node
, -1);
8497 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
8498 fork_kind
, ddvar
, place
);
8499 gimple_set_location (fork
, loc
);
8500 gimple_set_lhs (fork
, ddvar
);
8502 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
8503 join_kind
, ddvar
, place
);
8504 gimple_set_location (join
, loc
);
8505 gimple_set_lhs (join
, ddvar
);
8507 /* Mark the beginning of this level sequence. */
8509 lower_oacc_loop_marker (loc
, ddvar
, true,
8510 build_int_cst (integer_type_node
, count
),
8512 lower_oacc_loop_marker (loc
, ddvar
, false,
8513 build_int_cst (integer_type_node
, done
),
8516 lower_oacc_reductions (loc
, clauses
, place
, inner
,
8517 fork
, (count
== 1) ? private_marker
: NULL
,
8518 join
, &fork_seq
, &join_seq
, ctx
);
8520 /* Append this level to head. */
8521 gimple_seq_add_seq (head
, fork_seq
);
8522 /* Prepend it to tail. */
8523 gimple_seq_add_seq (&join_seq
, *tail
);
8529 /* Mark the end of the sequence. */
8530 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
8531 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
8534 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8535 catch handler and return it. This prevents programs from violating the
8536 structured block semantics with throws. */
8539 maybe_catch_exception (gimple_seq body
)
8544 if (!flag_exceptions
)
8547 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
8548 decl
= lang_hooks
.eh_protect_cleanup_actions ();
8550 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
8552 g
= gimple_build_eh_must_not_throw (decl
);
8553 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
8556 return gimple_seq_alloc_with_stmt (g
);
8560 /* Routines to lower OMP directives into OMP-GIMPLE. */
8562 /* If ctx is a worksharing context inside of a cancellable parallel
8563 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8564 and conditional branch to parallel's cancel_label to handle
8565 cancellation in the implicit barrier. */
8568 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
8571 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
8572 if (gimple_omp_return_nowait_p (omp_return
))
8574 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
8575 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
8576 && outer
->cancellable
)
8578 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
8579 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
8580 tree lhs
= create_tmp_var (c_bool_type
);
8581 gimple_omp_return_set_lhs (omp_return
, lhs
);
8582 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
8583 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
8584 fold_convert (c_bool_type
,
8585 boolean_false_node
),
8586 outer
->cancel_label
, fallthru_label
);
8587 gimple_seq_add_stmt (body
, g
);
8588 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
8590 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
8591 && gimple_code (outer
->stmt
) != GIMPLE_OMP_SCOPE
)
8595 /* Find the first task_reduction or reduction clause or return NULL
8596 if there are none. */
8599 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
8600 enum omp_clause_code ccode
)
8604 clauses
= omp_find_clause (clauses
, ccode
);
8605 if (clauses
== NULL_TREE
)
8607 if (ccode
!= OMP_CLAUSE_REDUCTION
8608 || code
== OMP_TASKLOOP
8609 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
8611 clauses
= OMP_CLAUSE_CHAIN (clauses
);
8615 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
8616 gimple_seq
*, gimple_seq
*);
8618 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8619 CTX is the enclosing OMP context for the current statement. */
8622 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8624 tree block
, control
;
8625 gimple_stmt_iterator tgsi
;
8626 gomp_sections
*stmt
;
8628 gbind
*new_stmt
, *bind
;
8629 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
8631 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
8633 push_gimplify_context ();
8639 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
8640 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
8641 tree rtmp
= NULL_TREE
;
8644 tree type
= build_pointer_type (pointer_sized_int_node
);
8645 tree temp
= create_tmp_var (type
);
8646 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8647 OMP_CLAUSE_DECL (c
) = temp
;
8648 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
8649 gimple_omp_sections_set_clauses (stmt
, c
);
8650 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
8651 gimple_omp_sections_clauses (stmt
),
8652 &ilist
, &tred_dlist
);
8654 rtmp
= make_ssa_name (type
);
8655 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
8658 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
8659 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
8661 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
8662 &ilist
, &dlist
, ctx
, NULL
);
8664 control
= create_tmp_var (unsigned_type_node
, ".section");
8665 gimple_omp_sections_set_control (stmt
, control
);
8667 new_body
= gimple_omp_body (stmt
);
8668 gimple_omp_set_body (stmt
, NULL
);
8669 tgsi
= gsi_start (new_body
);
8670 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
8675 sec_start
= gsi_stmt (tgsi
);
8676 sctx
= maybe_lookup_ctx (sec_start
);
8679 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
8680 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
8681 GSI_CONTINUE_LINKING
);
8682 gimple_omp_set_body (sec_start
, NULL
);
8684 if (gsi_one_before_end_p (tgsi
))
8686 gimple_seq l
= NULL
;
8687 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
8688 &ilist
, &l
, &clist
, ctx
);
8689 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
8690 gimple_omp_section_set_last (sec_start
);
8693 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
8694 GSI_CONTINUE_LINKING
);
8697 block
= make_node (BLOCK
);
8698 bind
= gimple_build_bind (NULL
, new_body
, block
);
8701 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
8705 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8706 gcall
*g
= gimple_build_call (fndecl
, 0);
8707 gimple_seq_add_stmt (&olist
, g
);
8708 gimple_seq_add_seq (&olist
, clist
);
8709 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8710 g
= gimple_build_call (fndecl
, 0);
8711 gimple_seq_add_stmt (&olist
, g
);
8714 block
= make_node (BLOCK
);
8715 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
8716 gsi_replace (gsi_p
, new_stmt
, true);
8718 pop_gimplify_context (new_stmt
);
8719 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
8720 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
8721 if (BLOCK_VARS (block
))
8722 TREE_USED (block
) = 1;
8725 gimple_seq_add_seq (&new_body
, ilist
);
8726 gimple_seq_add_stmt (&new_body
, stmt
);
8727 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
8728 gimple_seq_add_stmt (&new_body
, bind
);
8730 t
= gimple_build_omp_continue (control
, control
);
8731 gimple_seq_add_stmt (&new_body
, t
);
8733 gimple_seq_add_seq (&new_body
, olist
);
8734 if (ctx
->cancellable
)
8735 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
8736 gimple_seq_add_seq (&new_body
, dlist
);
8738 new_body
= maybe_catch_exception (new_body
);
8740 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
8741 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8742 t
= gimple_build_omp_return (nowait
);
8743 gimple_seq_add_stmt (&new_body
, t
);
8744 gimple_seq_add_seq (&new_body
, tred_dlist
);
8745 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
8748 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
8750 gimple_bind_set_body (new_stmt
, new_body
);
8754 /* A subroutine of lower_omp_single. Expand the simple form of
8755 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8757 if (GOMP_single_start ())
8759 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8761 FIXME. It may be better to delay expanding the logic of this until
8762 pass_expand_omp. The expanded logic may make the job more difficult
8763 to a synchronization analysis pass. */
8766 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
8768 location_t loc
= gimple_location (single_stmt
);
8769 tree tlabel
= create_artificial_label (loc
);
8770 tree flabel
= create_artificial_label (loc
);
8771 gimple
*call
, *cond
;
8774 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
8775 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
8776 call
= gimple_build_call (decl
, 0);
8777 gimple_call_set_lhs (call
, lhs
);
8778 gimple_seq_add_stmt (pre_p
, call
);
8780 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
8781 fold_convert_loc (loc
, TREE_TYPE (lhs
),
8784 gimple_seq_add_stmt (pre_p
, cond
);
8785 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
8786 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8787 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
8791 /* A subroutine of lower_omp_single. Expand the simple form of
8792 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8794 #pragma omp single copyprivate (a, b, c)
8796 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8799 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8805 GOMP_single_copy_end (©out);
8816 FIXME. It may be better to delay expanding the logic of this until
8817 pass_expand_omp. The expanded logic may make the job more difficult
8818 to a synchronization analysis pass. */
8821 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
8824 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
8825 gimple_seq copyin_seq
;
8826 location_t loc
= gimple_location (single_stmt
);
8828 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
8830 ptr_type
= build_pointer_type (ctx
->record_type
);
8831 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
8833 l0
= create_artificial_label (loc
);
8834 l1
= create_artificial_label (loc
);
8835 l2
= create_artificial_label (loc
);
8837 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
8838 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
8839 t
= fold_convert_loc (loc
, ptr_type
, t
);
8840 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
8842 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
8843 build_int_cst (ptr_type
, 0));
8844 t
= build3 (COND_EXPR
, void_type_node
, t
,
8845 build_and_jump (&l0
), build_and_jump (&l1
));
8846 gimplify_and_add (t
, pre_p
);
8848 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
8850 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8853 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
8856 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
8857 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
8858 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
8859 gimplify_and_add (t
, pre_p
);
8861 t
= build_and_jump (&l2
);
8862 gimplify_and_add (t
, pre_p
);
8864 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
8866 gimple_seq_add_seq (pre_p
, copyin_seq
);
8868 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
8872 /* Expand code for an OpenMP single directive. */
8875 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8878 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
8880 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8882 push_gimplify_context ();
8884 block
= make_node (BLOCK
);
8885 bind
= gimple_build_bind (NULL
, NULL
, block
);
8886 gsi_replace (gsi_p
, bind
, true);
8889 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
8890 &bind_body
, &dlist
, ctx
, NULL
);
8891 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
8893 gimple_seq_add_stmt (&bind_body
, single_stmt
);
8895 if (ctx
->record_type
)
8896 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
8898 lower_omp_single_simple (single_stmt
, &bind_body
);
8900 gimple_omp_set_body (single_stmt
, NULL
);
8902 gimple_seq_add_seq (&bind_body
, dlist
);
8904 bind_body
= maybe_catch_exception (bind_body
);
8906 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
8907 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8908 gimple
*g
= gimple_build_omp_return (nowait
);
8909 gimple_seq_add_stmt (&bind_body_tail
, g
);
8910 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
8911 if (ctx
->record_type
)
8913 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
8914 tree clobber
= build_clobber (ctx
->record_type
);
8915 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
8916 clobber
), GSI_SAME_STMT
);
8918 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
8919 gimple_bind_set_body (bind
, bind_body
);
8921 pop_gimplify_context (bind
);
8923 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8924 BLOCK_VARS (block
) = ctx
->block_vars
;
8925 if (BLOCK_VARS (block
))
8926 TREE_USED (block
) = 1;
8930 /* Lower code for an OMP scope directive. */
/* NOTE(review): this chunk is a line-mangled extraction of gcc/omp-low.cc;
   the leading integers (8930, 8933, ...) are the original file's line
   numbers leaked into the text, statements are wrap-split across lines,
   and some original lines (braces, blanks, a few declarations such as the
   function's return type and the `tree block, *bind` declarations around
   8934-8935 and the reduction-temp guard around 8946-8949) are missing.
   Code below is kept byte-identical; only comments were added.  */
/* Lowers GIMPLE_OMP_SCOPE: wraps the region in a GIMPLE_BIND, lowers
   task reductions and data-sharing clauses, emits the GOMP_scope_start
   runtime call, and finishes with an OMP return (honoring nowait).  */
8933 lower_omp_scope (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8936 gimple
*scope_stmt
= gsi_stmt (*gsi_p
);
8938 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8939 gimple_seq tred_dlist
= NULL
;
8941 push_gimplify_context ();
/* Replace the scope statement with a fresh bind wrapping it.  */
8943 block
= make_node (BLOCK
);
8944 bind
= gimple_build_bind (NULL
, NULL
, block
);
8945 gsi_replace (gsi_p
, bind
, true);
/* Look for task reductions; if present, thread a _reductemp_ clause
   through and register/deregister the reductions (tred_dlist gets the
   teardown sequence).  */
8950 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt
),
8951 OMP_SCOPE
, OMP_CLAUSE_REDUCTION
);
8954 tree type
= build_pointer_type (pointer_sized_int_node
);
8955 tree temp
= create_tmp_var (type
);
8956 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8957 OMP_CLAUSE_DECL (c
) = temp
;
8958 OMP_CLAUSE_CHAIN (c
) = gimple_omp_scope_clauses (scope_stmt
);
8959 gimple_omp_scope_set_clauses (scope_stmt
, c
);
8960 lower_omp_task_reductions (ctx
, OMP_SCOPE
,
8961 gimple_omp_scope_clauses (scope_stmt
),
8962 &bind_body
, &tred_dlist
);
/* GOMP_scope_start receives the reduction-temp pointer.  */
8964 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START
);
8965 gimple
*stmt
= gimple_build_call (fndecl
, 1, temp
);
8966 gimple_seq_add_stmt (&bind_body
, stmt
);
/* Lower privatization clauses, then the body itself.  */
8969 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt
),
8970 &bind_body
, &dlist
, ctx
, NULL
);
8971 lower_omp (gimple_omp_body_ptr (scope_stmt
), ctx
);
8973 gimple_seq_add_stmt (&bind_body
, scope_stmt
);
8975 gimple_seq_add_seq (&bind_body
, gimple_omp_body (scope_stmt
));
8977 gimple_omp_set_body (scope_stmt
, NULL
);
/* Non-task reductions: the merge code in clist is wrapped in
   GOMP_atomic_start/GOMP_atomic_end (presumably only when clist is
   non-empty in the unmangled original — guard lines are missing here).  */
8979 gimple_seq clist
= NULL
;
8980 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt
),
8981 &bind_body
, &clist
, ctx
);
8984 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8985 gcall
*g
= gimple_build_call (fndecl
, 0);
8986 gimple_seq_add_stmt (&bind_body
, g
);
8987 gimple_seq_add_seq (&bind_body
, clist
);
8988 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8989 g
= gimple_build_call (fndecl
, 0);
8990 gimple_seq_add_stmt (&bind_body
, g
);
/* dlist holds the destructor/copy-back code from the input clauses.  */
8993 gimple_seq_add_seq (&bind_body
, dlist
);
8995 bind_body
= maybe_catch_exception (bind_body
);
/* Emit the OMP return; nowait suppresses the implied barrier.  */
8997 bool nowait
= omp_find_clause (gimple_omp_scope_clauses (scope_stmt
),
8998 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8999 gimple
*g
= gimple_build_omp_return (nowait
);
9000 gimple_seq_add_stmt (&bind_body_tail
, g
);
9001 gimple_seq_add_seq (&bind_body_tail
, tred_dlist
);
9002 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
/* Clobber the sender record after the region so its stack slot can be
   reused.  */
9003 if (ctx
->record_type
)
9005 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
9006 tree clobber
= build_clobber (ctx
->record_type
);
9007 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
9008 clobber
), GSI_SAME_STMT
);
9010 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
9012 gimple_bind_set_body (bind
, bind_body
);
9014 pop_gimplify_context (bind
);
/* Attach the block vars gathered during lowering to the new bind.  */
9016 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9017 BLOCK_VARS (block
) = ctx
->block_vars
;
9018 if (BLOCK_VARS (block
))
9019 TREE_USED (block
) = 1;
9021 /* Expand code for an OpenMP master or masked directive. */
/* NOTE(review): mangled extraction — leading integers are original
   gcc/omp-low.cc line numbers; some lines (e.g. 9038-9039, the
   OMP_CLAUSE_FILTER argument and `if (filter)` guard) are missing.
   Code kept byte-identical; comments only.  */
/* Lowers GIMPLE_OMP_MASTER / GIMPLE_OMP_MASKED into:
     if (omp_get_thread_num () == FILTER) { body }  // else jump past it
   FILTER is 0 for master and for masked without a filter clause.  */
9024 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9026 tree block
, lab
= NULL
, x
, bfn_decl
;
9027 gimple
*stmt
= gsi_stmt (*gsi_p
);
9029 location_t loc
= gimple_location (stmt
);
9031 tree filter
= integer_zero_node
;
9033 push_gimplify_context ();
/* For masked, pick up the filter expression (clause lookup argument is
   lost to the extraction; presumably OMP_CLAUSE_FILTER — confirm against
   upstream).  */
9035 if (gimple_code (stmt
) == GIMPLE_OMP_MASKED
)
9037 filter
= omp_find_clause (gimple_omp_masked_clauses (stmt
),
9040 filter
= fold_convert (integer_type_node
,
9041 OMP_CLAUSE_FILTER_EXPR (filter
));
9043 filter
= integer_zero_node
;
/* Wrap the statement in a new GIMPLE_BIND.  */
9045 block
= make_node (BLOCK
);
9046 bind
= gimple_build_bind (NULL
, NULL
, block
);
9047 gsi_replace (gsi_p
, bind
, true);
9048 gimple_bind_add_stmt (bind
, stmt
);
/* Guard: skip the body when omp_get_thread_num () != filter.
   build_and_jump creates the forward jump and fills in LAB.  */
9050 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9051 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
9052 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, filter
);
9053 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
9055 gimplify_and_add (x
, &tseq
);
9056 gimple_bind_add_seq (bind
, tseq
);
/* Lower the body, wrap it for EH, and move it into the bind.  */
9058 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9059 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9060 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9061 gimple_omp_set_body (stmt
, NULL
);
/* Landing label for non-matching threads.  */
9063 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
/* master/masked imply no barrier: OMP return with nowait == true.  */
9065 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9067 pop_gimplify_context (bind
);
9069 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9070 BLOCK_VARS (block
) = ctx
->block_vars
;
9073 /* Helper function for lower_omp_task_reductions. For a specific PASS
9074 find out the current clause it should be processed, or return false
9075 if all have been processed already. */
/* NOTE(review): mangled extraction — original lines 9091-9096 (the
   MEM_REF handling body) and 9103-9110 (the `return true;` /
   `return false;` tails) are missing from this view; what remains is
   kept byte-identical.  The visible logic: walk the clause chain of
   kind CCODE starting at *C; skip non-task REDUCTION clauses (unless
   CODE is OMP_TASKLOOP); extract *DECL/*TYPE, look through references,
   and match the clause against PASS — pass 0 presumably handles
   constant-size types, pass 1 variable-size ones (the predicate on
   TYPE_SIZE_UNIT at 9099).  *NEXT is set to the following clause.  */
9078 omp_task_reduction_iterate (int pass
, enum tree_code code
,
9079 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
9080 tree
*type
, tree
*next
)
9082 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
/* Plain (non-task) reduction clauses are not handled here except for
   taskloop.  */
9084 if (ccode
== OMP_CLAUSE_REDUCTION
9085 && code
!= OMP_TASKLOOP
9086 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
9088 *decl
= OMP_CLAUSE_DECL (*c
);
9089 *type
= TREE_TYPE (*decl
);
/* Array-section reductions come in as MEM_REFs (handling body lost to
   extraction).  */
9090 if (TREE_CODE (*decl
) == MEM_REF
)
/* By-reference privatization: the element type is behind the ref.  */
9097 if (omp_privatize_by_reference (*decl
))
9098 *type
= TREE_TYPE (*type
);
/* PASS selects constant- vs variable-sized types.  */
9099 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
9102 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
9111 /* Lower task_reduction and reduction clauses (the latter unless CODE is
9112 OMP_TASKGROUP only with task modifier). Register mapping of those in
9113 START sequence and reducing them and unregister them in the END sequence. */
/* NOTE(review): this whole block is a line-mangled extraction of
   gcc/omp-low.cc; the leading integers are the original line numbers and
   many original lines (braces, blanks, and several statements) are
   missing.  Code kept byte-identical; comments only.  Overall shape, as
   far as the visible code shows:
     1. Find the first task-reduction clause; bail out if none.
     2. For worksharing/scope codes, find an enclosing cancellable
        parallel (CANCELLABLE).
     3. Build a RECORD_TYPE with one data field + one bool field per
        reduction (two passes: constant-size then variable-size types).
     4. Emit into *START an array AVAR in the libgomp task-reduction
        registration format (documented in libgomp/task.c) and the
        register call; record offsets in ctx->task_reduction_map.
     5. Emit into *END a loop over threads that merges each initialized
        private copy into the original and runs destructors, then the
        unregister call.  */
9116 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
9117 gimple_seq
*start
, gimple_seq
*end
)
9119 enum omp_clause_code ccode
9120 = (code
== OMP_TASKGROUP
9121 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
9122 tree cancellable
= NULL_TREE
;
9123 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
9124 if (clauses
== NULL_TREE
)
/* For worksharing/scope, locate an enclosing cancellable parallel so the
   merge code can be skipped on cancellation.  */
9126 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9128 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
9129 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
9130 && outer
->cancellable
)
9132 cancellable
= error_mark_node
;
9135 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
9136 && gimple_code (outer
->stmt
) != GIMPLE_OMP_SCOPE
)
/* Build the per-thread record type; first two fields (field/ifield)
   presumably hold bookkeeping data — their types are lost to the
   extraction (original lines 9141-9148 partly missing).  */
9139 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
9140 tree
*last
= &TYPE_FIELDS (record_type
);
9144 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
9146 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
9149 DECL_CHAIN (field
) = ifield
;
9150 last
= &DECL_CHAIN (ifield
);
9151 DECL_CONTEXT (field
) = record_type
;
9152 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
9153 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
9154 DECL_CONTEXT (ifield
) = record_type
;
9155 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
9156 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
/* Pass 0: constant-sized reductions; pass 1: variable-sized ones.
   Each clause contributes a data field plus a bool "initialized" field.  */
9158 for (int pass
= 0; pass
< 2; pass
++)
9160 tree decl
, type
, next
;
9161 for (tree c
= clauses
;
9162 omp_task_reduction_iterate (pass
, code
, ccode
,
9163 &c
, &decl
, &type
, &next
); c
= next
)
9166 tree new_type
= type
;
9168 new_type
= remap_type (type
, &ctx
->outer
->cb
);
9170 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
9171 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
9173 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
9175 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
9176 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
9177 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
9180 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
9181 DECL_CONTEXT (field
) = record_type
;
9182 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
9183 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
9185 last
= &DECL_CHAIN (field
);
9187 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
9189 DECL_CONTEXT (bfield
) = record_type
;
9190 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
9191 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
9193 last
= &DECL_CHAIN (bfield
);
9197 layout_type (record_type
);
9199 /* Build up an array which registers with the runtime all the reductions
9200 and deregisters them at the end. Format documented in libgomp/task.c. */
9201 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
9202 tree avar
= create_tmp_var_raw (atype
);
9203 gimple_add_tmp_var (avar
);
9204 TREE_ADDRESSABLE (avar
) = 1;
/* avar[0] = number of reductions.  */
9205 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
9206 NULL_TREE
, NULL_TREE
);
9207 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
9208 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9209 gimple_seq seq
= NULL
;
/* avar[1] = record size rounded up to the cache-line size.  */
9210 tree sz
= fold_convert (pointer_sized_int_node
,
9211 TYPE_SIZE_UNIT (record_type
));
9213 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
9214 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
9215 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
9216 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
9217 ctx
->task_reductions
.create (1 + cnt
);
9218 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
9219 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
9221 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
9222 gimple_seq_add_seq (start
, seq
);
9223 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
9224 NULL_TREE
, NULL_TREE
);
9225 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
/* avar[2] = required alignment.  */
9226 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
9227 NULL_TREE
, NULL_TREE
);
9228 t
= build_int_cst (pointer_sized_int_node
,
9229 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
9230 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
/* avar[3] / avar[4]: runtime-owned slots, preset to -1 / 0.  */
9231 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
9232 NULL_TREE
, NULL_TREE
);
9233 t
= build_int_cst (pointer_sized_int_node
, -1);
9234 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9235 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
9236 NULL_TREE
, NULL_TREE
);
9237 t
= build_int_cst (pointer_sized_int_node
, 0);
9238 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9240 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9241 and for each task reduction checks a bool right after the private variable
9242 within that thread's chunk; if the bool is clear, it hasn't been
9243 initialized and thus isn't going to be reduced nor destructed, otherwise
9244 reduce and destruct it. */
9245 tree idx
= create_tmp_var (size_type_node
);
9246 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
9247 tree num_thr_sz
= create_tmp_var (size_type_node
);
9248 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
9249 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
9250 tree lab3
= NULL_TREE
, lab7
= NULL_TREE
;
9252 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9254 /* For worksharing constructs or scope, only perform it in the master
9255 thread, with the exception of cancelled implicit barriers - then only
9256 handle the current thread. */
9257 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
9258 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9259 tree thr_num
= create_tmp_var (integer_type_node
);
9260 g
= gimple_build_call (t
, 0);
9261 gimple_call_set_lhs (g
, thr_num
);
9262 gimple_seq_add_stmt (end
, g
);
/* Cancellable case: if the _reductemp_ flag is set, reduce only the
   current thread's chunk.  */
9266 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9267 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9268 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9269 if (code
== OMP_FOR
)
9270 c
= gimple_omp_for_clauses (ctx
->stmt
);
9271 else if (code
== OMP_SECTIONS
)
9272 c
= gimple_omp_sections_clauses (ctx
->stmt
);
9273 else /* if (code == OMP_SCOPE) */
9274 c
= gimple_omp_scope_clauses (ctx
->stmt
);
9275 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
9277 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
9279 gimple_seq_add_stmt (end
, g
);
9280 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9281 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
9282 gimple_seq_add_stmt (end
, g
);
9283 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
9284 build_one_cst (TREE_TYPE (idx
)));
9285 gimple_seq_add_stmt (end
, g
);
9286 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
9287 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
/* Non-master threads skip the merge loop entirely.  */
9289 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
9290 gimple_seq_add_stmt (end
, g
);
9291 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
9293 if (code
!= OMP_PARALLEL
)
9295 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
9296 tree num_thr
= create_tmp_var (integer_type_node
);
9297 g
= gimple_build_call (t
, 0);
9298 gimple_call_set_lhs (g
, num_thr
);
9299 gimple_seq_add_stmt (end
, g
);
9300 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
9301 gimple_seq_add_stmt (end
, g
);
9303 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
/* For parallel, the thread count was stashed in the _reductemp_ decl.  */
9307 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
9308 OMP_CLAUSE__REDUCTEMP_
);
9309 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
9310 t
= fold_convert (size_type_node
, t
);
9311 gimplify_assign (num_thr_sz
, t
, end
);
/* DATA walks the per-thread chunks; base comes from avar[2] as filled in
   by the runtime.  */
9313 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
9314 NULL_TREE
, NULL_TREE
);
9315 tree data
= create_tmp_var (pointer_sized_int_node
);
9316 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
9317 if (code
== OMP_TASKLOOP
)
9319 lab7
= create_artificial_label (UNKNOWN_LOCATION
);
9320 g
= gimple_build_cond (NE_EXPR
, data
,
9321 build_zero_cst (pointer_sized_int_node
),
9323 gimple_seq_add_stmt (end
, g
);
/* lab1: top of the per-thread merge loop.  */
9325 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
9327 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
9328 ptr
= create_tmp_var (build_pointer_type (record_type
));
9330 ptr
= create_tmp_var (ptr_type_node
);
9331 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
/* Skip the two bookkeeping fields; walk data/bool field pairs in step
   with the clauses.  */
9333 tree field
= TYPE_FIELDS (record_type
);
9336 field
= DECL_CHAIN (DECL_CHAIN (field
));
9337 for (int pass
= 0; pass
< 2; pass
++)
9339 tree decl
, type
, next
;
9340 for (tree c
= clauses
;
9341 omp_task_reduction_iterate (pass
, code
, ccode
,
9342 &c
, &decl
, &type
, &next
); c
= next
)
9344 tree var
= decl
, ref
;
/* Array-section reduction: peel the MEM_REF down to the base decl.  */
9345 if (TREE_CODE (decl
) == MEM_REF
)
9347 var
= TREE_OPERAND (var
, 0);
9348 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
9349 var
= TREE_OPERAND (var
, 0);
9351 if (TREE_CODE (var
) == ADDR_EXPR
)
9352 var
= TREE_OPERAND (var
, 0);
9353 else if (TREE_CODE (var
) == INDIRECT_REF
)
9354 var
= TREE_OPERAND (var
, 0);
9355 tree orig_var
= var
;
9356 if (is_variable_sized (var
))
9358 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
9359 var
= DECL_VALUE_EXPR (var
);
9360 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
9361 var
= TREE_OPERAND (var
, 0);
9362 gcc_assert (DECL_P (var
));
9364 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
9365 if (orig_var
!= var
)
9366 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
9367 else if (TREE_CODE (v
) == ADDR_EXPR
)
9368 t
= build_fold_addr_expr (t
);
9369 else if (TREE_CODE (v
) == INDIRECT_REF
)
9370 t
= build_fold_indirect_ref (t
);
9371 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
9373 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
9374 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
9375 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
9377 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
9378 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
9379 fold_convert (size_type_node
,
9380 TREE_OPERAND (decl
, 1)));
/* Scalar case: take the address of the (possibly outer-context) decl.  */
9384 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
9385 if (!omp_privatize_by_reference (decl
))
9386 t
= build_fold_addr_expr (t
);
9388 t
= fold_convert (pointer_sized_int_node
, t
);
9390 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
9391 gimple_seq_add_seq (start
, seq
);
/* avar[7 + cnt*3] = address of the original variable.  */
9392 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9393 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
9394 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
/* avar[7 + cnt*3 + 1] = offset of this reduction's field within the
   per-thread record; also recorded in task_reduction_map.  */
9395 t
= unshare_expr (byte_position (field
));
9396 t
= fold_convert (pointer_sized_int_node
, t
);
9397 ctx
->task_reduction_map
->put (c
, cnt
);
9398 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
9401 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
9402 gimple_seq_add_seq (start
, seq
);
9403 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9404 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
9405 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9407 tree bfield
= DECL_CHAIN (field
);
9409 if (code
== OMP_PARALLEL
9411 || code
== OMP_SECTIONS
9412 || code
== OMP_SCOPE
)
9413 /* In parallel, worksharing or scope all threads unconditionally
9414 initialize all their task reduction private variables. */
9415 cond
= boolean_true_node
;
9416 else if (TREE_TYPE (ptr
) == ptr_type_node
)
9418 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
9419 unshare_expr (byte_position (bfield
)));
9421 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
9422 gimple_seq_add_seq (end
, seq
);
9423 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
9424 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
9425 build_int_cst (pbool
, 0));
9428 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
9429 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
/* Skip merge/destruction when the bool says the private copy was never
   initialized.  */
9430 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9431 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
9432 tree condv
= create_tmp_var (boolean_type_node
);
9433 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
9434 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
9436 gimple_seq_add_stmt (end
, g
);
9437 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
9438 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
9440 /* If this reduction doesn't need destruction and parallel
9441 has been cancelled, there is nothing to do for this
9442 reduction, so jump around the merge operation. */
9443 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9444 g
= gimple_build_cond (NE_EXPR
, cancellable
,
9445 build_zero_cst (TREE_TYPE (cancellable
)),
9447 gimple_seq_add_stmt (end
, g
);
9448 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
/* NEW_VAR = this thread's private copy of the reduction variable.  */
9452 if (TREE_TYPE (ptr
) == ptr_type_node
)
9454 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
9455 unshare_expr (byte_position (field
)));
9457 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
9458 gimple_seq_add_seq (end
, seq
);
9459 tree pbool
= build_pointer_type (TREE_TYPE (field
));
9460 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
9461 build_int_cst (pbool
, 0));
9464 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
9465 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
9467 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
9468 if (TREE_CODE (decl
) != MEM_REF
9469 && omp_privatize_by_reference (decl
))
9470 ref
= build_simple_mem_ref (ref
);
9471 /* reduction(-:var) sums up the partial results, so it acts
9472 identically to reduction(+:var). */
9473 if (rcode
== MINUS_EXPR
)
/* Array-section reduction: loop element-by-element over the section,
   merging NEW_VAR into REF.  */
9475 if (TREE_CODE (decl
) == MEM_REF
)
9477 tree type
= TREE_TYPE (new_var
);
9478 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
9479 tree i
= create_tmp_var (TREE_TYPE (v
));
9480 tree ptype
= build_pointer_type (TREE_TYPE (type
));
9483 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
9484 tree vv
= create_tmp_var (TREE_TYPE (v
));
9485 gimplify_assign (vv
, v
, start
);
9488 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9489 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
9490 new_var
= build_fold_addr_expr (new_var
);
9491 new_var
= fold_convert (ptype
, new_var
);
9492 ref
= fold_convert (ptype
, ref
);
9493 tree m
= create_tmp_var (ptype
);
9494 gimplify_assign (m
, new_var
, end
);
9496 m
= create_tmp_var (ptype
);
9497 gimplify_assign (m
, ref
, end
);
9499 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
9500 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
9501 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
9502 gimple_seq_add_stmt (end
, gimple_build_label (body
));
9503 tree priv
= build_simple_mem_ref (new_var
);
9504 tree out
= build_simple_mem_ref (ref
);
9505 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
/* User-defined reduction (declare reduction): splice the combiner via
   placeholder value-exprs, then run the clause dtor.  */
9507 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9508 tree decl_placeholder
9509 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
9510 tree lab6
= NULL_TREE
;
9513 /* If this reduction needs destruction and parallel
9514 has been cancelled, jump around the merge operation
9515 to the destruction. */
9516 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9517 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9518 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
9519 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
9521 gimple_seq_add_stmt (end
, g
);
9522 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9524 SET_DECL_VALUE_EXPR (placeholder
, out
);
9525 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9526 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
9527 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
9528 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
9529 gimple_seq_add_seq (end
,
9530 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9531 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9532 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9534 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
9535 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
9538 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9539 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
9542 gimple_seq tseq
= NULL
;
9543 gimplify_stmt (&x
, &tseq
);
9544 gimple_seq_add_seq (end
, tseq
);
/* Built-in combiner: out = out RCODE priv.  */
9549 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
9550 out
= unshare_expr (out
);
9551 gimplify_assign (out
, x
, end
);
/* Advance both element pointers and the index; loop while i <= max.  */
9554 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
9555 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9556 gimple_seq_add_stmt (end
, g
);
9557 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
9558 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9559 gimple_seq_add_stmt (end
, g
);
9560 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
9561 build_int_cst (TREE_TYPE (i
), 1));
9562 gimple_seq_add_stmt (end
, g
);
9563 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
9564 gimple_seq_add_stmt (end
, g
);
9565 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
/* Scalar user-defined reduction.  */
9567 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9569 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9570 tree oldv
= NULL_TREE
;
9571 tree lab6
= NULL_TREE
;
9574 /* If this reduction needs destruction and parallel
9575 has been cancelled, jump around the merge operation
9576 to the destruction. */
9577 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9578 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9579 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
9580 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
9582 gimple_seq_add_stmt (end
, g
);
9583 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9585 if (omp_privatize_by_reference (decl
)
9586 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
9588 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
9589 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
9590 tree refv
= create_tmp_var (TREE_TYPE (ref
));
9591 gimplify_assign (refv
, ref
, end
);
9592 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
9593 SET_DECL_VALUE_EXPR (placeholder
, ref
);
9594 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
/* Temporarily redirect the decl's value-expr at NEW_VAR while lowering
   the combiner; restore it afterwards.  */
9595 tree d
= maybe_lookup_decl (decl
, ctx
);
9597 if (DECL_HAS_VALUE_EXPR_P (d
))
9598 oldv
= DECL_VALUE_EXPR (d
);
9599 if (omp_privatize_by_reference (var
))
9601 tree v
= fold_convert (TREE_TYPE (d
),
9602 build_fold_addr_expr (new_var
));
9603 SET_DECL_VALUE_EXPR (d
, v
);
9606 SET_DECL_VALUE_EXPR (d
, new_var
);
9607 DECL_HAS_VALUE_EXPR_P (d
) = 1;
9608 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
9610 SET_DECL_VALUE_EXPR (d
, oldv
);
9613 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
9614 DECL_HAS_VALUE_EXPR_P (d
) = 0;
9616 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9617 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9618 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9619 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
9621 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9622 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
9625 gimple_seq tseq
= NULL
;
9626 gimplify_stmt (&x
, &tseq
);
9627 gimple_seq_add_seq (end
, tseq
);
/* Scalar built-in combiner: ref = ref RCODE new_var.  */
9632 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
9633 ref
= unshare_expr (ref
);
9634 gimplify_assign (ref
, x
, end
);
9636 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
/* Move on to the next data/bool field pair.  */
9638 field
= DECL_CHAIN (bfield
);
/* Emit the registration call into *START.  */
9642 if (code
== OMP_TASKGROUP
)
9644 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
9645 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
9646 gimple_seq_add_stmt (start
, g
);
/* Other constructs pass &avar through the _reductemp_ clause instead.  */
9651 if (code
== OMP_FOR
)
9652 c
= gimple_omp_for_clauses (ctx
->stmt
);
9653 else if (code
== OMP_SECTIONS
)
9654 c
= gimple_omp_sections_clauses (ctx
->stmt
);
9655 else if (code
== OMP_SCOPE
)
9656 c
= gimple_omp_scope_clauses (ctx
->stmt
);
9658 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
9659 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
9660 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
9661 build_fold_addr_expr (avar
));
9662 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
/* Merge-loop latch: advance to the next thread's chunk, loop to lab1.  */
9665 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
9666 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
9668 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
9669 gimple_seq_add_stmt (end
, g
);
9670 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
/* Emit the unregister call into *END.  */
9671 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9673 enum built_in_function bfn
9674 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
9675 t
= builtin_decl_explicit (bfn
);
9676 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
9680 arg
= create_tmp_var (c_bool_type
);
9681 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
9685 arg
= build_int_cst (c_bool_type
, 0);
9686 g
= gimple_build_call (t
, 1, arg
);
9690 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
9691 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
9693 gimple_seq_add_stmt (end
, g
);
9695 gimple_seq_add_stmt (end
, gimple_build_label (lab7
));
/* Finally clobber AVAR (volatile constructor store) so its slot can be
   reused after the construct.  */
9696 t
= build_constructor (atype
, NULL
);
9697 TREE_THIS_VOLATILE (t
) = 1;
9698 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
9701 /* Expand code for an OpenMP taskgroup directive. */
/* NOTE(review): mangled extraction — leading integers are original
   gcc/omp-low.cc line numbers; a few lines (braces, blanks, the
   declarations around 9707-9708 and the second GOMP_taskgroup_start
   call argument line 9719) are missing.  Code kept byte-identical.  */
/* Lowers GIMPLE_OMP_TASKGROUP: wraps it in a bind, calls
   GOMP_taskgroup_start before the body, registers/deregisters any
   task reductions (dseq holds the teardown), then lowers the body.  */
9704 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9706 gimple
*stmt
= gsi_stmt (*gsi_p
);
9709 gimple_seq dseq
= NULL
;
9710 tree block
= make_node (BLOCK
);
9712 bind
= gimple_build_bind (NULL
, NULL
, block
);
9713 gsi_replace (gsi_p
, bind
, true);
9714 gimple_bind_add_stmt (bind
, stmt
);
9716 push_gimplify_context ();
9718 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
9720 gimple_bind_add_stmt (bind
, x
);
/* Registration goes into the bind body, teardown into dseq (appended
   after the body below).  */
9722 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
9723 gimple_omp_taskgroup_clauses (stmt
),
9724 gimple_bind_body_ptr (bind
), &dseq
);
9726 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9727 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9728 gimple_omp_set_body (stmt
, NULL
);
9730 gimple_bind_add_seq (bind
, dseq
);
9732 pop_gimplify_context (bind
);
9734 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9735 BLOCK_VARS (block
) = ctx
->block_vars
;
9739 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
/* NOTE(review): mangled extraction — leading integers are original
   gcc/omp-low.cc line numbers; a number of lines are missing (e.g. the
   ctx parameter at 9743, several loop/guard lines in the folding
   algorithm).  Code kept byte-identical; comments only.
   Two phases are visible: (1) merge adjacent `ordered depend(sink:...)`
   statements into one; (2) fold all sink vectors into a single
   canonical vector (GCD of first elements, min of the rest), dropping
   invalid or redundant ones.  */
9742 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
9745 struct omp_for_data fd
;
/* Only meaningful directly inside a GIMPLE_OMP_FOR.  */
9746 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
9749 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
9750 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
9751 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
9755 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9756 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
9757 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
9758 && OMP_CLAUSE_DOACROSS_KIND (c
) == OMP_CLAUSE_DOACROSS_SINK
)
9760 /* Merge depend clauses from multiple adjacent
9761 #pragma omp ordered depend(sink:...) constructs
9762 into one #pragma omp ordered depend(sink:...), so that
9763 we can optimize them together. */
9764 gimple_stmt_iterator gsi
= *gsi_p
;
9766 while (!gsi_end_p (gsi
))
9768 gimple
*stmt
= gsi_stmt (gsi
);
9769 if (is_gimple_debug (stmt
)
9770 || gimple_code (stmt
) == GIMPLE_NOP
)
9775 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
9777 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
9778 c
= gimple_omp_ordered_clauses (ord_stmt2
);
9780 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DOACROSS
9781 || OMP_CLAUSE_DOACROSS_KIND (c
) != OMP_CLAUSE_DOACROSS_SINK
)
/* Splice the following ordered's clauses onto ours and drop it.  */
9784 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
9786 gsi_remove (&gsi
, true);
9790 /* Canonicalize sink dependence clauses into one folded clause if
9793 The basic algorithm is to create a sink vector whose first
9794 element is the GCD of all the first elements, and whose remaining
9795 elements are the minimum of the subsequent columns.
9797 We ignore dependence vectors whose first element is zero because
9798 such dependencies are known to be executed by the same thread.
9800 We take into account the direction of the loop, so a minimum
9801 becomes a maximum if the loop is iterating forwards. We also
9802 ignore sink clauses where the loop direction is unknown, or where
9803 the offsets are clearly invalid because they are not a multiple
9804 of the loop increment.
9808 #pragma omp for ordered(2)
9809 for (i=0; i < N; ++i)
9810 for (j=0; j < M; ++j)
9812 #pragma omp ordered \
9813 depend(sink:i-8,j-2) \
9814 depend(sink:i,j-1) \ // Completely ignored because i+0.
9815 depend(sink:i-4,j-3) \
9816 depend(sink:i-6,j-4)
9817 #pragma omp ordered depend(source)
9822 depend(sink:-gcd(8,4,6),-min(2,3,4))
9827 /* FIXME: Computing GCD's where the first element is zero is
9828 non-trivial in the presence of collapsed loops. Do this later. */
9829 if (fd
.collapse
> 1)
/* folded_deps[0..len-1]: candidate folded vector;
   folded_deps[len..2*len-2]: scratch copy of the current clause.  */
9832 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
9834 /* wide_int is not a POD so it must be default-constructed. */
9835 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
9836 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
9838 tree folded_dep
= NULL_TREE
;
9839 /* TRUE if the first dimension's offset is negative. */
9840 bool neg_offset_p
= false;
9842 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9844 while ((c
= *list_p
) != NULL
)
9846 bool remove
= false;
9848 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
);
9849 if (OMP_CLAUSE_DOACROSS_KIND (c
) != OMP_CLAUSE_DOACROSS_SINK
)
9850 goto next_ordered_clause
;
/* Walk the TREE_LIST of per-dimension offsets for this sink clause.  */
9853 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
9854 vec
&& TREE_CODE (vec
) == TREE_LIST
;
9855 vec
= TREE_CHAIN (vec
), ++i
)
9857 gcc_assert (i
< len
);
9859 /* omp_extract_for_data has canonicalized the condition. */
9860 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
9861 || fd
.loops
[i
].cond_code
== GT_EXPR
);
9862 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
9863 bool maybe_lexically_later
= true;
9865 /* While the committee makes up its mind, bail if we have any
9866 non-constant steps. */
9867 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
9868 goto lower_omp_ordered_ret
;
9870 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
9871 if (POINTER_TYPE_P (itype
))
9873 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
9874 TYPE_PRECISION (itype
),
9877 /* Ignore invalid offsets that are not multiples of the step. */
9878 if (!wi::multiple_of_p (wi::abs (offset
),
9879 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
9882 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
9883 "ignoring sink clause with offset that is not "
9884 "a multiple of the loop step");
9886 goto next_ordered_clause
;
9889 /* Calculate the first dimension. The first dimension of
9890 the folded dependency vector is the GCD of the first
9891 elements, while ignoring any first elements whose offset
9895 /* Ignore dependence vectors whose first dimension is 0. */
9899 goto next_ordered_clause
;
/* Signed first offsets must point against the iteration direction.  */
9903 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
9905 error_at (OMP_CLAUSE_LOCATION (c
),
9906 "first offset must be in opposite direction "
9907 "of loop iterations");
9908 goto lower_omp_ordered_ret
;
9912 neg_offset_p
= forward
;
9913 /* Initialize the first time around. */
9914 if (folded_dep
== NULL_TREE
)
9917 folded_deps
[0] = offset
;
9920 folded_deps
[0] = wi::gcd (folded_deps
[0],
9924 /* Calculate minimum for the remaining dimensions. */
9927 folded_deps
[len
+ i
- 1] = offset
;
9928 if (folded_dep
== c
)
9929 folded_deps
[i
] = offset
;
9930 else if (maybe_lexically_later
9931 && !wi::eq_p (folded_deps
[i
], offset
))
9933 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
/* This clause is lexically later: adopt its tail dimensions from the
   scratch copy.  */
9937 for (j
= 1; j
<= i
; j
++)
9938 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
9941 maybe_lexically_later
= false;
9945 gcc_assert (i
== len
);
9949 next_ordered_clause
:
9951 *list_p
= OMP_CLAUSE_CHAIN (c
);
9953 list_p
= &OMP_CLAUSE_CHAIN (c
);
/* Write the folded vector back into the surviving clause (negating the
   first element when the loop iterates forward).  */
9959 folded_deps
[0] = -folded_deps
[0];
9961 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
9962 if (POINTER_TYPE_P (itype
))
9965 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
9966 = wide_int_to_tree (itype
, folded_deps
[0]);
9967 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
9968 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
9971 lower_omp_ordered_ret
:
9973 /* Ordered without clauses is #pragma omp threads, while we want
9974 a nop instead if we remove all clauses. */
9975 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
9976 gsi_replace (gsi_p
, gimple_build_nop (), true);
9980 /* Expand code for an OpenMP ordered directive. */
9983 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9986 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
9987 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
9990 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9992 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9995 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
9996 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9997 OMP_CLAUSE_THREADS
);
9999 if (gimple_omp_ordered_standalone_p (ord_stmt
))
10001 /* FIXME: This is needs to be moved to the expansion to verify various
10002 conditions only testable on cfg with dominators computed, and also
10003 all the depend clauses to be merged still might need to be available
10004 for the runtime checks. */
10006 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
10010 push_gimplify_context ();
10012 block
= make_node (BLOCK
);
10013 bind
= gimple_build_bind (NULL
, NULL
, block
);
10014 gsi_replace (gsi_p
, bind
, true);
10015 gimple_bind_add_stmt (bind
, stmt
);
10019 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
10020 build_int_cst (NULL_TREE
, threads
));
10021 cfun
->has_simduid_loops
= true;
10024 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
10026 gimple_bind_add_stmt (bind
, x
);
10028 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
10031 counter
= create_tmp_var (integer_type_node
);
10032 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
10033 gimple_call_set_lhs (g
, counter
);
10034 gimple_bind_add_stmt (bind
, g
);
10036 body
= create_artificial_label (UNKNOWN_LOCATION
);
10037 test
= create_artificial_label (UNKNOWN_LOCATION
);
10038 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
10040 tree simt_pred
= create_tmp_var (integer_type_node
);
10041 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
10042 gimple_call_set_lhs (g
, simt_pred
);
10043 gimple_bind_add_stmt (bind
, g
);
10045 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
10046 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
10047 gimple_bind_add_stmt (bind
, g
);
10049 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
10051 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10052 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
10053 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
10054 gimple_omp_set_body (stmt
, NULL
);
10058 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
10059 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
10060 gimple_bind_add_stmt (bind
, g
);
10062 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
10063 tree nonneg
= create_tmp_var (integer_type_node
);
10064 gimple_seq tseq
= NULL
;
10065 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
10066 gimple_bind_add_seq (bind
, tseq
);
10068 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
10069 gimple_call_set_lhs (g
, nonneg
);
10070 gimple_bind_add_stmt (bind
, g
);
10072 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
10073 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
10074 gimple_bind_add_stmt (bind
, g
);
10076 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
10079 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
10080 build_int_cst (NULL_TREE
, threads
));
10082 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
10084 gimple_bind_add_stmt (bind
, x
);
10086 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
10088 pop_gimplify_context (bind
);
10090 gimple_bind_append_vars (bind
, ctx
->block_vars
);
10091 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
10095 /* Expand code for an OpenMP scan directive and the structured block
10096 before the scan directive. */
10099 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10101 gimple
*stmt
= gsi_stmt (*gsi_p
);
10103 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
10104 tree lane
= NULL_TREE
;
10105 gimple_seq before
= NULL
;
10106 omp_context
*octx
= ctx
->outer
;
10108 if (octx
->scan_exclusive
&& !has_clauses
)
10110 gimple_stmt_iterator gsi2
= *gsi_p
;
10112 gimple
*stmt2
= gsi_stmt (gsi2
);
10113 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10114 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10115 the one with exclusive clause(s), comes first. */
10117 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
10118 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
10120 gsi_remove (gsi_p
, false);
10121 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
10122 ctx
= maybe_lookup_ctx (stmt2
);
10124 lower_omp_scan (gsi_p
, ctx
);
10129 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
10130 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
10131 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
10132 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
10133 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
10134 && !gimple_omp_for_combined_p (octx
->stmt
));
10135 bool is_for_simd
= is_simd
&& gimple_omp_for_combined_into_p (octx
->stmt
);
10136 if (is_for_simd
&& octx
->for_simd_scan_phase
)
10139 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
10140 OMP_CLAUSE__SIMDUID_
))
10142 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
10143 lane
= create_tmp_var (unsigned_type_node
);
10144 tree t
= build_int_cst (integer_type_node
,
10146 : octx
->scan_inclusive
? 2 : 3);
10148 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
10149 gimple_call_set_lhs (g
, lane
);
10150 gimple_seq_add_stmt (&before
, g
);
10153 if (is_simd
|| is_for
)
10155 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
10156 c
; c
= OMP_CLAUSE_CHAIN (c
))
10157 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10158 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
10160 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10161 tree var
= OMP_CLAUSE_DECL (c
);
10162 tree new_var
= lookup_decl (var
, octx
);
10163 tree val
= new_var
;
10164 tree var2
= NULL_TREE
;
10165 tree var3
= NULL_TREE
;
10166 tree var4
= NULL_TREE
;
10167 tree lane0
= NULL_TREE
;
10168 tree new_vard
= new_var
;
10169 if (omp_privatize_by_reference (var
))
10171 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
10174 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
10176 val
= DECL_VALUE_EXPR (new_vard
);
10177 if (new_vard
!= new_var
)
10179 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
10180 val
= TREE_OPERAND (val
, 0);
10182 if (TREE_CODE (val
) == ARRAY_REF
10183 && VAR_P (TREE_OPERAND (val
, 0)))
10185 tree v
= TREE_OPERAND (val
, 0);
10186 if (lookup_attribute ("omp simd array",
10187 DECL_ATTRIBUTES (v
)))
10189 val
= unshare_expr (val
);
10190 lane0
= TREE_OPERAND (val
, 1);
10191 TREE_OPERAND (val
, 1) = lane
;
10192 var2
= lookup_decl (v
, octx
);
10193 if (octx
->scan_exclusive
)
10194 var4
= lookup_decl (var2
, octx
);
10196 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10197 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
10200 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
10201 var2
, lane
, NULL_TREE
, NULL_TREE
);
10202 TREE_THIS_NOTRAP (var2
) = 1;
10203 if (octx
->scan_exclusive
)
10205 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
10206 var4
, lane
, NULL_TREE
,
10208 TREE_THIS_NOTRAP (var4
) = 1;
10219 var2
= build_outer_var_ref (var
, octx
);
10220 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10222 var3
= maybe_lookup_decl (new_vard
, octx
);
10223 if (var3
== new_vard
|| var3
== NULL_TREE
)
10225 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
10227 var4
= maybe_lookup_decl (var3
, octx
);
10228 if (var4
== var3
|| var4
== NULL_TREE
)
10230 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
10241 && octx
->scan_exclusive
10243 && var4
== NULL_TREE
)
10244 var4
= create_tmp_var (TREE_TYPE (val
));
10246 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10248 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
10253 /* If we've added a separate identity element
10254 variable, copy it over into val. */
10255 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10257 gimplify_and_add (x
, &before
);
10259 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
10261 /* Otherwise, assign to it the identity element. */
10262 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
10264 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10265 tree ref
= build_outer_var_ref (var
, octx
);
10266 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10267 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10270 if (new_vard
!= new_var
)
10271 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10272 SET_DECL_VALUE_EXPR (new_vard
, val
);
10274 SET_DECL_VALUE_EXPR (placeholder
, ref
);
10275 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10276 lower_omp (&tseq
, octx
);
10278 SET_DECL_VALUE_EXPR (new_vard
, x
);
10279 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10280 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10281 gimple_seq_add_seq (&before
, tseq
);
10283 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
10289 if (octx
->scan_exclusive
)
10291 tree v4
= unshare_expr (var4
);
10292 tree v2
= unshare_expr (var2
);
10293 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
10294 gimplify_and_add (x
, &before
);
10296 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10297 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10298 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10300 if (x
&& new_vard
!= new_var
)
10301 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
10303 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
10304 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10305 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10306 lower_omp (&tseq
, octx
);
10307 gimple_seq_add_seq (&before
, tseq
);
10308 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
10310 SET_DECL_VALUE_EXPR (new_vard
, x
);
10311 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10312 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10313 if (octx
->scan_inclusive
)
10315 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10317 gimplify_and_add (x
, &before
);
10319 else if (lane0
== NULL_TREE
)
10321 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10323 gimplify_and_add (x
, &before
);
10331 /* input phase. Set val to initializer before
10333 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
10334 gimplify_assign (val
, x
, &before
);
10339 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
10340 if (code
== MINUS_EXPR
)
10343 tree x
= build2 (code
, TREE_TYPE (var2
),
10344 unshare_expr (var2
), unshare_expr (val
));
10345 if (octx
->scan_inclusive
)
10347 gimplify_assign (unshare_expr (var2
), x
, &before
);
10348 gimplify_assign (val
, var2
, &before
);
10352 gimplify_assign (unshare_expr (var4
),
10353 unshare_expr (var2
), &before
);
10354 gimplify_assign (var2
, x
, &before
);
10355 if (lane0
== NULL_TREE
)
10356 gimplify_assign (val
, var4
, &before
);
10360 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
10362 tree vexpr
= unshare_expr (var4
);
10363 TREE_OPERAND (vexpr
, 1) = lane0
;
10364 if (new_vard
!= new_var
)
10365 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
10366 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
10370 if (is_simd
&& !is_for_simd
)
10372 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
10373 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
10374 gsi_replace (gsi_p
, gimple_build_nop (), true);
10377 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
10380 gimple_stmt_iterator gsi
= gsi_start (*gimple_omp_body_ptr (stmt
));
10381 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
10386 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10387 substitution of a couple of function calls. But in the NAMED case,
10388 requires that languages coordinate a symbol name. It is therefore
10389 best put here in common code. */
10391 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
10394 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10397 tree name
, lock
, unlock
;
10398 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
10400 location_t loc
= gimple_location (stmt
);
10403 name
= gimple_omp_critical_name (stmt
);
10408 if (!critical_name_mutexes
)
10409 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
10411 tree
*n
= critical_name_mutexes
->get (name
);
10416 decl
= create_tmp_var_raw (ptr_type_node
);
10418 new_str
= ACONCAT ((".gomp_critical_user_",
10419 IDENTIFIER_POINTER (name
), NULL
));
10420 DECL_NAME (decl
) = get_identifier (new_str
);
10421 TREE_PUBLIC (decl
) = 1;
10422 TREE_STATIC (decl
) = 1;
10423 DECL_COMMON (decl
) = 1;
10424 DECL_ARTIFICIAL (decl
) = 1;
10425 DECL_IGNORED_P (decl
) = 1;
10427 varpool_node::finalize_decl (decl
);
10429 critical_name_mutexes
->put (name
, decl
);
10434 /* If '#pragma omp critical' is inside offloaded region or
10435 inside function marked as offloadable, the symbol must be
10436 marked as offloadable too. */
10438 if (cgraph_node::get (current_function_decl
)->offloadable
)
10439 varpool_node::get_create (decl
)->offloadable
= 1;
10441 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
10442 if (is_gimple_omp_offloaded (octx
->stmt
))
10444 varpool_node::get_create (decl
)->offloadable
= 1;
10448 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
10449 lock
= build_call_expr_loc (loc
, lock
, 1,
10450 build_fold_addr_expr_loc (loc
, decl
));
10452 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
10453 unlock
= build_call_expr_loc (loc
, unlock
, 1,
10454 build_fold_addr_expr_loc (loc
, decl
));
10458 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
10459 lock
= build_call_expr_loc (loc
, lock
, 0);
10461 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
10462 unlock
= build_call_expr_loc (loc
, unlock
, 0);
10465 push_gimplify_context ();
10467 block
= make_node (BLOCK
);
10468 bind
= gimple_build_bind (NULL
, NULL
, block
);
10469 gsi_replace (gsi_p
, bind
, true);
10470 gimple_bind_add_stmt (bind
, stmt
);
10472 tbody
= gimple_bind_body (bind
);
10473 gimplify_and_add (lock
, &tbody
);
10474 gimple_bind_set_body (bind
, tbody
);
10476 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10477 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
10478 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
10479 gimple_omp_set_body (stmt
, NULL
);
10481 tbody
= gimple_bind_body (bind
);
10482 gimplify_and_add (unlock
, &tbody
);
10483 gimple_bind_set_body (bind
, tbody
);
10485 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
10487 pop_gimplify_context (bind
);
10488 gimple_bind_append_vars (bind
, ctx
->block_vars
);
10489 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
10492 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10493 for a lastprivate clause. Given a loop control predicate of (V
10494 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10495 is appended to *DLIST, iterator initialization is appended to
10496 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10497 to be emitted in a critical section. */
10500 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
10501 gimple_seq
*dlist
, gimple_seq
*clist
,
10502 struct omp_context
*ctx
)
10504 tree clauses
, cond
, vinit
;
10505 enum tree_code cond_code
;
10508 cond_code
= fd
->loop
.cond_code
;
10509 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
10511 /* When possible, use a strict equality expression. This can let VRP
10512 type optimizations deduce the value and remove a copy. */
10513 if (tree_fits_shwi_p (fd
->loop
.step
))
10515 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
10516 if (step
== 1 || step
== -1)
10517 cond_code
= EQ_EXPR
;
10520 tree n2
= fd
->loop
.n2
;
10521 if (fd
->collapse
> 1
10522 && TREE_CODE (n2
) != INTEGER_CST
10523 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
10525 struct omp_context
*taskreg_ctx
= NULL
;
10526 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
10528 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
10529 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
10530 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
10532 if (gimple_omp_for_combined_into_p (gfor
))
10534 gcc_assert (ctx
->outer
->outer
10535 && is_parallel_ctx (ctx
->outer
->outer
));
10536 taskreg_ctx
= ctx
->outer
->outer
;
10540 struct omp_for_data outer_fd
;
10541 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
10542 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
10545 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
10546 taskreg_ctx
= ctx
->outer
->outer
;
10548 else if (is_taskreg_ctx (ctx
->outer
))
10549 taskreg_ctx
= ctx
->outer
;
10553 tree taskreg_clauses
10554 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
10555 tree innerc
= omp_find_clause (taskreg_clauses
,
10556 OMP_CLAUSE__LOOPTEMP_
);
10557 gcc_assert (innerc
);
10558 int count
= fd
->collapse
;
10560 && fd
->last_nonrect
== fd
->first_nonrect
+ 1)
10561 if (tree v
= gimple_omp_for_index (fd
->for_stmt
, fd
->last_nonrect
))
10562 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
10564 for (i
= 0; i
< count
; i
++)
10566 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10567 OMP_CLAUSE__LOOPTEMP_
);
10568 gcc_assert (innerc
);
10570 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10571 OMP_CLAUSE__LOOPTEMP_
);
10573 n2
= fold_convert (TREE_TYPE (n2
),
10574 lookup_decl (OMP_CLAUSE_DECL (innerc
),
10578 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
10580 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
10582 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
10583 if (!gimple_seq_empty_p (stmts
))
10585 gimple_seq_add_seq (&stmts
, *dlist
);
10588 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10589 vinit
= fd
->loop
.n1
;
10590 if (cond_code
== EQ_EXPR
10591 && tree_fits_shwi_p (fd
->loop
.n2
)
10592 && ! integer_zerop (fd
->loop
.n2
))
10593 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
10595 vinit
= unshare_expr (vinit
);
10597 /* Initialize the iterator variable, so that threads that don't execute
10598 any iterations don't execute the lastprivate clauses by accident. */
10599 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
10603 /* OpenACC privatization.
10605 Or, in other words, *sharing* at the respective OpenACC level of
10608 From a correctness perspective, a non-addressable variable can't be accessed
10609 outside the current thread, so it can go in a (faster than shared memory)
10610 register -- though that register may need to be broadcast in some
10611 circumstances. A variable can only meaningfully be "shared" across workers
10612 or vector lanes if its address is taken, e.g. by a call to an atomic
10615 From an optimisation perspective, the answer might be fuzzier: maybe
10616 sometimes, using shared memory directly would be faster than
10620 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags
,
10621 const location_t loc
, const tree c
,
10624 const dump_user_location_t d_u_loc
10625 = dump_user_location_t::from_location_t (loc
);
10626 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10628 # pragma GCC diagnostic push
10629 # pragma GCC diagnostic ignored "-Wformat"
10631 dump_printf_loc (l_dump_flags
, d_u_loc
,
10632 "variable %<%T%> ", decl
);
10634 # pragma GCC diagnostic pop
10637 dump_printf (l_dump_flags
,
10639 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
10641 dump_printf (l_dump_flags
,
10642 "declared in block ");
10646 oacc_privatization_candidate_p (const location_t loc
, const tree c
,
10649 dump_flags_t l_dump_flags
= get_openacc_privatization_dump_flags ();
10651 /* There is some differentiation depending on block vs. clause. */
10656 if (res
&& !VAR_P (decl
))
10658 /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
10659 privatized into a new VAR_DECL. */
10660 gcc_checking_assert (TREE_CODE (decl
) != PARM_DECL
);
10664 if (dump_enabled_p ())
10666 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10667 dump_printf (l_dump_flags
,
10668 "potentially has improper OpenACC privatization level: %qs\n",
10669 get_tree_code_name (TREE_CODE (decl
)));
10673 if (res
&& block
&& TREE_STATIC (decl
))
10677 if (dump_enabled_p ())
10679 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10680 dump_printf (l_dump_flags
,
10681 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10686 if (res
&& block
&& DECL_EXTERNAL (decl
))
10690 if (dump_enabled_p ())
10692 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10693 dump_printf (l_dump_flags
,
10694 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10699 if (res
&& !TREE_ADDRESSABLE (decl
))
10703 if (dump_enabled_p ())
10705 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10706 dump_printf (l_dump_flags
,
10707 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10708 "not addressable");
10714 if (dump_enabled_p ())
10716 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10717 dump_printf (l_dump_flags
,
10718 "is candidate for adjusting OpenACC privatization level\n");
10722 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10724 print_generic_decl (dump_file
, decl
, dump_flags
);
10725 fprintf (dump_file
, "\n");
10731 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10735 oacc_privatization_scan_clause_chain (omp_context
*ctx
, tree clauses
)
10737 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10738 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
)
10740 tree decl
= OMP_CLAUSE_DECL (c
);
10742 tree new_decl
= lookup_decl (decl
, ctx
);
10744 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c
), c
,
10748 gcc_checking_assert
10749 (!ctx
->oacc_privatization_candidates
.contains (new_decl
));
10750 ctx
->oacc_privatization_candidates
.safe_push (new_decl
);
10754 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10758 oacc_privatization_scan_decl_chain (omp_context
*ctx
, tree decls
)
10760 for (tree decl
= decls
; decl
; decl
= DECL_CHAIN (decl
))
10762 tree new_decl
= lookup_decl (decl
, ctx
);
10763 gcc_checking_assert (new_decl
== decl
);
10765 if (!oacc_privatization_candidate_p (gimple_location (ctx
->stmt
), NULL
,
10769 gcc_checking_assert
10770 (!ctx
->oacc_privatization_candidates
.contains (new_decl
));
10771 ctx
->oacc_privatization_candidates
.safe_push (new_decl
);
10775 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10778 omp_find_scan (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
10779 struct walk_stmt_info
*wi
)
10781 gimple
*stmt
= gsi_stmt (*gsi_p
);
10783 *handled_ops_p
= true;
10784 switch (gimple_code (stmt
))
10788 case GIMPLE_OMP_FOR
:
10789 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
10790 && gimple_omp_for_combined_into_p (stmt
))
10791 *handled_ops_p
= false;
10794 case GIMPLE_OMP_SCAN
:
10795 *(gimple_stmt_iterator
*) (wi
->info
) = *gsi_p
;
10796 return integer_zero_node
;
10803 /* Helper function for lower_omp_for, add transformations for a worksharing
10804 loop with scan directives inside of it.
10805 For worksharing loop not combined with simd, transform:
10806 #pragma omp for reduction(inscan,+:r) private(i)
10807 for (i = 0; i < n; i = i + 1)
10812 #pragma omp scan inclusive(r)
10818 into two worksharing loops + code to merge results:
10820 num_threads = omp_get_num_threads ();
10821 thread_num = omp_get_thread_num ();
10822 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10827 // For UDRs this is UDR init, or if ctors are needed, copy from
10828 // var3 that has been constructed to contain the neutral element.
10832 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10833 // a shared array with num_threads elements and rprivb to a local array
10834 // number of elements equal to the number of (contiguous) iterations the
10835 // current thread will perform. controlb and controlp variables are
10836 // temporaries to handle deallocation of rprivb at the end of second
10838 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10839 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10840 for (i = 0; i < n; i = i + 1)
10843 // For UDRs this is UDR init or copy from var3.
10845 // This is the input phase from user code.
10849 // For UDRs this is UDR merge.
10851 // Rather than handing it over to the user, save to local thread's
10853 rprivb[ivar] = var2;
10854 // For exclusive scan, the above two statements are swapped.
10858 // And remember the final value from this thread's into the shared
10860 rpriva[(sizetype) thread_num] = var2;
10861 // If more than one thread, compute using Work-Efficient prefix sum
10862 // the inclusive parallel scan of the rpriva array.
10863 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10868 num_threadsu = (unsigned int) num_threads;
10869 thread_numup1 = (unsigned int) thread_num + 1;
10872 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10876 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10881 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10882 mul = REALPART_EXPR <cplx>;
10883 ovf = IMAGPART_EXPR <cplx>;
10884 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10887 andvm1 = andv + 4294967295;
10889 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10891 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10892 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10893 rpriva[l] = rpriva[l - k] + rpriva[l];
10895 if (down == 0) goto <D.2121>; else goto <D.2122>;
10903 if (k != 0) goto <D.2108>; else goto <D.2103>;
10905 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10907 // For UDRs this is UDR init or copy from var3.
10911 var2 = rpriva[thread_num - 1];
10914 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10915 reduction(inscan,+:r) private(i)
10916 for (i = 0; i < n; i = i + 1)
10919 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10920 r = var2 + rprivb[ivar];
10923 // This is the scan phase from user code.
10925 // Plus a bump of the iterator.
10931 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
10932 struct omp_for_data
*fd
, omp_context
*ctx
)
10934 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
10935 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
10937 gimple_seq body
= gimple_omp_body (stmt
);
10938 gimple_stmt_iterator input1_gsi
= gsi_none ();
10939 struct walk_stmt_info wi
;
10940 memset (&wi
, 0, sizeof (wi
));
10941 wi
.val_only
= true;
10942 wi
.info
= (void *) &input1_gsi
;
10943 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
10944 gcc_assert (!gsi_end_p (input1_gsi
));
10946 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
10947 gimple_stmt_iterator gsi
= input1_gsi
;
10949 gimple_stmt_iterator scan1_gsi
= gsi
;
10950 gimple
*scan_stmt1
= gsi_stmt (gsi
);
10951 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
10953 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
10954 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
10955 gimple_omp_set_body (input_stmt1
, NULL
);
10956 gimple_omp_set_body (scan_stmt1
, NULL
);
10957 gimple_omp_set_body (stmt
, NULL
);
10959 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
10960 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
10961 gimple_omp_set_body (stmt
, body
);
10962 gimple_omp_set_body (input_stmt1
, input_body
);
10964 gimple_stmt_iterator input2_gsi
= gsi_none ();
10965 memset (&wi
, 0, sizeof (wi
));
10966 wi
.val_only
= true;
10967 wi
.info
= (void *) &input2_gsi
;
10968 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
10969 gcc_assert (!gsi_end_p (input2_gsi
));
10971 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
10974 gimple_stmt_iterator scan2_gsi
= gsi
;
10975 gimple
*scan_stmt2
= gsi_stmt (gsi
);
10976 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
10977 gimple_omp_set_body (scan_stmt2
, scan_body
);
10979 gimple_stmt_iterator input3_gsi
= gsi_none ();
10980 gimple_stmt_iterator scan3_gsi
= gsi_none ();
10981 gimple_stmt_iterator input4_gsi
= gsi_none ();
10982 gimple_stmt_iterator scan4_gsi
= gsi_none ();
10983 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
10984 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
10985 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
10988 memset (&wi
, 0, sizeof (wi
));
10989 wi
.val_only
= true;
10990 wi
.info
= (void *) &input3_gsi
;
10991 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
10992 gcc_assert (!gsi_end_p (input3_gsi
));
10994 input_stmt3
= gsi_stmt (input3_gsi
);
10998 scan_stmt3
= gsi_stmt (gsi
);
10999 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
11001 memset (&wi
, 0, sizeof (wi
));
11002 wi
.val_only
= true;
11003 wi
.info
= (void *) &input4_gsi
;
11004 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
11005 gcc_assert (!gsi_end_p (input4_gsi
));
11007 input_stmt4
= gsi_stmt (input4_gsi
);
11011 scan_stmt4
= gsi_stmt (gsi
);
11012 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
11014 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
11015 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
11018 tree num_threads
= create_tmp_var (integer_type_node
);
11019 tree thread_num
= create_tmp_var (integer_type_node
);
11020 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
11021 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
11022 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
11023 gimple_call_set_lhs (g
, num_threads
);
11024 gimple_seq_add_stmt (body_p
, g
);
11025 g
= gimple_build_call (threadnum_decl
, 0);
11026 gimple_call_set_lhs (g
, thread_num
);
11027 gimple_seq_add_stmt (body_p
, g
);
11029 tree ivar
= create_tmp_var (sizetype
);
11030 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
11031 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
11032 tree k
= create_tmp_var (unsigned_type_node
);
11033 tree l
= create_tmp_var (unsigned_type_node
);
11035 gimple_seq clist
= NULL
, mdlist
= NULL
;
11036 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
11037 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
11038 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
11039 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
11040 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11041 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11042 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
11044 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
11045 tree var
= OMP_CLAUSE_DECL (c
);
11046 tree new_var
= lookup_decl (var
, ctx
);
11047 tree var3
= NULL_TREE
;
11048 tree new_vard
= new_var
;
11049 if (omp_privatize_by_reference (var
))
11050 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
11051 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
11053 var3
= maybe_lookup_decl (new_vard
, ctx
);
11054 if (var3
== new_vard
)
11058 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
11059 tree rpriva
= create_tmp_var (ptype
);
11060 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
11061 OMP_CLAUSE_DECL (nc
) = rpriva
;
11063 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11065 tree rprivb
= create_tmp_var (ptype
);
11066 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
11067 OMP_CLAUSE_DECL (nc
) = rprivb
;
11068 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
11070 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11072 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
11073 if (new_vard
!= new_var
)
11074 TREE_ADDRESSABLE (var2
) = 1;
11075 gimple_add_tmp_var (var2
);
11077 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
11078 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11079 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11080 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11081 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11083 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
11084 thread_num
, integer_minus_one_node
);
11085 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
11086 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11087 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11088 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11089 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11091 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
11092 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11093 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11094 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11095 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11097 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
11098 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
11099 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11100 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11101 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11102 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11104 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
11105 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11106 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
11107 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11109 tree var4
= is_for_simd
? new_var
: var2
;
11110 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
11113 var5
= lookup_decl (var
, input_simd_ctx
);
11114 var6
= lookup_decl (var
, scan_simd_ctx
);
11115 if (new_vard
!= new_var
)
11117 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
11118 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
11121 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
11123 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
11126 x
= lang_hooks
.decls
.omp_clause_default_ctor
11127 (c
, var2
, build_outer_var_ref (var
, ctx
));
11129 gimplify_and_add (x
, &clist
);
11131 x
= build_outer_var_ref (var
, ctx
);
11132 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
11134 gimplify_and_add (x
, &thr01_list
);
11136 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
11137 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
11140 x
= unshare_expr (var4
);
11141 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11142 gimplify_and_add (x
, &thrn1_list
);
11143 x
= unshare_expr (var4
);
11144 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11145 gimplify_and_add (x
, &thr02_list
);
11147 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
11149 /* Otherwise, assign to it the identity element. */
11150 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11151 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11154 if (new_vard
!= new_var
)
11155 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11156 SET_DECL_VALUE_EXPR (new_vard
, val
);
11157 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11159 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
11160 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11161 lower_omp (&tseq
, ctx
);
11162 gimple_seq_add_seq (&thrn1_list
, tseq
);
11163 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11164 lower_omp (&tseq
, ctx
);
11165 gimple_seq_add_seq (&thr02_list
, tseq
);
11166 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11167 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11168 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
11170 SET_DECL_VALUE_EXPR (new_vard
, y
);
11173 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11174 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11178 x
= unshare_expr (var4
);
11179 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
11180 gimplify_and_add (x
, &thrn2_list
);
11184 x
= unshare_expr (rprivb_ref
);
11185 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
11186 gimplify_and_add (x
, &scan1_list
);
11190 if (ctx
->scan_exclusive
)
11192 x
= unshare_expr (rprivb_ref
);
11193 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11194 gimplify_and_add (x
, &scan1_list
);
11197 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11198 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11199 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11200 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11201 lower_omp (&tseq
, ctx
);
11202 gimple_seq_add_seq (&scan1_list
, tseq
);
11204 if (ctx
->scan_inclusive
)
11206 x
= unshare_expr (rprivb_ref
);
11207 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11208 gimplify_and_add (x
, &scan1_list
);
11212 x
= unshare_expr (rpriva_ref
);
11213 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
11214 unshare_expr (var4
));
11215 gimplify_and_add (x
, &mdlist
);
11217 x
= unshare_expr (is_for_simd
? var6
: new_var
);
11218 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
11219 gimplify_and_add (x
, &input2_list
);
11222 if (new_vard
!= new_var
)
11223 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11225 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11226 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11227 SET_DECL_VALUE_EXPR (new_vard
, val
);
11228 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11231 SET_DECL_VALUE_EXPR (placeholder
, var6
);
11232 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11235 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11236 lower_omp (&tseq
, ctx
);
11238 SET_DECL_VALUE_EXPR (new_vard
, y
);
11241 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11242 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11246 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
11247 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11248 lower_omp (&tseq
, ctx
);
11250 gimple_seq_add_seq (&input2_list
, tseq
);
11252 x
= build_outer_var_ref (var
, ctx
);
11253 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
11254 gimplify_and_add (x
, &last_list
);
11256 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
11257 gimplify_and_add (x
, &reduc_list
);
11258 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11259 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11261 if (new_vard
!= new_var
)
11262 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11263 SET_DECL_VALUE_EXPR (new_vard
, val
);
11264 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11265 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11266 lower_omp (&tseq
, ctx
);
11267 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
11268 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11269 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11271 SET_DECL_VALUE_EXPR (new_vard
, y
);
11274 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11275 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11277 gimple_seq_add_seq (&reduc_list
, tseq
);
11278 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
11279 gimplify_and_add (x
, &reduc_list
);
11281 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
11283 gimplify_and_add (x
, dlist
);
11287 x
= build_outer_var_ref (var
, ctx
);
11288 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
11290 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
11291 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
11293 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
11295 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
11297 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
11298 if (code
== MINUS_EXPR
)
11302 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
11305 if (ctx
->scan_exclusive
)
11306 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11308 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
11309 gimplify_assign (var2
, x
, &scan1_list
);
11310 if (ctx
->scan_inclusive
)
11311 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11315 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
11318 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
11319 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
11321 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
11324 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
11325 unshare_expr (rprival_ref
));
11326 gimplify_assign (rprival_ref
, x
, &reduc_list
);
11330 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11331 gimple_seq_add_stmt (&scan1_list
, g
);
11332 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11333 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11334 ? scan_stmt4
: scan_stmt2
), g
);
11336 tree controlb
= create_tmp_var (boolean_type_node
);
11337 tree controlp
= create_tmp_var (ptr_type_node
);
11338 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11339 OMP_CLAUSE_DECL (nc
) = controlb
;
11340 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11342 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11343 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11344 OMP_CLAUSE_DECL (nc
) = controlp
;
11345 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11347 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11348 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11349 OMP_CLAUSE_DECL (nc
) = controlb
;
11350 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11352 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11353 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11354 OMP_CLAUSE_DECL (nc
) = controlp
;
11355 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11357 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11359 *cp1
= gimple_omp_for_clauses (stmt
);
11360 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
11361 *cp2
= gimple_omp_for_clauses (new_stmt
);
11362 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
11366 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
11367 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
11369 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
11371 gsi_remove (&input3_gsi
, true);
11372 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
11374 gsi_remove (&scan3_gsi
, true);
11375 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
11377 gsi_remove (&input4_gsi
, true);
11378 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
11380 gsi_remove (&scan4_gsi
, true);
11384 gimple_omp_set_body (scan_stmt1
, scan1_list
);
11385 gimple_omp_set_body (input_stmt2
, input2_list
);
11388 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
11390 gsi_remove (&input1_gsi
, true);
11391 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
11393 gsi_remove (&scan1_gsi
, true);
11394 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
11396 gsi_remove (&input2_gsi
, true);
11397 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
11399 gsi_remove (&scan2_gsi
, true);
11401 gimple_seq_add_seq (body_p
, clist
);
11403 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11404 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11405 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11406 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11407 gimple_seq_add_stmt (body_p
, g
);
11408 g
= gimple_build_label (lab1
);
11409 gimple_seq_add_stmt (body_p
, g
);
11410 gimple_seq_add_seq (body_p
, thr01_list
);
11411 g
= gimple_build_goto (lab3
);
11412 gimple_seq_add_stmt (body_p
, g
);
11413 g
= gimple_build_label (lab2
);
11414 gimple_seq_add_stmt (body_p
, g
);
11415 gimple_seq_add_seq (body_p
, thrn1_list
);
11416 g
= gimple_build_label (lab3
);
11417 gimple_seq_add_stmt (body_p
, g
);
11419 g
= gimple_build_assign (ivar
, size_zero_node
);
11420 gimple_seq_add_stmt (body_p
, g
);
11422 gimple_seq_add_stmt (body_p
, stmt
);
11423 gimple_seq_add_seq (body_p
, body
);
11424 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
11427 g
= gimple_build_omp_return (true);
11428 gimple_seq_add_stmt (body_p
, g
);
11429 gimple_seq_add_seq (body_p
, mdlist
);
11431 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11432 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11433 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
11434 gimple_seq_add_stmt (body_p
, g
);
11435 g
= gimple_build_label (lab1
);
11436 gimple_seq_add_stmt (body_p
, g
);
11438 g
= omp_build_barrier (NULL
);
11439 gimple_seq_add_stmt (body_p
, g
);
11441 tree down
= create_tmp_var (unsigned_type_node
);
11442 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
11443 gimple_seq_add_stmt (body_p
, g
);
11445 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
11446 gimple_seq_add_stmt (body_p
, g
);
11448 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
11449 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
11450 gimple_seq_add_stmt (body_p
, g
);
11452 tree thread_numu
= create_tmp_var (unsigned_type_node
);
11453 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
11454 gimple_seq_add_stmt (body_p
, g
);
11456 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
11457 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
11458 build_int_cst (unsigned_type_node
, 1));
11459 gimple_seq_add_stmt (body_p
, g
);
11461 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11462 g
= gimple_build_label (lab3
);
11463 gimple_seq_add_stmt (body_p
, g
);
11465 tree twok
= create_tmp_var (unsigned_type_node
);
11466 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11467 gimple_seq_add_stmt (body_p
, g
);
11469 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
11470 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
11471 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
11472 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
11473 gimple_seq_add_stmt (body_p
, g
);
11474 g
= gimple_build_label (lab4
);
11475 gimple_seq_add_stmt (body_p
, g
);
11476 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
11477 gimple_seq_add_stmt (body_p
, g
);
11478 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11479 gimple_seq_add_stmt (body_p
, g
);
11481 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
11482 gimple_seq_add_stmt (body_p
, g
);
11483 g
= gimple_build_label (lab6
);
11484 gimple_seq_add_stmt (body_p
, g
);
11486 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11487 gimple_seq_add_stmt (body_p
, g
);
11489 g
= gimple_build_label (lab5
);
11490 gimple_seq_add_stmt (body_p
, g
);
11492 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11493 gimple_seq_add_stmt (body_p
, g
);
11495 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
11496 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
11497 gimple_call_set_lhs (g
, cplx
);
11498 gimple_seq_add_stmt (body_p
, g
);
11499 tree mul
= create_tmp_var (unsigned_type_node
);
11500 g
= gimple_build_assign (mul
, REALPART_EXPR
,
11501 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
11502 gimple_seq_add_stmt (body_p
, g
);
11503 tree ovf
= create_tmp_var (unsigned_type_node
);
11504 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
11505 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
11506 gimple_seq_add_stmt (body_p
, g
);
11508 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
11509 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
11510 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
11512 gimple_seq_add_stmt (body_p
, g
);
11513 g
= gimple_build_label (lab7
);
11514 gimple_seq_add_stmt (body_p
, g
);
11516 tree andv
= create_tmp_var (unsigned_type_node
);
11517 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
11518 gimple_seq_add_stmt (body_p
, g
);
11519 tree andvm1
= create_tmp_var (unsigned_type_node
);
11520 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
11521 build_minus_one_cst (unsigned_type_node
));
11522 gimple_seq_add_stmt (body_p
, g
);
11524 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
11525 gimple_seq_add_stmt (body_p
, g
);
11527 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
11528 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
11529 gimple_seq_add_stmt (body_p
, g
);
11530 g
= gimple_build_label (lab9
);
11531 gimple_seq_add_stmt (body_p
, g
);
11532 gimple_seq_add_seq (body_p
, reduc_list
);
11533 g
= gimple_build_label (lab8
);
11534 gimple_seq_add_stmt (body_p
, g
);
11536 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
11537 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
11538 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
11539 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
11541 gimple_seq_add_stmt (body_p
, g
);
11542 g
= gimple_build_label (lab10
);
11543 gimple_seq_add_stmt (body_p
, g
);
11544 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
11545 gimple_seq_add_stmt (body_p
, g
);
11546 g
= gimple_build_goto (lab12
);
11547 gimple_seq_add_stmt (body_p
, g
);
11548 g
= gimple_build_label (lab11
);
11549 gimple_seq_add_stmt (body_p
, g
);
11550 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11551 gimple_seq_add_stmt (body_p
, g
);
11552 g
= gimple_build_label (lab12
);
11553 gimple_seq_add_stmt (body_p
, g
);
11555 g
= omp_build_barrier (NULL
);
11556 gimple_seq_add_stmt (body_p
, g
);
11558 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
11560 gimple_seq_add_stmt (body_p
, g
);
11562 g
= gimple_build_label (lab2
);
11563 gimple_seq_add_stmt (body_p
, g
);
11565 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11566 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11567 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11568 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11569 gimple_seq_add_stmt (body_p
, g
);
11570 g
= gimple_build_label (lab1
);
11571 gimple_seq_add_stmt (body_p
, g
);
11572 gimple_seq_add_seq (body_p
, thr02_list
);
11573 g
= gimple_build_goto (lab3
);
11574 gimple_seq_add_stmt (body_p
, g
);
11575 g
= gimple_build_label (lab2
);
11576 gimple_seq_add_stmt (body_p
, g
);
11577 gimple_seq_add_seq (body_p
, thrn2_list
);
11578 g
= gimple_build_label (lab3
);
11579 gimple_seq_add_stmt (body_p
, g
);
11581 g
= gimple_build_assign (ivar
, size_zero_node
);
11582 gimple_seq_add_stmt (body_p
, g
);
11583 gimple_seq_add_stmt (body_p
, new_stmt
);
11584 gimple_seq_add_seq (body_p
, new_body
);
11586 gimple_seq new_dlist
= NULL
;
11587 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11588 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11589 tree num_threadsm1
= create_tmp_var (integer_type_node
);
11590 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
11591 integer_minus_one_node
);
11592 gimple_seq_add_stmt (&new_dlist
, g
);
11593 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
11594 gimple_seq_add_stmt (&new_dlist
, g
);
11595 g
= gimple_build_label (lab1
);
11596 gimple_seq_add_stmt (&new_dlist
, g
);
11597 gimple_seq_add_seq (&new_dlist
, last_list
);
11598 g
= gimple_build_label (lab2
);
11599 gimple_seq_add_stmt (&new_dlist
, g
);
11600 gimple_seq_add_seq (&new_dlist
, *dlist
);
11601 *dlist
= new_dlist
;
11604 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11605 the addresses of variables to be made private at the surrounding
11606 parallelism level. Such functions appear in the gimple code stream in two
11607 forms, e.g. for a partitioned loop:
11609 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11610 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11611 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11612 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11614 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11615 not as part of a HEAD_MARK sequence:
11617 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11619 For such stand-alone appearances, the 3rd argument is always 0, denoting
11620 gang partitioning. */
/* Build the IFN_UNIQUE internal call listing the addresses of the OpenACC
   privatization candidates recorded on CTX -- see the comment block just
   above this function for the two forms in which the call appears.
   NOTE(review): this extraction is garbled -- statements are split across
   physical lines and some original source lines are missing (gaps in the
   embedded line numbers, e.g. the early-return body after 11625 and the
   declarations of `i'/`decl').  Code text left byte-identical; only
   comments were added.  */
11623 lower_oacc_private_marker (omp_context
*ctx
)
/* Nothing to emit when no privatization candidates were recorded.
   (The return statement itself is missing from this extraction.)  */
11625 if (ctx
->oacc_privatization_candidates
.length () == 0)
11628 auto_vec
<tree
, 5> args
;
/* Leading arguments: the IFN_UNIQUE_OACC_PRIVATE selector, then the
   constants 0 and -1 (partitioning-level arguments; cf. the comment
   above -- TODO confirm their exact meaning against the IFN_UNIQUE
   expander).  */
11630 args
.quick_push (build_int_cst (integer_type_node
, IFN_UNIQUE_OACC_PRIVATE
));
11631 args
.quick_push (integer_zero_node
);
11632 args
.quick_push (integer_minus_one_node
);
/* Append the address of every candidate decl; each candidate must
   already have been made addressable.  */
11636 FOR_EACH_VEC_ELT (ctx
->oacc_privatization_candidates
, i
, decl
)
11638 gcc_checking_assert (TREE_ADDRESSABLE (decl
));
11639 tree addr
= build_fold_addr_expr (decl
);
11640 args
.safe_push (addr
);
11643 return gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
11646 /* Lower code for an OMP loop directive. */
/* Lower a GIMPLE_OMP_FOR statement at *GSI_P inside context CTX (see the
   "Lower code for an OMP loop directive" comment just above).  The loop
   is replaced at GSI by a GIMPLE_BIND whose body contains the lowered
   pre-body, input clauses, the loop itself, and exit/reduction code.
   NOTE(review): this extraction is garbled -- every statement is split
   across physical lines and many original source lines are missing
   (gaps in the embedded numbering: braces, several declarations such as
   `count'/`count2'/`i'/`new_stmt'/`inner_bind'/`simtc'/`rclauses', and
   trailing call arguments).  Code text is left byte-identical; only
   comments were added/repaired.  */
11649 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11651 tree
*rhs_p
, block
;
11652 struct omp_for_data fd
, *fdp
= NULL
;
11653 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
11655 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
11656 gimple_seq cnt_list
= NULL
, clist
= NULL
;
11657 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
11660 push_gimplify_context ();
/* For OpenACC, scan the clause chain for privatization candidates
   before lowering anything.  */
11662 if (is_gimple_omp_oacc (ctx
->stmt
))
11663 oacc_privatization_scan_clause_chain (ctx
, gimple_omp_for_clauses (stmt
));
11665 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
11667 block
= make_node (BLOCK
);
11668 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
11669 /* Replace at gsi right away, so that 'stmt' is no member
11670 of a sequence anymore as we're going to add to a different
11672 gsi_replace (gsi_p
, new_stmt
, true);
11674 /* Move declaration of temporaries in the loop body before we make
11676 omp_for_body
= gimple_omp_body (stmt
);
11677 if (!gimple_seq_empty_p (omp_for_body
)
11678 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
11681 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
11682 tree vars
= gimple_bind_vars (inner_bind
);
11683 if (is_gimple_omp_oacc (ctx
->stmt
))
11684 oacc_privatization_scan_decl_chain (ctx
, vars
);
11685 gimple_bind_append_vars (new_stmt
, vars
);
11686 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11687 keep them on the inner_bind and its block. */
11688 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
11689 if (gimple_bind_block (inner_bind
))
11690 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
/* For a loop combined into an outer construct, create (or look up on
   the outer taskreg/SIMT stmt) the OMP_CLAUSE__LOOPTEMP_ clauses that
   communicate loop temporaries with the outer construct.  */
11693 if (gimple_omp_for_combined_into_p (stmt
))
11695 omp_extract_for_data (stmt
, &fd
, NULL
);
11698 /* We need two temporaries with fd.loop.v type (istart/iend)
11699 and then (fd.collapse - 1) temporaries with the same
11700 type for count2 ... countN-1 vars if not constant. */
11702 tree type
= fd
.iter_type
;
11703 if (fd
.collapse
> 1
11704 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
11705 count
+= fd
.collapse
- 1;
11707 tree type2
= NULL_TREE
;
11709 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
11710 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
11711 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
11713 tree clauses
= *pc
;
11714 if (fd
.collapse
> 1
11716 && fd
.last_nonrect
== fd
.first_nonrect
+ 1
11717 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
11718 if (tree v
= gimple_omp_for_index (stmt
, fd
.last_nonrect
))
11719 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
11721 v
= gimple_omp_for_index (stmt
, fd
.first_nonrect
);
11722 type2
= TREE_TYPE (v
);
11728 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
11729 OMP_CLAUSE__LOOPTEMP_
);
11730 if (ctx
->simt_stmt
)
11731 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
11732 OMP_CLAUSE__LOOPTEMP_
);
11733 for (i
= 0; i
< count
+ count2
; i
++)
11738 gcc_assert (outerc
);
11739 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
11740 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
11741 OMP_CLAUSE__LOOPTEMP_
);
11745 /* If there are 2 adjacent SIMD stmts, one with _simt_
11746 clause, another without, make sure they have the same
11747 decls in _looptemp_ clauses, because the outer stmt
11748 they are combined into will look up just one inner_stmt. */
11749 if (ctx
->simt_stmt
)
11750 temp
= OMP_CLAUSE_DECL (simtc
);
11752 temp
= create_tmp_var (i
>= count
? type2
: type
);
11753 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
11755 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
11756 OMP_CLAUSE_DECL (*pc
) = temp
;
11757 pc
= &OMP_CLAUSE_CHAIN (*pc
);
11758 if (ctx
->simt_stmt
)
11759 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
11760 OMP_CLAUSE__LOOPTEMP_
);
11765 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
/* If task reductions are present, stash a _reductemp_ clause and emit
   the task-reduction setup/teardown sequences (tred_ilist/tred_dlist).  */
11769 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
11770 OMP_CLAUSE_REDUCTION
);
11771 tree rtmp
= NULL_TREE
;
11774 tree type
= build_pointer_type (pointer_sized_int_node
);
11775 tree temp
= create_tmp_var (type
);
11776 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
11777 OMP_CLAUSE_DECL (c
) = temp
;
11778 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
11779 gimple_omp_for_set_clauses (stmt
, c
);
11780 lower_omp_task_reductions (ctx
, OMP_FOR
,
11781 gimple_omp_for_clauses (stmt
),
11782 &tred_ilist
, &tred_dlist
);
11784 rtmp
= make_ssa_name (type
);
11785 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
11788 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
11791 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
11793 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
11794 gimple_omp_for_pre_body (stmt
));
11796 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
11798 gcall
*private_marker
= NULL
;
11799 if (is_gimple_omp_oacc (ctx
->stmt
)
11800 && !gimple_seq_empty_p (omp_for_body
))
11801 private_marker
= lower_oacc_private_marker (ctx
);
11803 /* Lower the header expressions. At this point, we can assume that
11804 the header is of the form:
11806 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11808 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11809 using the .omp_data_s mapping, if needed. */
11810 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
11812 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
11813 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
11815 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
11816 TREE_VEC_ELT (*rhs_p
, 1)
11817 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
11818 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
11819 TREE_VEC_ELT (*rhs_p
, 2)
11820 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
11822 else if (!is_gimple_min_invariant (*rhs_p
))
11823 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11824 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
11825 recompute_tree_invariant_for_addr_expr (*rhs_p
);
11827 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
11828 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
11830 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
11831 TREE_VEC_ELT (*rhs_p
, 1)
11832 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
11833 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
11834 TREE_VEC_ELT (*rhs_p
, 2)
11835 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
11837 else if (!is_gimple_min_invariant (*rhs_p
))
11838 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11839 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
11840 recompute_tree_invariant_for_addr_expr (*rhs_p
);
11842 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
11843 if (!is_gimple_min_invariant (*rhs_p
))
11844 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11847 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
11849 gimple_seq_add_seq (&body
, cnt_list
);
11851 /* Once lowered, extract the bounds and clauses. */
11852 omp_extract_for_data (stmt
, &fd
, NULL
);
11854 if (is_gimple_omp_oacc (ctx
->stmt
)
11855 && !ctx_in_oacc_kernels_region (ctx
))
11856 lower_oacc_head_tail (gimple_location (stmt
),
11857 gimple_omp_for_clauses (stmt
), private_marker
,
11858 &oacc_head
, &oacc_tail
, ctx
);
11860 /* Add OpenACC partitioning and reduction markers just before the loop. */
11862 gimple_seq_add_seq (&body
, oacc_head
);
11864 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
/* Remap OMP_CLAUSE_LINEAR decls (and their steps) to the lowered copies
   in this context.  */
11866 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
11867 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11868 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11869 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
11871 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
11872 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
11873 OMP_CLAUSE_LINEAR_STEP (c
)
11874 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
/* Worksharing loops with inscan reductions get a dedicated lowering
   pass (lower_omp_for_scan).  */
11878 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
11879 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
11880 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
11883 gimple_seq_add_stmt (&body
, stmt
);
11884 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
11887 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
11890 /* After the loop, add exit clauses. */
11891 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
/* If any reduction combiners were queued in CLIST, emit them inside a
   GOMP_atomic_start/GOMP_atomic_end pair.  */
11895 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
11896 gcall
*g
= gimple_build_call (fndecl
, 0);
11897 gimple_seq_add_stmt (&body
, g
);
11898 gimple_seq_add_seq (&body
, clist
);
11899 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
11900 g
= gimple_build_call (fndecl
, 0);
11901 gimple_seq_add_stmt (&body
, g
);
11904 if (ctx
->cancellable
)
11905 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
11907 gimple_seq_add_seq (&body
, dlist
);
11911 gimple_seq_add_seq (&tred_ilist
, body
);
11915 body
= maybe_catch_exception (body
);
11917 /* Region exit marker goes at the end of the loop body. */
11918 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
11919 gimple_seq_add_stmt (&body
, g
);
11921 gimple_seq_add_seq (&body
, tred_dlist
);
11923 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
11926 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
11928 /* Add OpenACC joining and reduction markers just after the loop. */
11930 gimple_seq_add_seq (&body
, oacc_tail
);
11932 pop_gimplify_context (new_stmt
);
11934 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
11935 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
11936 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
11937 if (BLOCK_VARS (block
))
11938 TREE_USED (block
) = 1;
/* The lowered body now lives on the bind; detach it (and the pre-body)
   from the GIMPLE_OMP_FOR itself.  */
11940 gimple_bind_set_body (new_stmt
, body
);
11941 gimple_omp_set_body (stmt
, NULL
);
11942 gimple_omp_for_set_pre_body (stmt
, NULL
);
11945 /* Callback for walk_stmts. Check if the current statement only contains
11946 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
/* NOTE(review): this excerpt is a lossy extraction -- the return-type line,
   the remaining switch cases and the closing brace of this callback are not
   visible here.  Only comments were added; no code tokens were changed.  */
11949 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
11950 bool *handled_ops_p
,
11951 struct walk_stmt_info
*wi
)
/* WI->info points at the caller's int counter (see the combined-parallel
   detection in lower_omp_taskreg, which seeds it via walk_gimple_seq).  */
11953 int *info
= (int *) wi
->info
;
11954 gimple
*stmt
= gsi_stmt (*gsi_p
);
/* Report every statement as fully handled so the gimple walker does not
   descend into the statement's operands.  */
11956 *handled_ops_p
= true;
11957 switch (gimple_code (stmt
))
11963 case GIMPLE_OMP_FOR
:
11964 case GIMPLE_OMP_SECTIONS
:
/* The first workshare construct seen sets the counter from 0 to 1; any
   subsequent one forces it to -1, i.e. "more than one construct".  */
11965 *info
= *info
== 0 ? 1 : -1;
/* State shared by the task-copyfn construction helpers
   (task_copyfn_copy_decl, task_copyfn_remap_type, create_task_copyfn):
   bundles the tree-inline copy_body_data together with the originating
   omp_context -- accessed below as tcctx->cb and tcctx->ctx.  */
11974 struct omp_taskcopy_context
11976 /* This field must be at the beginning, as we do "inheritance": Some
11977 callback functions for tree-inline.cc (e.g., omp_copy_decl)
11978 receive a copy_body_data pointer that is up-casted to an
11979 omp_context pointer. */
/* NOTE(review): the field declarations themselves were dropped by the
   extraction and are not visible in this excerpt.  */
/* copy_decl callback installed into the copy_body_data while building a
   task copy function (create_task_copyfn sets tcctx.cb.copy_decl to this
   function).  CB is really an omp_taskcopy_context, per the "inheritance"
   note on that struct.  */
11985 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
11987 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
/* A variable that lives in the sender record (present in sfield_map) is
   replaced by a fresh anonymous temporary of the same type in the copy
   function.  */
11989 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
11990 return create_tmp_var (TREE_TYPE (var
));
/* NOTE(review): the fall-through path for unmapped VARs was dropped by the
   extraction and is not visible here.  */
/* Build a remapped variant of ORIG_TYPE for the task copy function: a
   fresh RECORD_TYPE whose fields are copies of ORIG_TYPE's fields with
   their types, sizes and offsets rewritten through TCCTX->cb.  Used by
   create_task_copyfn when the (s)record type contains variably modified
   types (see the variably_modified_type_p checks there).  */
11996 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
11998 tree name
, new_fields
= NULL
, type
, f
;
/* Create the new record and give it the same TYPE_DECL name as the
   original, located at the task statement.  */
12000 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
12001 name
= DECL_NAME (TYPE_NAME (orig_type
));
12002 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
12003 TYPE_DECL
, name
, type
);
12004 TYPE_NAME (type
) = name
;
/* Copy each field, remapping its type and walking its size and offset
   expressions so references to remapped decls are rewritten.  */
12006 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
12008 tree new_f
= copy_node (f
);
12009 DECL_CONTEXT (new_f
) = type
;
12010 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
/* Fields are chained in reverse order here and nreverse'd after the
   loop.  */
12011 TREE_CHAIN (new_f
) = new_fields
;
12012 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
12013 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
12014 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
12016 new_fields
= new_f
;
/* Record the old->new field mapping; create_task_copyfn consults
   decl_map later when rewriting field references.  */
12017 tcctx
->cb
.decl_map
->put (f
, new_f
);
12019 TYPE_FIELDS (type
) = nreverse (new_fields
);
12020 layout_type (type
);
/* NOTE(review): the function's closing 'return type;' and braces were
   dropped by the extraction and are not visible here.  */
12024 /* Create task copyfn. */
12027 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
12029 struct function
*child_cfun
;
12030 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
12031 tree record_type
, srecord_type
, bind
, list
;
12032 bool record_needs_remap
= false, srecord_needs_remap
= false;
12034 struct omp_taskcopy_context tcctx
;
12035 location_t loc
= gimple_location (task_stmt
);
12036 size_t looptempno
= 0;
12038 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
12039 task_cpyfns
.safe_push (task_stmt
);
12040 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
12041 gcc_assert (child_cfun
->cfg
== NULL
);
12042 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
12044 /* Reset DECL_CONTEXT on function arguments. */
12045 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
12046 DECL_CONTEXT (t
) = child_fn
;
12048 /* Populate the function. */
12049 push_gimplify_context ();
12050 push_cfun (child_cfun
);
12052 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
12053 TREE_SIDE_EFFECTS (bind
) = 1;
12055 DECL_SAVED_TREE (child_fn
) = bind
;
12056 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
12058 /* Remap src and dst argument types if needed. */
12059 record_type
= ctx
->record_type
;
12060 srecord_type
= ctx
->srecord_type
;
12061 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
12062 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
12064 record_needs_remap
= true;
12067 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
12068 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
12070 srecord_needs_remap
= true;
12074 if (record_needs_remap
|| srecord_needs_remap
)
12076 memset (&tcctx
, '\0', sizeof (tcctx
));
12077 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
12078 tcctx
.cb
.dst_fn
= child_fn
;
12079 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
12080 gcc_checking_assert (tcctx
.cb
.src_node
);
12081 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
12082 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
12083 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
12084 tcctx
.cb
.eh_lp_nr
= 0;
12085 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
12086 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
12089 if (record_needs_remap
)
12090 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
12091 if (srecord_needs_remap
)
12092 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
12095 tcctx
.cb
.decl_map
= NULL
;
12097 arg
= DECL_ARGUMENTS (child_fn
);
12098 TREE_TYPE (arg
) = build_pointer_type (record_type
);
12099 sarg
= DECL_CHAIN (arg
);
12100 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
12102 /* First pass: initialize temporaries used in record_type and srecord_type
12103 sizes and field offsets. */
12104 if (tcctx
.cb
.decl_map
)
12105 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12106 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12110 decl
= OMP_CLAUSE_DECL (c
);
12111 p
= tcctx
.cb
.decl_map
->get (decl
);
12114 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12115 sf
= (tree
) n
->value
;
12116 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12117 src
= build_simple_mem_ref_loc (loc
, sarg
);
12118 src
= omp_build_component_ref (src
, sf
);
12119 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
12120 append_to_statement_list (t
, &list
);
12123 /* Second pass: copy shared var pointers and copy construct non-VLA
12124 firstprivate vars. */
12125 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12126 switch (OMP_CLAUSE_CODE (c
))
12128 splay_tree_key key
;
12129 case OMP_CLAUSE_SHARED
:
12130 decl
= OMP_CLAUSE_DECL (c
);
12131 key
= (splay_tree_key
) decl
;
12132 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
12133 key
= (splay_tree_key
) &DECL_UID (decl
);
12134 n
= splay_tree_lookup (ctx
->field_map
, key
);
12137 f
= (tree
) n
->value
;
12138 if (tcctx
.cb
.decl_map
)
12139 f
= *tcctx
.cb
.decl_map
->get (f
);
12140 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
12141 sf
= (tree
) n
->value
;
12142 if (tcctx
.cb
.decl_map
)
12143 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12144 src
= build_simple_mem_ref_loc (loc
, sarg
);
12145 src
= omp_build_component_ref (src
, sf
);
12146 dst
= build_simple_mem_ref_loc (loc
, arg
);
12147 dst
= omp_build_component_ref (dst
, f
);
12148 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12149 append_to_statement_list (t
, &list
);
12151 case OMP_CLAUSE_REDUCTION
:
12152 case OMP_CLAUSE_IN_REDUCTION
:
12153 decl
= OMP_CLAUSE_DECL (c
);
12154 if (TREE_CODE (decl
) == MEM_REF
)
12156 decl
= TREE_OPERAND (decl
, 0);
12157 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
12158 decl
= TREE_OPERAND (decl
, 0);
12159 if (TREE_CODE (decl
) == INDIRECT_REF
12160 || TREE_CODE (decl
) == ADDR_EXPR
)
12161 decl
= TREE_OPERAND (decl
, 0);
12163 key
= (splay_tree_key
) decl
;
12164 n
= splay_tree_lookup (ctx
->field_map
, key
);
12167 f
= (tree
) n
->value
;
12168 if (tcctx
.cb
.decl_map
)
12169 f
= *tcctx
.cb
.decl_map
->get (f
);
12170 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
12171 sf
= (tree
) n
->value
;
12172 if (tcctx
.cb
.decl_map
)
12173 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12174 src
= build_simple_mem_ref_loc (loc
, sarg
);
12175 src
= omp_build_component_ref (src
, sf
);
12176 if (decl
!= OMP_CLAUSE_DECL (c
)
12177 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
12178 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
12179 src
= build_simple_mem_ref_loc (loc
, src
);
12180 dst
= build_simple_mem_ref_loc (loc
, arg
);
12181 dst
= omp_build_component_ref (dst
, f
);
12182 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12183 append_to_statement_list (t
, &list
);
12185 case OMP_CLAUSE__LOOPTEMP_
:
12186 /* Fields for first two _looptemp_ clauses are initialized by
12187 GOMP_taskloop*, the rest are handled like firstprivate. */
12188 if (looptempno
< 2)
12194 case OMP_CLAUSE__REDUCTEMP_
:
12195 case OMP_CLAUSE_FIRSTPRIVATE
:
12196 decl
= OMP_CLAUSE_DECL (c
);
12197 if (is_variable_sized (decl
))
12199 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12202 f
= (tree
) n
->value
;
12203 if (tcctx
.cb
.decl_map
)
12204 f
= *tcctx
.cb
.decl_map
->get (f
);
12205 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12208 sf
= (tree
) n
->value
;
12209 if (tcctx
.cb
.decl_map
)
12210 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12211 src
= build_simple_mem_ref_loc (loc
, sarg
);
12212 src
= omp_build_component_ref (src
, sf
);
12213 if (use_pointer_for_field (decl
, NULL
)
12214 || omp_privatize_by_reference (decl
))
12215 src
= build_simple_mem_ref_loc (loc
, src
);
12219 dst
= build_simple_mem_ref_loc (loc
, arg
);
12220 dst
= omp_build_component_ref (dst
, f
);
12221 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
12222 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12225 if (ctx
->allocate_map
)
12226 if (tree
*allocatorp
= ctx
->allocate_map
->get (decl
))
12228 tree allocator
= *allocatorp
;
12229 HOST_WIDE_INT ialign
= 0;
12230 if (TREE_CODE (allocator
) == TREE_LIST
)
12232 ialign
= tree_to_uhwi (TREE_VALUE (allocator
));
12233 allocator
= TREE_PURPOSE (allocator
);
12235 if (TREE_CODE (allocator
) != INTEGER_CST
)
12237 n
= splay_tree_lookup (ctx
->sfield_map
,
12238 (splay_tree_key
) allocator
);
12239 allocator
= (tree
) n
->value
;
12240 if (tcctx
.cb
.decl_map
)
12241 allocator
= *tcctx
.cb
.decl_map
->get (allocator
);
12242 tree a
= build_simple_mem_ref_loc (loc
, sarg
);
12243 allocator
= omp_build_component_ref (a
, allocator
);
12245 allocator
= fold_convert (pointer_sized_int_node
, allocator
);
12246 tree a
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
12247 tree align
= build_int_cst (size_type_node
,
12249 DECL_ALIGN_UNIT (decl
)));
12250 tree sz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst
)));
12251 tree ptr
= build_call_expr_loc (loc
, a
, 3, align
, sz
,
12253 ptr
= fold_convert (TREE_TYPE (dst
), ptr
);
12254 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, ptr
);
12255 append_to_statement_list (t
, &list
);
12256 dst
= build_simple_mem_ref_loc (loc
, dst
);
12258 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
12260 append_to_statement_list (t
, &list
);
12262 case OMP_CLAUSE_PRIVATE
:
12263 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
12265 decl
= OMP_CLAUSE_DECL (c
);
12266 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12267 f
= (tree
) n
->value
;
12268 if (tcctx
.cb
.decl_map
)
12269 f
= *tcctx
.cb
.decl_map
->get (f
);
12270 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12273 sf
= (tree
) n
->value
;
12274 if (tcctx
.cb
.decl_map
)
12275 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12276 src
= build_simple_mem_ref_loc (loc
, sarg
);
12277 src
= omp_build_component_ref (src
, sf
);
12278 if (use_pointer_for_field (decl
, NULL
))
12279 src
= build_simple_mem_ref_loc (loc
, src
);
12283 dst
= build_simple_mem_ref_loc (loc
, arg
);
12284 dst
= omp_build_component_ref (dst
, f
);
12285 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12286 append_to_statement_list (t
, &list
);
12292 /* Last pass: handle VLA firstprivates. */
12293 if (tcctx
.cb
.decl_map
)
12294 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12295 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12299 decl
= OMP_CLAUSE_DECL (c
);
12300 if (!is_variable_sized (decl
))
12302 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12305 f
= (tree
) n
->value
;
12306 f
= *tcctx
.cb
.decl_map
->get (f
);
12307 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
12308 ind
= DECL_VALUE_EXPR (decl
);
12309 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
12310 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
12311 n
= splay_tree_lookup (ctx
->sfield_map
,
12312 (splay_tree_key
) TREE_OPERAND (ind
, 0));
12313 sf
= (tree
) n
->value
;
12314 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12315 src
= build_simple_mem_ref_loc (loc
, sarg
);
12316 src
= omp_build_component_ref (src
, sf
);
12317 src
= build_simple_mem_ref_loc (loc
, src
);
12318 dst
= build_simple_mem_ref_loc (loc
, arg
);
12319 dst
= omp_build_component_ref (dst
, f
);
12320 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
12321 append_to_statement_list (t
, &list
);
12322 n
= splay_tree_lookup (ctx
->field_map
,
12323 (splay_tree_key
) TREE_OPERAND (ind
, 0));
12324 df
= (tree
) n
->value
;
12325 df
= *tcctx
.cb
.decl_map
->get (df
);
12326 ptr
= build_simple_mem_ref_loc (loc
, arg
);
12327 ptr
= omp_build_component_ref (ptr
, df
);
12328 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
12329 build_fold_addr_expr_loc (loc
, dst
));
12330 append_to_statement_list (t
, &list
);
12333 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
12334 append_to_statement_list (t
, &list
);
12336 if (tcctx
.cb
.decl_map
)
12337 delete tcctx
.cb
.decl_map
;
12338 pop_gimplify_context (NULL
);
12339 BIND_EXPR_BODY (bind
) = list
;
12344 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
12348 size_t cnt
[5] = { 0, 0, 0, 0, 0 }, idx
= 2, i
;
12350 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
12351 gcc_assert (clauses
);
12352 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12353 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
12354 switch (OMP_CLAUSE_DEPEND_KIND (c
))
12356 case OMP_CLAUSE_DEPEND_LAST
:
12357 /* Lowering already done at gimplification. */
12359 case OMP_CLAUSE_DEPEND_IN
:
12362 case OMP_CLAUSE_DEPEND_OUT
:
12363 case OMP_CLAUSE_DEPEND_INOUT
:
12366 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
12369 case OMP_CLAUSE_DEPEND_DEPOBJ
:
12372 case OMP_CLAUSE_DEPEND_INOUTSET
:
12376 gcc_unreachable ();
12378 if (cnt
[1] || cnt
[3] || cnt
[4])
12380 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3] + cnt
[4];
12381 size_t inoutidx
= total
+ idx
;
12382 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
+ 2 * cnt
[4]);
12383 tree array
= create_tmp_var (type
);
12384 TREE_ADDRESSABLE (array
) = 1;
12385 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
12389 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
12390 gimple_seq_add_stmt (iseq
, g
);
12391 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
12394 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
12395 gimple_seq_add_stmt (iseq
, g
);
12396 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
12398 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
12399 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
12400 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
12401 gimple_seq_add_stmt (iseq
, g
);
12403 for (i
= 0; i
< 5; i
++)
12407 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12408 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
12412 switch (OMP_CLAUSE_DEPEND_KIND (c
))
12414 case OMP_CLAUSE_DEPEND_IN
:
12418 case OMP_CLAUSE_DEPEND_OUT
:
12419 case OMP_CLAUSE_DEPEND_INOUT
:
12423 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
12427 case OMP_CLAUSE_DEPEND_DEPOBJ
:
12431 case OMP_CLAUSE_DEPEND_INOUTSET
:
12436 gcc_unreachable ();
12438 tree t
= OMP_CLAUSE_DECL (c
);
12441 t
= build4 (ARRAY_REF
, ptr_type_node
, array
,
12442 size_int (inoutidx
), NULL_TREE
, NULL_TREE
);
12443 t
= build_fold_addr_expr (t
);
12446 t
= fold_convert (ptr_type_node
, t
);
12447 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
12448 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12449 NULL_TREE
, NULL_TREE
);
12450 g
= gimple_build_assign (r
, t
);
12451 gimple_seq_add_stmt (iseq
, g
);
12455 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12456 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
12457 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_INOUTSET
)
12459 tree t
= OMP_CLAUSE_DECL (c
);
12460 t
= fold_convert (ptr_type_node
, t
);
12461 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
12462 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12463 NULL_TREE
, NULL_TREE
);
12464 g
= gimple_build_assign (r
, t
);
12465 gimple_seq_add_stmt (iseq
, g
);
12466 t
= build_int_cst (ptr_type_node
, GOMP_DEPEND_INOUTSET
);
12467 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12468 NULL_TREE
, NULL_TREE
);
12469 g
= gimple_build_assign (r
, t
);
12470 gimple_seq_add_stmt (iseq
, g
);
12473 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
12474 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
12475 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
12476 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
12478 tree clobber
= build_clobber (type
);
12479 g
= gimple_build_assign (array
, clobber
);
12480 gimple_seq_add_stmt (oseq
, g
);
12483 /* Lower the OpenMP parallel or task directive in the current statement
12484 in GSI_P. CTX holds context information for the directive. */
12487 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12491 gimple
*stmt
= gsi_stmt (*gsi_p
);
12492 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
12493 gimple_seq par_body
;
12494 location_t loc
= gimple_location (stmt
);
12496 clauses
= gimple_omp_taskreg_clauses (stmt
);
12497 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12498 && gimple_omp_task_taskwait_p (stmt
))
12506 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
12507 par_body
= gimple_bind_body (par_bind
);
12509 child_fn
= ctx
->cb
.dst_fn
;
12510 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
12511 && !gimple_omp_parallel_combined_p (stmt
))
12513 struct walk_stmt_info wi
;
12516 memset (&wi
, 0, sizeof (wi
));
12518 wi
.val_only
= true;
12519 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
12521 gimple_omp_parallel_set_combined_p (stmt
, true);
12523 gimple_seq dep_ilist
= NULL
;
12524 gimple_seq dep_olist
= NULL
;
12525 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12526 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
12528 push_gimplify_context ();
12529 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12530 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
12531 &dep_ilist
, &dep_olist
);
12534 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12535 && gimple_omp_task_taskwait_p (stmt
))
12539 gsi_replace (gsi_p
, dep_bind
, true);
12540 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12541 gimple_bind_add_stmt (dep_bind
, stmt
);
12542 gimple_bind_add_seq (dep_bind
, dep_olist
);
12543 pop_gimplify_context (dep_bind
);
12548 if (ctx
->srecord_type
)
12549 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
12551 gimple_seq tskred_ilist
= NULL
;
12552 gimple_seq tskred_olist
= NULL
;
12553 if ((is_task_ctx (ctx
)
12554 && gimple_omp_task_taskloop_p (ctx
->stmt
)
12555 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
12556 OMP_CLAUSE_REDUCTION
))
12557 || (is_parallel_ctx (ctx
)
12558 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
12559 OMP_CLAUSE__REDUCTEMP_
)))
12561 if (dep_bind
== NULL
)
12563 push_gimplify_context ();
12564 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12566 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
12568 gimple_omp_taskreg_clauses (ctx
->stmt
),
12569 &tskred_ilist
, &tskred_olist
);
12572 push_gimplify_context ();
12574 gimple_seq par_olist
= NULL
;
12575 gimple_seq par_ilist
= NULL
;
12576 gimple_seq par_rlist
= NULL
;
12577 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
12578 lower_omp (&par_body
, ctx
);
12579 if (gimple_code (stmt
) != GIMPLE_OMP_TASK
)
12580 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
12582 /* Declare all the variables created by mapping and the variables
12583 declared in the scope of the parallel body. */
12584 record_vars_into (ctx
->block_vars
, child_fn
);
12585 maybe_remove_omp_member_access_dummy_vars (par_bind
);
12586 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
12588 if (ctx
->record_type
)
12591 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
12592 : ctx
->record_type
, ".omp_data_o");
12593 DECL_NAMELESS (ctx
->sender_decl
) = 1;
12594 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
12595 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
12598 gimple_seq olist
= NULL
;
12599 gimple_seq ilist
= NULL
;
12600 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
12601 lower_send_shared_vars (&ilist
, &olist
, ctx
);
12603 if (ctx
->record_type
)
12605 tree clobber
= build_clobber (TREE_TYPE (ctx
->sender_decl
));
12606 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
12610 /* Once all the expansions are done, sequence all the different
12611 fragments inside gimple_omp_body. */
12613 gimple_seq new_body
= NULL
;
12615 if (ctx
->record_type
)
12617 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
12618 /* fixup_child_record_type might have changed receiver_decl's type. */
12619 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
12620 gimple_seq_add_stmt (&new_body
,
12621 gimple_build_assign (ctx
->receiver_decl
, t
));
12624 gimple_seq_add_seq (&new_body
, par_ilist
);
12625 gimple_seq_add_seq (&new_body
, par_body
);
12626 gimple_seq_add_seq (&new_body
, par_rlist
);
12627 if (ctx
->cancellable
)
12628 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
12629 gimple_seq_add_seq (&new_body
, par_olist
);
12630 new_body
= maybe_catch_exception (new_body
);
12631 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
12632 gimple_seq_add_stmt (&new_body
,
12633 gimple_build_omp_continue (integer_zero_node
,
12634 integer_zero_node
));
12635 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
12636 gimple_omp_set_body (stmt
, new_body
);
12638 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
12639 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12641 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
12642 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
12643 gimple_bind_add_seq (bind
, ilist
);
12644 gimple_bind_add_stmt (bind
, stmt
);
12645 gimple_bind_add_seq (bind
, olist
);
12647 pop_gimplify_context (NULL
);
12651 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12652 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
12653 gimple_bind_add_stmt (dep_bind
, bind
);
12654 gimple_bind_add_seq (dep_bind
, tskred_olist
);
12655 gimple_bind_add_seq (dep_bind
, dep_olist
);
12656 pop_gimplify_context (dep_bind
);
12660 /* Lower the GIMPLE_OMP_TARGET in the current statement
12661 in GSI_P. CTX holds context information for the directive. */
12664 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12667 tree child_fn
, t
, c
;
12668 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
12669 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
12670 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
12671 location_t loc
= gimple_location (stmt
);
12672 bool offloaded
, data_region
;
12673 unsigned int map_cnt
= 0;
12674 tree in_reduction_clauses
= NULL_TREE
;
12676 offloaded
= is_gimple_omp_offloaded (stmt
);
12677 switch (gimple_omp_target_kind (stmt
))
12679 case GF_OMP_TARGET_KIND_REGION
:
12681 q
= &in_reduction_clauses
;
12682 for (p
= gimple_omp_target_clauses_ptr (stmt
); *p
; )
12683 if (OMP_CLAUSE_CODE (*p
) == OMP_CLAUSE_IN_REDUCTION
)
12686 q
= &OMP_CLAUSE_CHAIN (*q
);
12687 *p
= OMP_CLAUSE_CHAIN (*p
);
12690 p
= &OMP_CLAUSE_CHAIN (*p
);
12692 *p
= in_reduction_clauses
;
12694 case GF_OMP_TARGET_KIND_UPDATE
:
12695 case GF_OMP_TARGET_KIND_ENTER_DATA
:
12696 case GF_OMP_TARGET_KIND_EXIT_DATA
:
12697 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
12698 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
12699 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
12700 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
12701 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
12702 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
12703 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
12704 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
12705 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
12706 data_region
= false;
12708 case GF_OMP_TARGET_KIND_DATA
:
12709 case GF_OMP_TARGET_KIND_OACC_DATA
:
12710 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
12711 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
12712 data_region
= true;
12715 gcc_unreachable ();
12718 /* Ensure that requires map is written via output_offload_tables, even if only
12719 'target (enter/exit) data' is used in the translation unit. */
12720 if (ENABLE_OFFLOADING
&& (omp_requires_mask
& OMP_REQUIRES_TARGET_USED
))
12721 g
->have_offload
= true;
12723 clauses
= gimple_omp_target_clauses (stmt
);
12725 gimple_seq dep_ilist
= NULL
;
12726 gimple_seq dep_olist
= NULL
;
12727 bool has_depend
= omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
) != NULL_TREE
;
12728 if (has_depend
|| in_reduction_clauses
)
12730 push_gimplify_context ();
12731 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12733 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
12734 &dep_ilist
, &dep_olist
);
12735 if (in_reduction_clauses
)
12736 lower_rec_input_clauses (in_reduction_clauses
, &dep_ilist
, &dep_olist
,
12744 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
12745 tgt_body
= gimple_bind_body (tgt_bind
);
12747 else if (data_region
)
12748 tgt_body
= gimple_omp_body (stmt
);
12749 child_fn
= ctx
->cb
.dst_fn
;
12751 push_gimplify_context ();
12754 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12755 switch (OMP_CLAUSE_CODE (c
))
12761 case OMP_CLAUSE_MAP
:
12763 /* First check what we're prepared to handle in the following. */
12764 switch (OMP_CLAUSE_MAP_KIND (c
))
12766 case GOMP_MAP_ALLOC
:
12768 case GOMP_MAP_FROM
:
12769 case GOMP_MAP_TOFROM
:
12770 case GOMP_MAP_POINTER
:
12771 case GOMP_MAP_TO_PSET
:
12772 case GOMP_MAP_DELETE
:
12773 case GOMP_MAP_RELEASE
:
12774 case GOMP_MAP_ALWAYS_TO
:
12775 case GOMP_MAP_ALWAYS_FROM
:
12776 case GOMP_MAP_ALWAYS_TOFROM
:
12777 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
12778 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
12779 case GOMP_MAP_STRUCT
:
12780 case GOMP_MAP_ALWAYS_POINTER
:
12781 case GOMP_MAP_ATTACH
:
12782 case GOMP_MAP_DETACH
:
12783 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
12784 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
12786 case GOMP_MAP_IF_PRESENT
:
12787 case GOMP_MAP_FORCE_ALLOC
:
12788 case GOMP_MAP_FORCE_TO
:
12789 case GOMP_MAP_FORCE_FROM
:
12790 case GOMP_MAP_FORCE_TOFROM
:
12791 case GOMP_MAP_FORCE_PRESENT
:
12792 case GOMP_MAP_FORCE_DEVICEPTR
:
12793 case GOMP_MAP_DEVICE_RESIDENT
:
12794 case GOMP_MAP_LINK
:
12795 case GOMP_MAP_FORCE_DETACH
:
12796 gcc_assert (is_gimple_omp_oacc (stmt
));
12799 gcc_unreachable ();
12803 case OMP_CLAUSE_TO
:
12804 case OMP_CLAUSE_FROM
:
12806 var
= OMP_CLAUSE_DECL (c
);
12809 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
12810 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12811 && (OMP_CLAUSE_MAP_KIND (c
)
12812 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
12817 if (DECL_SIZE (var
)
12818 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12820 tree var2
= DECL_VALUE_EXPR (var
);
12821 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12822 var2
= TREE_OPERAND (var2
, 0);
12823 gcc_assert (DECL_P (var2
));
12828 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12829 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12830 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
12832 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12834 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
12835 && varpool_node::get_create (var
)->offloadable
)
12838 tree type
= build_pointer_type (TREE_TYPE (var
));
12839 tree new_var
= lookup_decl (var
, ctx
);
12840 x
= create_tmp_var_raw (type
, get_name (new_var
));
12841 gimple_add_tmp_var (x
);
12842 x
= build_simple_mem_ref (x
);
12843 SET_DECL_VALUE_EXPR (new_var
, x
);
12844 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12849 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12850 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12851 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
12852 && is_omp_target (stmt
))
12854 gcc_assert (maybe_lookup_field (c
, ctx
));
12859 if (!maybe_lookup_field (var
, ctx
))
12862 /* Don't remap compute constructs' reduction variables, because the
12863 intermediate result must be local to each gang. */
12864 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12865 && is_gimple_omp_oacc (ctx
->stmt
)
12866 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
12868 x
= build_receiver_ref (var
, true, ctx
);
12869 tree new_var
= lookup_decl (var
, ctx
);
12871 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12872 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
12873 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12874 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12875 x
= build_simple_mem_ref (x
);
12876 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12878 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
12879 if (omp_privatize_by_reference (new_var
)
12880 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
12881 || DECL_BY_REFERENCE (var
)))
12883 /* Create a local object to hold the instance
12885 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
12886 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
12887 tree inst
= create_tmp_var (type
, id
);
12888 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
12889 x
= build_fold_addr_expr (inst
);
12891 gimplify_assign (new_var
, x
, &fplist
);
12893 else if (DECL_P (new_var
))
12895 SET_DECL_VALUE_EXPR (new_var
, x
);
12896 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12899 gcc_unreachable ();
12904 case OMP_CLAUSE_FIRSTPRIVATE
:
12905 omp_firstprivate_recv
:
12906 gcc_checking_assert (offloaded
);
12907 if (is_gimple_omp_oacc (ctx
->stmt
))
12909 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12910 gcc_checking_assert (!is_oacc_kernels (ctx
));
12911 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12912 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12914 goto oacc_firstprivate
;
12917 var
= OMP_CLAUSE_DECL (c
);
12918 if (!omp_privatize_by_reference (var
)
12919 && !is_gimple_reg_type (TREE_TYPE (var
)))
12921 tree new_var
= lookup_decl (var
, ctx
);
12922 if (is_variable_sized (var
))
12924 tree pvar
= DECL_VALUE_EXPR (var
);
12925 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12926 pvar
= TREE_OPERAND (pvar
, 0);
12927 gcc_assert (DECL_P (pvar
));
12928 tree new_pvar
= lookup_decl (pvar
, ctx
);
12929 x
= build_fold_indirect_ref (new_pvar
);
12930 TREE_THIS_NOTRAP (x
) = 1;
12933 x
= build_receiver_ref (var
, true, ctx
);
12934 SET_DECL_VALUE_EXPR (new_var
, x
);
12935 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12937 /* Fortran array descriptors: firstprivate of data + attach. */
12938 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
12939 && lang_hooks
.decls
.omp_array_data (var
, true))
12943 case OMP_CLAUSE_PRIVATE
:
12944 gcc_checking_assert (offloaded
);
12945 if (is_gimple_omp_oacc (ctx
->stmt
))
12947 /* No 'private' clauses on OpenACC 'kernels'. */
12948 gcc_checking_assert (!is_oacc_kernels (ctx
));
12949 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12950 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12954 var
= OMP_CLAUSE_DECL (c
);
12955 if (is_variable_sized (var
))
12957 tree new_var
= lookup_decl (var
, ctx
);
12958 tree pvar
= DECL_VALUE_EXPR (var
);
12959 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12960 pvar
= TREE_OPERAND (pvar
, 0);
12961 gcc_assert (DECL_P (pvar
));
12962 tree new_pvar
= lookup_decl (pvar
, ctx
);
12963 x
= build_fold_indirect_ref (new_pvar
);
12964 TREE_THIS_NOTRAP (x
) = 1;
12965 SET_DECL_VALUE_EXPR (new_var
, x
);
12966 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12970 case OMP_CLAUSE_USE_DEVICE_PTR
:
12971 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12972 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
12973 case OMP_CLAUSE_IS_DEVICE_PTR
:
12974 var
= OMP_CLAUSE_DECL (c
);
12975 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
12977 while (TREE_CODE (var
) == INDIRECT_REF
12978 || TREE_CODE (var
) == ARRAY_REF
)
12979 var
= TREE_OPERAND (var
, 0);
12980 if (lang_hooks
.decls
.omp_array_data (var
, true))
12981 goto omp_firstprivate_recv
;
12984 if (is_variable_sized (var
))
12986 tree new_var
= lookup_decl (var
, ctx
);
12987 tree pvar
= DECL_VALUE_EXPR (var
);
12988 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12989 pvar
= TREE_OPERAND (pvar
, 0);
12990 gcc_assert (DECL_P (pvar
));
12991 tree new_pvar
= lookup_decl (pvar
, ctx
);
12992 x
= build_fold_indirect_ref (new_pvar
);
12993 TREE_THIS_NOTRAP (x
) = 1;
12994 SET_DECL_VALUE_EXPR (new_var
, x
);
12995 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12997 else if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12998 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
12999 && !omp_privatize_by_reference (var
)
13000 && !omp_is_allocatable_or_ptr (var
)
13001 && !lang_hooks
.decls
.omp_array_data (var
, true))
13002 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
13004 tree new_var
= lookup_decl (var
, ctx
);
13005 tree type
= build_pointer_type (TREE_TYPE (var
));
13006 x
= create_tmp_var_raw (type
, get_name (new_var
));
13007 gimple_add_tmp_var (x
);
13008 x
= build_simple_mem_ref (x
);
13009 SET_DECL_VALUE_EXPR (new_var
, x
);
13010 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
13014 tree new_var
= lookup_decl (var
, ctx
);
13015 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
13016 gimple_add_tmp_var (x
);
13017 SET_DECL_VALUE_EXPR (new_var
, x
);
13018 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
13025 target_nesting_level
++;
13026 lower_omp (&tgt_body
, ctx
);
13027 target_nesting_level
--;
13029 else if (data_region
)
13030 lower_omp (&tgt_body
, ctx
);
13034 /* Declare all the variables created by mapping and the variables
13035 declared in the scope of the target body. */
13036 record_vars_into (ctx
->block_vars
, child_fn
);
13037 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
13038 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
13043 if (ctx
->record_type
)
13046 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
13047 DECL_NAMELESS (ctx
->sender_decl
) = 1;
13048 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
13049 t
= make_tree_vec (3);
13050 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
13051 TREE_VEC_ELT (t
, 1)
13052 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
13053 ".omp_data_sizes");
13054 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
13055 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
13056 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
13057 tree tkind_type
= short_unsigned_type_node
;
13058 int talign_shift
= 8;
13059 TREE_VEC_ELT (t
, 2)
13060 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
13061 ".omp_data_kinds");
13062 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
13063 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
13064 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
13065 gimple_omp_target_set_data_arg (stmt
, t
);
13067 vec
<constructor_elt
, va_gc
> *vsize
;
13068 vec
<constructor_elt
, va_gc
> *vkind
;
13069 vec_alloc (vsize
, map_cnt
);
13070 vec_alloc (vkind
, map_cnt
);
13071 unsigned int map_idx
= 0;
13073 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13074 switch (OMP_CLAUSE_CODE (c
))
13076 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
13077 unsigned int talign
;
13082 case OMP_CLAUSE_MAP
:
13083 case OMP_CLAUSE_TO
:
13084 case OMP_CLAUSE_FROM
:
13085 oacc_firstprivate_map
:
13087 ovar
= OMP_CLAUSE_DECL (c
);
13088 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13089 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13090 || (OMP_CLAUSE_MAP_KIND (c
)
13091 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
13093 if (!DECL_P (ovar
))
13095 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13096 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
13098 nc
= OMP_CLAUSE_CHAIN (c
);
13099 gcc_checking_assert (OMP_CLAUSE_DECL (nc
)
13100 == get_base_address (ovar
));
13101 ovar
= OMP_CLAUSE_DECL (nc
);
13105 tree x
= build_sender_ref (ovar
, ctx
);
13107 if (in_reduction_clauses
13108 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13109 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
13111 v
= unshare_expr (v
);
13113 while (handled_component_p (*p
)
13114 || TREE_CODE (*p
) == INDIRECT_REF
13115 || TREE_CODE (*p
) == ADDR_EXPR
13116 || TREE_CODE (*p
) == MEM_REF
13117 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
13118 p
= &TREE_OPERAND (*p
, 0);
13120 if (is_variable_sized (d
))
13122 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
13123 d
= DECL_VALUE_EXPR (d
);
13124 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
13125 d
= TREE_OPERAND (d
, 0);
13126 gcc_assert (DECL_P (d
));
13129 = (splay_tree_key
) &DECL_CONTEXT (d
);
13130 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
13135 *p
= build_fold_indirect_ref (nd
);
13137 v
= build_fold_addr_expr_with_type (v
, ptr_type_node
);
13138 gimplify_assign (x
, v
, &ilist
);
13144 if (DECL_SIZE (ovar
)
13145 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
13147 tree ovar2
= DECL_VALUE_EXPR (ovar
);
13148 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
13149 ovar2
= TREE_OPERAND (ovar2
, 0);
13150 gcc_assert (DECL_P (ovar2
));
13153 if (!maybe_lookup_field (ovar
, ctx
)
13154 && !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13155 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13156 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)))
13160 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
13161 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
13162 talign
= DECL_ALIGN_UNIT (ovar
);
13167 if (in_reduction_clauses
13168 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13169 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
13172 if (is_variable_sized (d
))
13174 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
13175 d
= DECL_VALUE_EXPR (d
);
13176 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
13177 d
= TREE_OPERAND (d
, 0);
13178 gcc_assert (DECL_P (d
));
13181 = (splay_tree_key
) &DECL_CONTEXT (d
);
13182 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
13187 var
= build_fold_indirect_ref (nd
);
13190 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13193 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13194 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13195 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
13196 && is_omp_target (stmt
))
13198 x
= build_sender_ref (c
, ctx
);
13199 gimplify_assign (x
, build_fold_addr_expr (var
), &ilist
);
13203 x
= build_sender_ref (ovar
, ctx
);
13205 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13206 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
13207 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
13208 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
13210 gcc_assert (offloaded
);
13212 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
13213 mark_addressable (avar
);
13214 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
13215 talign
= DECL_ALIGN_UNIT (avar
);
13216 avar
= build_fold_addr_expr (avar
);
13217 gimplify_assign (x
, avar
, &ilist
);
13219 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13221 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
13222 if (!omp_privatize_by_reference (var
))
13224 if (is_gimple_reg (var
)
13225 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13226 suppress_warning (var
);
13227 var
= build_fold_addr_expr (var
);
13230 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13231 gimplify_assign (x
, var
, &ilist
);
13233 else if (is_gimple_reg (var
))
13235 gcc_assert (offloaded
);
13236 tree avar
= create_tmp_var (TREE_TYPE (var
));
13237 mark_addressable (avar
);
13238 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
13239 if (GOMP_MAP_COPY_TO_P (map_kind
)
13240 || map_kind
== GOMP_MAP_POINTER
13241 || map_kind
== GOMP_MAP_TO_PSET
13242 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13244 /* If we need to initialize a temporary
13245 with VAR because it is not addressable, and
13246 the variable hasn't been initialized yet, then
13247 we'll get a warning for the store to avar.
13248 Don't warn in that case, the mapping might
13250 suppress_warning (var
, OPT_Wuninitialized
);
13251 gimplify_assign (avar
, var
, &ilist
);
13253 avar
= build_fold_addr_expr (avar
);
13254 gimplify_assign (x
, avar
, &ilist
);
13255 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
13256 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13257 && !TYPE_READONLY (TREE_TYPE (var
)))
13259 x
= unshare_expr (x
);
13260 x
= build_simple_mem_ref (x
);
13261 gimplify_assign (var
, x
, &olist
);
13266 /* While MAP is handled explicitly by the FE,
13267 for 'target update', only the identified is passed. */
13268 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
13269 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
13270 && (omp_is_allocatable_or_ptr (var
)
13271 && omp_check_optional_argument (var
, false)))
13272 var
= build_fold_indirect_ref (var
);
13273 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
13274 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
13275 || (!omp_is_allocatable_or_ptr (var
)
13276 && !omp_check_optional_argument (var
, false)))
13277 var
= build_fold_addr_expr (var
);
13278 gimplify_assign (x
, var
, &ilist
);
13282 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13284 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13285 s
= TREE_TYPE (ovar
);
13286 if (TREE_CODE (s
) == REFERENCE_TYPE
13287 || omp_check_optional_argument (ovar
, false))
13289 s
= TYPE_SIZE_UNIT (s
);
13292 s
= OMP_CLAUSE_SIZE (c
);
13293 if (s
== NULL_TREE
)
13294 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13295 s
= fold_convert (size_type_node
, s
);
13296 purpose
= size_int (map_idx
++);
13297 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13298 if (TREE_CODE (s
) != INTEGER_CST
)
13299 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13301 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
13302 switch (OMP_CLAUSE_CODE (c
))
13304 case OMP_CLAUSE_MAP
:
13305 tkind
= OMP_CLAUSE_MAP_KIND (c
);
13306 tkind_zero
= tkind
;
13307 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
13310 case GOMP_MAP_ALLOC
:
13311 case GOMP_MAP_IF_PRESENT
:
13313 case GOMP_MAP_FROM
:
13314 case GOMP_MAP_TOFROM
:
13315 case GOMP_MAP_ALWAYS_TO
:
13316 case GOMP_MAP_ALWAYS_FROM
:
13317 case GOMP_MAP_ALWAYS_TOFROM
:
13318 case GOMP_MAP_RELEASE
:
13319 case GOMP_MAP_FORCE_TO
:
13320 case GOMP_MAP_FORCE_FROM
:
13321 case GOMP_MAP_FORCE_TOFROM
:
13322 case GOMP_MAP_FORCE_PRESENT
:
13323 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
13325 case GOMP_MAP_DELETE
:
13326 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
13330 if (tkind_zero
!= tkind
)
13332 if (integer_zerop (s
))
13333 tkind
= tkind_zero
;
13334 else if (integer_nonzerop (s
))
13335 tkind_zero
= tkind
;
13337 if (tkind_zero
== tkind
13338 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c
)
13339 && (((tkind
& GOMP_MAP_FLAG_SPECIAL_BITS
)
13340 & ~GOMP_MAP_IMPLICIT
)
13343 /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
13344 bits are not interfered by other special bit encodings,
13345 then turn the GOMP_IMPLICIT_BIT flag on for the runtime
13347 tkind
|= GOMP_MAP_IMPLICIT
;
13348 tkind_zero
= tkind
;
13351 case OMP_CLAUSE_FIRSTPRIVATE
:
13352 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13353 tkind
= GOMP_MAP_TO
;
13354 tkind_zero
= tkind
;
13356 case OMP_CLAUSE_TO
:
13357 tkind
= GOMP_MAP_TO
;
13358 tkind_zero
= tkind
;
13360 case OMP_CLAUSE_FROM
:
13361 tkind
= GOMP_MAP_FROM
;
13362 tkind_zero
= tkind
;
13365 gcc_unreachable ();
13367 gcc_checking_assert (tkind
13368 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13369 gcc_checking_assert (tkind_zero
13370 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13371 talign
= ceil_log2 (talign
);
13372 tkind
|= talign
<< talign_shift
;
13373 tkind_zero
|= talign
<< talign_shift
;
13374 gcc_checking_assert (tkind
13375 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13376 gcc_checking_assert (tkind_zero
13377 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13378 if (tkind
== tkind_zero
)
13379 x
= build_int_cstu (tkind_type
, tkind
);
13382 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
13383 x
= build3 (COND_EXPR
, tkind_type
,
13384 fold_build2 (EQ_EXPR
, boolean_type_node
,
13385 unshare_expr (s
), size_zero_node
),
13386 build_int_cstu (tkind_type
, tkind_zero
),
13387 build_int_cstu (tkind_type
, tkind
));
13389 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
13394 case OMP_CLAUSE_FIRSTPRIVATE
:
13395 omp_has_device_addr_descr
:
13396 if (is_gimple_omp_oacc (ctx
->stmt
))
13397 goto oacc_firstprivate_map
;
13398 ovar
= OMP_CLAUSE_DECL (c
);
13399 if (omp_privatize_by_reference (ovar
))
13400 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13402 talign
= DECL_ALIGN_UNIT (ovar
);
13403 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13404 x
= build_sender_ref (ovar
, ctx
);
13405 tkind
= GOMP_MAP_FIRSTPRIVATE
;
13406 type
= TREE_TYPE (ovar
);
13407 if (omp_privatize_by_reference (ovar
))
13408 type
= TREE_TYPE (type
);
13409 if ((INTEGRAL_TYPE_P (type
)
13410 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
13411 || TREE_CODE (type
) == POINTER_TYPE
)
13413 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13415 if (omp_privatize_by_reference (var
))
13416 t
= build_simple_mem_ref (var
);
13417 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13418 suppress_warning (var
);
13419 if (TREE_CODE (type
) != POINTER_TYPE
)
13420 t
= fold_convert (pointer_sized_int_node
, t
);
13421 t
= fold_convert (TREE_TYPE (x
), t
);
13422 gimplify_assign (x
, t
, &ilist
);
13424 else if (omp_privatize_by_reference (var
))
13425 gimplify_assign (x
, var
, &ilist
);
13426 else if (is_gimple_reg (var
))
13428 tree avar
= create_tmp_var (TREE_TYPE (var
));
13429 mark_addressable (avar
);
13430 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13431 suppress_warning (var
);
13432 gimplify_assign (avar
, var
, &ilist
);
13433 avar
= build_fold_addr_expr (avar
);
13434 gimplify_assign (x
, avar
, &ilist
);
13438 var
= build_fold_addr_expr (var
);
13439 gimplify_assign (x
, var
, &ilist
);
13441 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
13443 else if (omp_privatize_by_reference (ovar
))
13444 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13446 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13447 s
= fold_convert (size_type_node
, s
);
13448 purpose
= size_int (map_idx
++);
13449 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13450 if (TREE_CODE (s
) != INTEGER_CST
)
13451 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13453 gcc_checking_assert (tkind
13454 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13455 talign
= ceil_log2 (talign
);
13456 tkind
|= talign
<< talign_shift
;
13457 gcc_checking_assert (tkind
13458 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13459 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13460 build_int_cstu (tkind_type
, tkind
));
13461 /* Fortran array descriptors: firstprivate of data + attach. */
13462 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
13463 && lang_hooks
.decls
.omp_array_data (ovar
, true))
13465 tree not_null_lb
, null_lb
, after_lb
;
13466 tree var1
, var2
, size1
, size2
;
13467 tree present
= omp_check_optional_argument (ovar
, true);
13470 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13471 not_null_lb
= create_artificial_label (clause_loc
);
13472 null_lb
= create_artificial_label (clause_loc
);
13473 after_lb
= create_artificial_label (clause_loc
);
13474 gimple_seq seq
= NULL
;
13475 present
= force_gimple_operand (present
, &seq
, true,
13477 gimple_seq_add_seq (&ilist
, seq
);
13478 gimple_seq_add_stmt (&ilist
,
13479 gimple_build_cond_from_tree (present
,
13480 not_null_lb
, null_lb
));
13481 gimple_seq_add_stmt (&ilist
,
13482 gimple_build_label (not_null_lb
));
13484 var1
= lang_hooks
.decls
.omp_array_data (var
, false);
13485 size1
= lang_hooks
.decls
.omp_array_size (var
, &ilist
);
13486 var2
= build_fold_addr_expr (x
);
13487 if (!POINTER_TYPE_P (TREE_TYPE (var
)))
13488 var
= build_fold_addr_expr (var
);
13489 size2
= fold_build2 (POINTER_DIFF_EXPR
, ssizetype
,
13490 build_fold_addr_expr (var1
), var
);
13491 size2
= fold_convert (sizetype
, size2
);
13494 tree tmp
= create_tmp_var (TREE_TYPE (var1
));
13495 gimplify_assign (tmp
, var1
, &ilist
);
13497 tmp
= create_tmp_var (TREE_TYPE (var2
));
13498 gimplify_assign (tmp
, var2
, &ilist
);
13500 tmp
= create_tmp_var (TREE_TYPE (size1
));
13501 gimplify_assign (tmp
, size1
, &ilist
);
13503 tmp
= create_tmp_var (TREE_TYPE (size2
));
13504 gimplify_assign (tmp
, size2
, &ilist
);
13506 gimple_seq_add_stmt (&ilist
, gimple_build_goto (after_lb
));
13507 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_lb
));
13508 gimplify_assign (var1
, null_pointer_node
, &ilist
);
13509 gimplify_assign (var2
, null_pointer_node
, &ilist
);
13510 gimplify_assign (size1
, size_zero_node
, &ilist
);
13511 gimplify_assign (size2
, size_zero_node
, &ilist
);
13512 gimple_seq_add_stmt (&ilist
, gimple_build_label (after_lb
));
13514 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
13515 gimplify_assign (x
, var1
, &ilist
);
13516 tkind
= GOMP_MAP_FIRSTPRIVATE
;
13517 talign
= DECL_ALIGN_UNIT (ovar
);
13518 talign
= ceil_log2 (talign
);
13519 tkind
|= talign
<< talign_shift
;
13520 gcc_checking_assert (tkind
13522 TYPE_MAX_VALUE (tkind_type
)));
13523 purpose
= size_int (map_idx
++);
13524 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, size1
);
13525 if (TREE_CODE (size1
) != INTEGER_CST
)
13526 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13527 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13528 build_int_cstu (tkind_type
, tkind
));
13529 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
13530 gimplify_assign (x
, var2
, &ilist
);
13531 tkind
= GOMP_MAP_ATTACH
;
13532 purpose
= size_int (map_idx
++);
13533 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, size2
);
13534 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13535 build_int_cstu (tkind_type
, tkind
));
13539 case OMP_CLAUSE_USE_DEVICE_PTR
:
13540 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13541 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
13542 case OMP_CLAUSE_IS_DEVICE_PTR
:
13543 ovar
= OMP_CLAUSE_DECL (c
);
13544 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13546 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13547 goto omp_has_device_addr_descr
;
13548 while (TREE_CODE (ovar
) == INDIRECT_REF
13549 || TREE_CODE (ovar
) == ARRAY_REF
)
13550 ovar
= TREE_OPERAND (ovar
, 0);
13552 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13554 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13556 tkind
= ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13557 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13558 ? GOMP_MAP_USE_DEVICE_PTR
: GOMP_MAP_FIRSTPRIVATE_INT
);
13559 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
13561 else if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13562 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13564 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
13565 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
13569 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13570 x
= build_sender_ref (ovar
, ctx
);
13573 if (is_gimple_omp_oacc (ctx
->stmt
))
13575 gcc_assert (tkind
== GOMP_MAP_USE_DEVICE_PTR
);
13577 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
))
13578 tkind
= GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT
;
13581 type
= TREE_TYPE (ovar
);
13582 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13583 var
= lang_hooks
.decls
.omp_array_data (var
, false);
13584 else if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13585 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13586 && !omp_privatize_by_reference (ovar
)
13587 && !omp_is_allocatable_or_ptr (ovar
))
13588 || TREE_CODE (type
) == ARRAY_TYPE
)
13589 var
= build_fold_addr_expr (var
);
13592 if (omp_privatize_by_reference (ovar
)
13593 || omp_check_optional_argument (ovar
, false)
13594 || omp_is_allocatable_or_ptr (ovar
))
13596 type
= TREE_TYPE (type
);
13597 if (POINTER_TYPE_P (type
)
13598 && TREE_CODE (type
) != ARRAY_TYPE
13599 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13600 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
13601 && !omp_is_allocatable_or_ptr (ovar
))
13602 || (omp_privatize_by_reference (ovar
)
13603 && omp_is_allocatable_or_ptr (ovar
))))
13604 var
= build_simple_mem_ref (var
);
13605 var
= fold_convert (TREE_TYPE (x
), var
);
13609 present
= omp_check_optional_argument (ovar
, true);
13612 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13613 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13614 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13615 tree new_x
= unshare_expr (x
);
13616 gimplify_expr (&present
, &ilist
, NULL
, is_gimple_val
,
13618 gcond
*cond
= gimple_build_cond_from_tree (present
,
13621 gimple_seq_add_stmt (&ilist
, cond
);
13622 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_label
));
13623 gimplify_assign (new_x
, null_pointer_node
, &ilist
);
13624 gimple_seq_add_stmt (&ilist
, gimple_build_goto (opt_arg_label
));
13625 gimple_seq_add_stmt (&ilist
,
13626 gimple_build_label (notnull_label
));
13627 gimplify_assign (x
, var
, &ilist
);
13628 gimple_seq_add_stmt (&ilist
,
13629 gimple_build_label (opt_arg_label
));
13632 gimplify_assign (x
, var
, &ilist
);
13634 purpose
= size_int (map_idx
++);
13635 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13636 gcc_checking_assert (tkind
13637 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13638 gcc_checking_assert (tkind
13639 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13640 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13641 build_int_cstu (tkind_type
, tkind
));
13645 gcc_assert (map_idx
== map_cnt
);
13647 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
13648 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
13649 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
13650 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
13651 for (int i
= 1; i
<= 2; i
++)
13652 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
13654 gimple_seq initlist
= NULL
;
13655 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
13656 TREE_VEC_ELT (t
, i
)),
13657 &initlist
, true, NULL_TREE
);
13658 gimple_seq_add_seq (&ilist
, initlist
);
13660 tree clobber
= build_clobber (TREE_TYPE (TREE_VEC_ELT (t
, i
)));
13661 gimple_seq_add_stmt (&olist
,
13662 gimple_build_assign (TREE_VEC_ELT (t
, i
),
13665 else if (omp_maybe_offloaded_ctx (ctx
->outer
))
13667 tree id
= get_identifier ("omp declare target");
13668 tree decl
= TREE_VEC_ELT (t
, i
);
13669 DECL_ATTRIBUTES (decl
)
13670 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
13671 varpool_node
*node
= varpool_node::get (decl
);
13674 node
->offloadable
= 1;
13675 if (ENABLE_OFFLOADING
)
13677 g
->have_offload
= true;
13678 vec_safe_push (offload_vars
, t
);
13683 tree clobber
= build_clobber (ctx
->record_type
);
13684 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
13688 /* Once all the expansions are done, sequence all the different
13689 fragments inside gimple_omp_body. */
13694 && ctx
->record_type
)
13696 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
13697 /* fixup_child_record_type might have changed receiver_decl's type. */
13698 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
13699 gimple_seq_add_stmt (&new_body
,
13700 gimple_build_assign (ctx
->receiver_decl
, t
));
13702 gimple_seq_add_seq (&new_body
, fplist
);
13704 if (offloaded
|| data_region
)
13706 tree prev
= NULL_TREE
;
13707 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13708 switch (OMP_CLAUSE_CODE (c
))
13713 case OMP_CLAUSE_FIRSTPRIVATE
:
13714 omp_firstprivatize_data_region
:
13715 if (is_gimple_omp_oacc (ctx
->stmt
))
13717 var
= OMP_CLAUSE_DECL (c
);
13718 if (omp_privatize_by_reference (var
)
13719 || is_gimple_reg_type (TREE_TYPE (var
)))
13721 tree new_var
= lookup_decl (var
, ctx
);
13723 type
= TREE_TYPE (var
);
13724 if (omp_privatize_by_reference (var
))
13725 type
= TREE_TYPE (type
);
13726 if ((INTEGRAL_TYPE_P (type
)
13727 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
13728 || TREE_CODE (type
) == POINTER_TYPE
)
13730 x
= build_receiver_ref (var
, false, ctx
);
13731 if (TREE_CODE (type
) != POINTER_TYPE
)
13732 x
= fold_convert (pointer_sized_int_node
, x
);
13733 x
= fold_convert (type
, x
);
13734 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13736 if (omp_privatize_by_reference (var
))
13738 tree v
= create_tmp_var_raw (type
, get_name (var
));
13739 gimple_add_tmp_var (v
);
13740 TREE_ADDRESSABLE (v
) = 1;
13741 gimple_seq_add_stmt (&new_body
,
13742 gimple_build_assign (v
, x
));
13743 x
= build_fold_addr_expr (v
);
13745 gimple_seq_add_stmt (&new_body
,
13746 gimple_build_assign (new_var
, x
));
13750 bool by_ref
= !omp_privatize_by_reference (var
);
13751 x
= build_receiver_ref (var
, by_ref
, ctx
);
13752 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13754 gimple_seq_add_stmt (&new_body
,
13755 gimple_build_assign (new_var
, x
));
13758 else if (is_variable_sized (var
))
13760 tree pvar
= DECL_VALUE_EXPR (var
);
13761 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13762 pvar
= TREE_OPERAND (pvar
, 0);
13763 gcc_assert (DECL_P (pvar
));
13764 tree new_var
= lookup_decl (pvar
, ctx
);
13765 x
= build_receiver_ref (var
, false, ctx
);
13766 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13767 gimple_seq_add_stmt (&new_body
,
13768 gimple_build_assign (new_var
, x
));
13771 case OMP_CLAUSE_PRIVATE
:
13772 if (is_gimple_omp_oacc (ctx
->stmt
))
13774 var
= OMP_CLAUSE_DECL (c
);
13775 if (omp_privatize_by_reference (var
))
13777 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13778 tree new_var
= lookup_decl (var
, ctx
);
13779 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
13780 if (TREE_CONSTANT (x
))
13782 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
13784 gimple_add_tmp_var (x
);
13785 TREE_ADDRESSABLE (x
) = 1;
13786 x
= build_fold_addr_expr_loc (clause_loc
, x
);
13791 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
13792 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13793 gimple_seq_add_stmt (&new_body
,
13794 gimple_build_assign (new_var
, x
));
13797 case OMP_CLAUSE_USE_DEVICE_PTR
:
13798 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13799 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
13800 case OMP_CLAUSE_IS_DEVICE_PTR
:
13802 gimple_seq assign_body
;
13803 bool is_array_data
;
13804 bool do_optional_check
;
13805 assign_body
= NULL
;
13806 do_optional_check
= false;
13807 var
= OMP_CLAUSE_DECL (c
);
13808 is_array_data
= lang_hooks
.decls
.omp_array_data (var
, true) != NULL
;
13809 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
&& is_array_data
)
13810 goto omp_firstprivatize_data_region
;
13812 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13813 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13814 x
= build_sender_ref (is_array_data
13815 ? (splay_tree_key
) &DECL_NAME (var
)
13816 : (splay_tree_key
) &DECL_UID (var
), ctx
);
13819 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13821 while (TREE_CODE (var
) == INDIRECT_REF
13822 || TREE_CODE (var
) == ARRAY_REF
)
13823 var
= TREE_OPERAND (var
, 0);
13825 x
= build_receiver_ref (var
, false, ctx
);
13830 bool is_ref
= omp_privatize_by_reference (var
);
13831 do_optional_check
= true;
13832 /* First, we copy the descriptor data from the host; then
13833 we update its data to point to the target address. */
13834 new_var
= lookup_decl (var
, ctx
);
13835 new_var
= DECL_VALUE_EXPR (new_var
);
13838 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
13839 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
)
13840 v2
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
13844 v2
= build_fold_indirect_ref (v2
);
13845 v
= create_tmp_var_raw (TREE_TYPE (v2
), get_name (var
));
13846 gimple_add_tmp_var (v
);
13847 TREE_ADDRESSABLE (v
) = 1;
13848 gimplify_assign (v
, v2
, &assign_body
);
13849 tree rhs
= build_fold_addr_expr (v
);
13850 gimple_seq_add_stmt (&assign_body
,
13851 gimple_build_assign (new_var
, rhs
));
13854 gimplify_assign (new_var
, v2
, &assign_body
);
13856 v2
= lang_hooks
.decls
.omp_array_data (unshare_expr (v
), false);
13858 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13859 gimple_seq_add_stmt (&assign_body
,
13860 gimple_build_assign (v2
, x
));
13862 else if (is_variable_sized (var
))
13864 tree pvar
= DECL_VALUE_EXPR (var
);
13865 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13866 pvar
= TREE_OPERAND (pvar
, 0);
13867 gcc_assert (DECL_P (pvar
));
13868 new_var
= lookup_decl (pvar
, ctx
);
13869 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13870 gimple_seq_add_stmt (&assign_body
,
13871 gimple_build_assign (new_var
, x
));
13873 else if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13874 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13875 && !omp_privatize_by_reference (var
)
13876 && !omp_is_allocatable_or_ptr (var
))
13877 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
13879 new_var
= lookup_decl (var
, ctx
);
13880 new_var
= DECL_VALUE_EXPR (new_var
);
13881 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
13882 new_var
= TREE_OPERAND (new_var
, 0);
13883 gcc_assert (DECL_P (new_var
));
13884 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13885 gimple_seq_add_stmt (&assign_body
,
13886 gimple_build_assign (new_var
, x
));
13890 tree type
= TREE_TYPE (var
);
13891 new_var
= lookup_decl (var
, ctx
);
13892 if (omp_privatize_by_reference (var
))
13894 type
= TREE_TYPE (type
);
13895 if (POINTER_TYPE_P (type
)
13896 && TREE_CODE (type
) != ARRAY_TYPE
13897 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13898 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13899 || (omp_privatize_by_reference (var
)
13900 && omp_is_allocatable_or_ptr (var
))))
13902 tree v
= create_tmp_var_raw (type
, get_name (var
));
13903 gimple_add_tmp_var (v
);
13904 TREE_ADDRESSABLE (v
) = 1;
13905 x
= fold_convert (type
, x
);
13906 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
,
13908 gimple_seq_add_stmt (&assign_body
,
13909 gimple_build_assign (v
, x
));
13910 x
= build_fold_addr_expr (v
);
13911 do_optional_check
= true;
13914 new_var
= DECL_VALUE_EXPR (new_var
);
13915 x
= fold_convert (TREE_TYPE (new_var
), x
);
13916 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13917 gimple_seq_add_stmt (&assign_body
,
13918 gimple_build_assign (new_var
, x
));
13921 present
= ((do_optional_check
13922 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13923 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c
), true)
13927 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13928 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13929 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13930 glabel
*null_glabel
= gimple_build_label (null_label
);
13931 glabel
*notnull_glabel
= gimple_build_label (notnull_label
);
13932 ggoto
*opt_arg_ggoto
= gimple_build_goto (opt_arg_label
);
13933 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13935 gimplify_expr (&present
, &new_body
, NULL
, is_gimple_val
,
13937 gcond
*cond
= gimple_build_cond_from_tree (present
,
13940 gimple_seq_add_stmt (&new_body
, cond
);
13941 gimple_seq_add_stmt (&new_body
, null_glabel
);
13942 gimplify_assign (new_var
, null_pointer_node
, &new_body
);
13943 gimple_seq_add_stmt (&new_body
, opt_arg_ggoto
);
13944 gimple_seq_add_stmt (&new_body
, notnull_glabel
);
13945 gimple_seq_add_seq (&new_body
, assign_body
);
13946 gimple_seq_add_stmt (&new_body
,
13947 gimple_build_label (opt_arg_label
));
13950 gimple_seq_add_seq (&new_body
, assign_body
);
13953 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13954 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13955 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13956 or references to VLAs. */
13957 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13958 switch (OMP_CLAUSE_CODE (c
))
13963 case OMP_CLAUSE_MAP
:
13964 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13965 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
13967 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13968 poly_int64 offset
= 0;
13970 var
= OMP_CLAUSE_DECL (c
);
13972 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
13973 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
13975 && varpool_node::get_create (var
)->offloadable
)
13977 if (TREE_CODE (var
) == INDIRECT_REF
13978 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
13979 var
= TREE_OPERAND (var
, 0);
13980 if (TREE_CODE (var
) == COMPONENT_REF
)
13982 var
= get_addr_base_and_unit_offset (var
, &offset
);
13983 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
13985 else if (DECL_SIZE (var
)
13986 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
13988 tree var2
= DECL_VALUE_EXPR (var
);
13989 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
13990 var2
= TREE_OPERAND (var2
, 0);
13991 gcc_assert (DECL_P (var2
));
13994 tree new_var
= lookup_decl (var
, ctx
), x
;
13995 tree type
= TREE_TYPE (new_var
);
13997 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
13998 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
14001 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
14003 new_var
= build2 (MEM_REF
, type
,
14004 build_fold_addr_expr (new_var
),
14005 build_int_cst (build_pointer_type (type
),
14008 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
14010 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
14011 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
14012 new_var
= build2 (MEM_REF
, type
,
14013 build_fold_addr_expr (new_var
),
14014 build_int_cst (build_pointer_type (type
),
14018 is_ref
= omp_privatize_by_reference (var
);
14019 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
14021 bool ref_to_array
= false;
14022 bool ref_to_ptr
= false;
14025 type
= TREE_TYPE (type
);
14026 if (TREE_CODE (type
) == ARRAY_TYPE
)
14028 type
= build_pointer_type (type
);
14029 ref_to_array
= true;
14032 else if (TREE_CODE (type
) == ARRAY_TYPE
)
14034 tree decl2
= DECL_VALUE_EXPR (new_var
);
14035 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
14036 decl2
= TREE_OPERAND (decl2
, 0);
14037 gcc_assert (DECL_P (decl2
));
14039 type
= TREE_TYPE (new_var
);
14041 else if (TREE_CODE (type
) == REFERENCE_TYPE
14042 && TREE_CODE (TREE_TYPE (type
)) == POINTER_TYPE
)
14044 type
= TREE_TYPE (type
);
14047 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
14048 x
= fold_convert_loc (clause_loc
, type
, x
);
14049 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
14051 tree bias
= OMP_CLAUSE_SIZE (c
);
14053 bias
= lookup_decl (bias
, ctx
);
14054 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
14055 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
14057 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
14058 TREE_TYPE (x
), x
, bias
);
14061 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
14062 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
14063 if ((is_ref
&& !ref_to_array
)
14066 tree t
= create_tmp_var_raw (type
, get_name (var
));
14067 gimple_add_tmp_var (t
);
14068 TREE_ADDRESSABLE (t
) = 1;
14069 gimple_seq_add_stmt (&new_body
,
14070 gimple_build_assign (t
, x
));
14071 x
= build_fold_addr_expr_loc (clause_loc
, t
);
14073 gimple_seq_add_stmt (&new_body
,
14074 gimple_build_assign (new_var
, x
));
14077 else if (OMP_CLAUSE_CHAIN (c
)
14078 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
14080 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
14081 == GOMP_MAP_FIRSTPRIVATE_POINTER
14082 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
14083 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
14086 case OMP_CLAUSE_PRIVATE
:
14087 var
= OMP_CLAUSE_DECL (c
);
14088 if (is_variable_sized (var
))
14090 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
14091 tree new_var
= lookup_decl (var
, ctx
);
14092 tree pvar
= DECL_VALUE_EXPR (var
);
14093 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
14094 pvar
= TREE_OPERAND (pvar
, 0);
14095 gcc_assert (DECL_P (pvar
));
14096 tree new_pvar
= lookup_decl (pvar
, ctx
);
14097 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
14098 tree al
= size_int (DECL_ALIGN (var
));
14099 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
14100 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
14101 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
14102 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
14103 gimple_seq_add_stmt (&new_body
,
14104 gimple_build_assign (new_pvar
, x
));
14106 else if (omp_privatize_by_reference (var
)
14107 && !is_gimple_omp_oacc (ctx
->stmt
))
14109 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
14110 tree new_var
= lookup_decl (var
, ctx
);
14111 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
14112 if (TREE_CONSTANT (x
))
14117 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
14118 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
14119 tree al
= size_int (TYPE_ALIGN (rtype
));
14120 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
14123 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
14124 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
14125 gimple_seq_add_stmt (&new_body
,
14126 gimple_build_assign (new_var
, x
));
14131 gimple_seq fork_seq
= NULL
;
14132 gimple_seq join_seq
= NULL
;
14134 if (offloaded
&& is_gimple_omp_oacc (ctx
->stmt
))
14136 /* If there are reductions on the offloaded region itself, treat
14137 them as a dummy GANG loop. */
14138 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
14140 gcall
*private_marker
= lower_oacc_private_marker (ctx
);
14142 if (private_marker
)
14143 gimple_call_set_arg (private_marker
, 2, level
);
14145 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
14146 false, NULL
, private_marker
, NULL
, &fork_seq
,
14150 gimple_seq_add_seq (&new_body
, fork_seq
);
14151 gimple_seq_add_seq (&new_body
, tgt_body
);
14152 gimple_seq_add_seq (&new_body
, join_seq
);
14156 new_body
= maybe_catch_exception (new_body
);
14157 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
14159 gimple_omp_set_body (stmt
, new_body
);
14162 bind
= gimple_build_bind (NULL
, NULL
,
14163 tgt_bind
? gimple_bind_block (tgt_bind
)
14165 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
14166 gimple_bind_add_seq (bind
, ilist
);
14167 gimple_bind_add_stmt (bind
, stmt
);
14168 gimple_bind_add_seq (bind
, olist
);
14170 pop_gimplify_context (NULL
);
14174 gimple_bind_add_seq (dep_bind
, dep_ilist
);
14175 gimple_bind_add_stmt (dep_bind
, bind
);
14176 gimple_bind_add_seq (dep_bind
, dep_olist
);
14177 pop_gimplify_context (dep_bind
);
14181 /* Expand code for an OpenMP teams directive. */
14184 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
14186 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
14187 push_gimplify_context ();
14189 tree block
= make_node (BLOCK
);
14190 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
14191 gsi_replace (gsi_p
, bind
, true);
14192 gimple_seq bind_body
= NULL
;
14193 gimple_seq dlist
= NULL
;
14194 gimple_seq olist
= NULL
;
14196 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
14197 OMP_CLAUSE_NUM_TEAMS
);
14198 tree num_teams_lower
= NULL_TREE
;
14199 if (num_teams
== NULL_TREE
)
14200 num_teams
= build_int_cst (unsigned_type_node
, 0);
14203 num_teams_lower
= OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams
);
14204 if (num_teams_lower
)
14206 num_teams_lower
= fold_convert (unsigned_type_node
, num_teams_lower
);
14207 gimplify_expr (&num_teams_lower
, &bind_body
, NULL
, is_gimple_val
,
14210 num_teams
= OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams
);
14211 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
14212 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
14214 if (num_teams_lower
== NULL_TREE
)
14215 num_teams_lower
= num_teams
;
14216 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
14217 OMP_CLAUSE_THREAD_LIMIT
);
14218 if (thread_limit
== NULL_TREE
)
14219 thread_limit
= build_int_cst (unsigned_type_node
, 0);
14222 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
14223 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
14224 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
14227 location_t loc
= gimple_location (teams_stmt
);
14228 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4
);
14229 tree rettype
= TREE_TYPE (TREE_TYPE (decl
));
14230 tree first
= create_tmp_var (rettype
);
14231 gimple_seq_add_stmt (&bind_body
,
14232 gimple_build_assign (first
, build_one_cst (rettype
)));
14233 tree llabel
= create_artificial_label (loc
);
14234 gimple_seq_add_stmt (&bind_body
, gimple_build_label (llabel
));
14236 = gimple_build_call (decl
, 4, num_teams_lower
, num_teams
, thread_limit
,
14238 gimple_set_location (call
, loc
);
14239 tree temp
= create_tmp_var (rettype
);
14240 gimple_call_set_lhs (call
, temp
);
14241 gimple_seq_add_stmt (&bind_body
, call
);
14243 tree tlabel
= create_artificial_label (loc
);
14244 tree flabel
= create_artificial_label (loc
);
14245 gimple
*cond
= gimple_build_cond (NE_EXPR
, temp
, build_zero_cst (rettype
),
14247 gimple_seq_add_stmt (&bind_body
, cond
);
14248 gimple_seq_add_stmt (&bind_body
, gimple_build_label (tlabel
));
14249 gimple_seq_add_stmt (&bind_body
,
14250 gimple_build_assign (first
, build_zero_cst (rettype
)));
14252 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
14253 &bind_body
, &dlist
, ctx
, NULL
);
14254 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
14255 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
,
14257 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
14259 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
14260 gimple_omp_set_body (teams_stmt
, NULL
);
14261 gimple_seq_add_seq (&bind_body
, olist
);
14262 gimple_seq_add_seq (&bind_body
, dlist
);
14263 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
14264 gimple_seq_add_stmt (&bind_body
, gimple_build_goto (llabel
));
14265 gimple_seq_add_stmt (&bind_body
, gimple_build_label (flabel
));
14266 gimple_bind_set_body (bind
, bind_body
);
14268 pop_gimplify_context (bind
);
14270 gimple_bind_append_vars (bind
, ctx
->block_vars
);
14271 BLOCK_VARS (block
) = ctx
->block_vars
;
14272 if (BLOCK_VARS (block
))
14273 TREE_USED (block
) = 1;
14276 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
14277 regimplified. If DATA is non-NULL, lower_omp_1 is outside
14278 of OMP context, but with make_addressable_vars set. */
14281 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
14286 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
14287 if ((VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
14289 && DECL_HAS_VALUE_EXPR_P (t
))
14292 if (make_addressable_vars
14294 && bitmap_bit_p (make_addressable_vars
, DECL_UID (t
)))
14297 /* If a global variable has been privatized, TREE_CONSTANT on
14298 ADDR_EXPR might be wrong. */
14299 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
14300 recompute_tree_invariant_for_addr_expr (t
);
14302 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
14306 /* Data to be communicated between lower_omp_regimplify_operands and
14307 lower_omp_regimplify_operands_p. */
14309 struct lower_omp_regimplify_operands_data
14315 /* Helper function for lower_omp_regimplify_operands. Find
14316 omp_member_access_dummy_var vars and adjust temporarily their
14317 DECL_VALUE_EXPRs if needed. */
14320 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
14323 tree t
= omp_member_access_dummy_var (*tp
);
14326 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
14327 lower_omp_regimplify_operands_data
*ldata
14328 = (lower_omp_regimplify_operands_data
*) wi
->info
;
14329 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
14332 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
14333 ldata
->decls
->safe_push (*tp
);
14334 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
14335 SET_DECL_VALUE_EXPR (*tp
, v
);
14338 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
14342 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14343 of omp_member_access_dummy_var vars during regimplification. */
14346 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
14347 gimple_stmt_iterator
*gsi_p
)
14349 auto_vec
<tree
, 10> decls
;
14352 struct walk_stmt_info wi
;
14353 memset (&wi
, '\0', sizeof (wi
));
14354 struct lower_omp_regimplify_operands_data data
;
14356 data
.decls
= &decls
;
14358 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
14360 gimple_regimplify_operands (stmt
, gsi_p
);
14361 while (!decls
.is_empty ())
14363 tree t
= decls
.pop ();
14364 tree v
= decls
.pop ();
14365 SET_DECL_VALUE_EXPR (t
, v
);
14370 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
14372 gimple
*stmt
= gsi_stmt (*gsi_p
);
14373 struct walk_stmt_info wi
;
14376 if (gimple_has_location (stmt
))
14377 input_location
= gimple_location (stmt
);
14379 if (make_addressable_vars
)
14380 memset (&wi
, '\0', sizeof (wi
));
14382 /* If we have issued syntax errors, avoid doing any heavy lifting.
14383 Just replace the OMP directives with a NOP to avoid
14384 confusing RTL expansion. */
14385 if (seen_error () && is_gimple_omp (stmt
))
14387 gsi_replace (gsi_p
, gimple_build_nop (), true);
14391 switch (gimple_code (stmt
))
14395 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
14396 if ((ctx
|| make_addressable_vars
)
14397 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
14398 lower_omp_regimplify_p
,
14399 ctx
? NULL
: &wi
, NULL
)
14400 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
14401 lower_omp_regimplify_p
,
14402 ctx
? NULL
: &wi
, NULL
)))
14403 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
14407 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
14409 case GIMPLE_EH_FILTER
:
14410 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
14413 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
14414 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
14416 case GIMPLE_TRANSACTION
:
14417 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
14421 if (ctx
&& is_gimple_omp_oacc (ctx
->stmt
))
14423 tree vars
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
14424 oacc_privatization_scan_decl_chain (ctx
, vars
);
14426 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
14427 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
14429 case GIMPLE_OMP_PARALLEL
:
14430 case GIMPLE_OMP_TASK
:
14431 ctx
= maybe_lookup_ctx (stmt
);
14433 if (ctx
->cancellable
)
14434 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14435 lower_omp_taskreg (gsi_p
, ctx
);
14437 case GIMPLE_OMP_FOR
:
14438 ctx
= maybe_lookup_ctx (stmt
);
14440 if (ctx
->cancellable
)
14441 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14442 lower_omp_for (gsi_p
, ctx
);
14444 case GIMPLE_OMP_SECTIONS
:
14445 ctx
= maybe_lookup_ctx (stmt
);
14447 if (ctx
->cancellable
)
14448 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14449 lower_omp_sections (gsi_p
, ctx
);
14451 case GIMPLE_OMP_SCOPE
:
14452 ctx
= maybe_lookup_ctx (stmt
);
14454 lower_omp_scope (gsi_p
, ctx
);
14456 case GIMPLE_OMP_SINGLE
:
14457 ctx
= maybe_lookup_ctx (stmt
);
14459 lower_omp_single (gsi_p
, ctx
);
14461 case GIMPLE_OMP_MASTER
:
14462 case GIMPLE_OMP_MASKED
:
14463 ctx
= maybe_lookup_ctx (stmt
);
14465 lower_omp_master (gsi_p
, ctx
);
14467 case GIMPLE_OMP_TASKGROUP
:
14468 ctx
= maybe_lookup_ctx (stmt
);
14470 lower_omp_taskgroup (gsi_p
, ctx
);
14472 case GIMPLE_OMP_ORDERED
:
14473 ctx
= maybe_lookup_ctx (stmt
);
14475 lower_omp_ordered (gsi_p
, ctx
);
14477 case GIMPLE_OMP_SCAN
:
14478 ctx
= maybe_lookup_ctx (stmt
);
14480 lower_omp_scan (gsi_p
, ctx
);
14482 case GIMPLE_OMP_CRITICAL
:
14483 ctx
= maybe_lookup_ctx (stmt
);
14485 lower_omp_critical (gsi_p
, ctx
);
14487 case GIMPLE_OMP_ATOMIC_LOAD
:
14488 if ((ctx
|| make_addressable_vars
)
14489 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14490 as_a
<gomp_atomic_load
*> (stmt
)),
14491 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
14492 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
14494 case GIMPLE_OMP_TARGET
:
14495 ctx
= maybe_lookup_ctx (stmt
);
14497 lower_omp_target (gsi_p
, ctx
);
14499 case GIMPLE_OMP_TEAMS
:
14500 ctx
= maybe_lookup_ctx (stmt
);
14502 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
14503 lower_omp_taskreg (gsi_p
, ctx
);
14505 lower_omp_teams (gsi_p
, ctx
);
14509 call_stmt
= as_a
<gcall
*> (stmt
);
14510 fndecl
= gimple_call_fndecl (call_stmt
);
14512 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
14513 switch (DECL_FUNCTION_CODE (fndecl
))
14515 case BUILT_IN_GOMP_BARRIER
:
14519 case BUILT_IN_GOMP_CANCEL
:
14520 case BUILT_IN_GOMP_CANCELLATION_POINT
:
14523 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
14524 cctx
= cctx
->outer
;
14525 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
14526 if (!cctx
->cancellable
)
14528 if (DECL_FUNCTION_CODE (fndecl
)
14529 == BUILT_IN_GOMP_CANCELLATION_POINT
)
14531 stmt
= gimple_build_nop ();
14532 gsi_replace (gsi_p
, stmt
, false);
14536 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
14538 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
14539 gimple_call_set_fndecl (call_stmt
, fndecl
);
14540 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
14543 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
14544 gimple_call_set_lhs (call_stmt
, lhs
);
14545 tree fallthru_label
;
14546 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
14548 g
= gimple_build_label (fallthru_label
);
14549 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14550 g
= gimple_build_cond (NE_EXPR
, lhs
,
14551 fold_convert (TREE_TYPE (lhs
),
14552 boolean_false_node
),
14553 cctx
->cancel_label
, fallthru_label
);
14554 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14561 case GIMPLE_ASSIGN
:
14562 for (omp_context
*up
= ctx
; up
; up
= up
->outer
)
14564 if (gimple_code (up
->stmt
) == GIMPLE_OMP_ORDERED
14565 || gimple_code (up
->stmt
) == GIMPLE_OMP_CRITICAL
14566 || gimple_code (up
->stmt
) == GIMPLE_OMP_TASKGROUP
14567 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCOPE
14568 || gimple_code (up
->stmt
) == GIMPLE_OMP_SECTION
14569 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
14570 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
14571 && (gimple_omp_target_kind (up
->stmt
)
14572 == GF_OMP_TARGET_KIND_DATA
)))
14574 else if (!up
->lastprivate_conditional_map
)
14576 tree lhs
= get_base_address (gimple_assign_lhs (stmt
));
14577 if (TREE_CODE (lhs
) == MEM_REF
14578 && DECL_P (TREE_OPERAND (lhs
, 0))
14579 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs
,
14580 0))) == REFERENCE_TYPE
)
14581 lhs
= TREE_OPERAND (lhs
, 0);
14583 if (tree
*v
= up
->lastprivate_conditional_map
->get (lhs
))
14586 if (up
->combined_into_simd_safelen1
)
14589 if (gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
)
14592 if (gimple_code (up
->stmt
) == GIMPLE_OMP_FOR
)
14593 clauses
= gimple_omp_for_clauses (up
->stmt
);
14595 clauses
= gimple_omp_sections_clauses (up
->stmt
);
14596 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__CONDTEMP_
);
14597 if (!OMP_CLAUSE__CONDTEMP__ITER (c
))
14598 c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
14599 OMP_CLAUSE__CONDTEMP_
);
14600 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c
));
14601 gimple
*g
= gimple_build_assign (*v
, OMP_CLAUSE_DECL (c
));
14602 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14609 if ((ctx
|| make_addressable_vars
)
14610 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
14613 /* Just remove clobbers, this should happen only if we have
14614 "privatized" local addressable variables in SIMD regions,
14615 the clobber isn't needed in that case and gimplifying address
14616 of the ARRAY_REF into a pointer and creating MEM_REF based
14617 clobber would create worse code than we get with the clobber
14619 if (gimple_clobber_p (stmt
))
14621 gsi_replace (gsi_p
, gimple_build_nop (), true);
14624 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
14631 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
14633 location_t saved_location
= input_location
;
14634 gimple_stmt_iterator gsi
;
14635 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
14636 lower_omp_1 (&gsi
, ctx
);
14637 /* During gimplification, we haven't folded statments inside offloading
14638 or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now. */
14639 if (target_nesting_level
|| taskreg_nesting_level
)
14640 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
14642 input_location
= saved_location
;
14645 /* Main entry point. */
14647 static unsigned int
14648 execute_lower_omp (void)
14654 /* This pass always runs, to provide PROP_gimple_lomp.
14655 But often, there is nothing to do. */
14656 if (flag_openacc
== 0 && flag_openmp
== 0
14657 && flag_openmp_simd
== 0)
14660 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
14661 delete_omp_context
);
14663 body
= gimple_body (current_function_decl
);
14665 scan_omp (&body
, NULL
);
14666 gcc_assert (taskreg_nesting_level
== 0);
14667 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
14668 finish_taskreg_scan (ctx
);
14669 taskreg_contexts
.release ();
14671 if (all_contexts
->root
)
14673 if (make_addressable_vars
)
14674 push_gimplify_context ();
14675 lower_omp (&body
, NULL
);
14676 if (make_addressable_vars
)
14677 pop_gimplify_context (NULL
);
14682 splay_tree_delete (all_contexts
);
14683 all_contexts
= NULL
;
14685 BITMAP_FREE (make_addressable_vars
);
14686 BITMAP_FREE (global_nonaddressable_vars
);
14688 /* If current function is a method, remove artificial dummy VAR_DECL created
14689 for non-static data member privatization, they aren't needed for
14690 debuginfo nor anything else, have been already replaced everywhere in the
14691 IL and cause problems with LTO. */
14692 if (DECL_ARGUMENTS (current_function_decl
)
14693 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
14694 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
14696 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
14698 for (auto task_stmt
: task_cpyfns
)
14699 finalize_task_copyfn (task_stmt
);
14700 task_cpyfns
.release ();
14706 const pass_data pass_data_lower_omp
=
14708 GIMPLE_PASS
, /* type */
14709 "omplower", /* name */
14710 OPTGROUP_OMP
, /* optinfo_flags */
14711 TV_NONE
, /* tv_id */
14712 PROP_gimple_any
, /* properties_required */
14713 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
14714 0, /* properties_destroyed */
14715 0, /* todo_flags_start */
14716 0, /* todo_flags_finish */
14719 class pass_lower_omp
: public gimple_opt_pass
14722 pass_lower_omp (gcc::context
*ctxt
)
14723 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
14726 /* opt_pass methods: */
14727 unsigned int execute (function
*) final override
14729 return execute_lower_omp ();
14732 }; // class pass_lower_omp
14734 } // anon namespace
14737 make_pass_lower_omp (gcc::context
*ctxt
)
14739 return new pass_lower_omp (ctxt
);
14742 /* The following is a utility to diagnose structured block violations.
14743 It is not part of the "omplower" pass, as that's invoked too late. It
14744 should be invoked by the respective front ends after gimplification. */
14746 static splay_tree all_labels
;
14748 /* Check for mismatched contexts and generate an error if needed. Return
14749 true if an error is detected. */
14752 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
14753 gimple
*branch_ctx
, gimple
*label_ctx
)
14755 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
14756 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
14758 if (label_ctx
== branch_ctx
)
14761 const char* kind
= NULL
;
14765 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
14766 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
14768 gcc_checking_assert (kind
== NULL
);
14774 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
14778 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14779 so we could traverse it and issue a correct "exit" or "enter" error
14780 message upon a structured block violation.
14782 We built the context by building a list with tree_cons'ing, but there is
14783 no easy counterpart in gimple tuples. It seems like far too much work
14784 for issuing exit/enter error messages. If someone really misses the
14785 distinct error message... patches welcome. */
14788 /* Try to avoid confusing the user by producing and error message
14789 with correct "exit" or "enter" verbiage. We prefer "exit"
14790 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14791 if (branch_ctx
== NULL
)
14797 if (TREE_VALUE (label_ctx
) == branch_ctx
)
14802 label_ctx
= TREE_CHAIN (label_ctx
);
14807 error ("invalid exit from %s structured block", kind
);
14809 error ("invalid entry to %s structured block", kind
);
14812 /* If it's obvious we have an invalid entry, be specific about the error. */
14813 if (branch_ctx
== NULL
)
14814 error ("invalid entry to %s structured block", kind
);
14817 /* Otherwise, be vague and lazy, but efficient. */
14818 error ("invalid branch to/from %s structured block", kind
);
14821 gsi_replace (gsi_p
, gimple_build_nop (), false);
14825 /* Pass 1: Create a minimal tree of structured blocks, and record
14826 where each label is found. */
14829 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
14830 struct walk_stmt_info
*wi
)
14832 gimple
*context
= (gimple
*) wi
->info
;
14833 gimple
*inner_context
;
14834 gimple
*stmt
= gsi_stmt (*gsi_p
);
14836 *handled_ops_p
= true;
14838 switch (gimple_code (stmt
))
14842 case GIMPLE_OMP_PARALLEL
:
14843 case GIMPLE_OMP_TASK
:
14844 case GIMPLE_OMP_SCOPE
:
14845 case GIMPLE_OMP_SECTIONS
:
14846 case GIMPLE_OMP_SINGLE
:
14847 case GIMPLE_OMP_SECTION
:
14848 case GIMPLE_OMP_MASTER
:
14849 case GIMPLE_OMP_MASKED
:
14850 case GIMPLE_OMP_ORDERED
:
14851 case GIMPLE_OMP_SCAN
:
14852 case GIMPLE_OMP_CRITICAL
:
14853 case GIMPLE_OMP_TARGET
:
14854 case GIMPLE_OMP_TEAMS
:
14855 case GIMPLE_OMP_TASKGROUP
:
14856 /* The minimal context here is just the current OMP construct. */
14857 inner_context
= stmt
;
14858 wi
->info
= inner_context
;
14859 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
14860 wi
->info
= context
;
14863 case GIMPLE_OMP_FOR
:
14864 inner_context
= stmt
;
14865 wi
->info
= inner_context
;
14866 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14868 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
14869 diagnose_sb_1
, NULL
, wi
);
14870 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
14871 wi
->info
= context
;
14875 splay_tree_insert (all_labels
,
14876 (splay_tree_key
) gimple_label_label (
14877 as_a
<glabel
*> (stmt
)),
14878 (splay_tree_value
) context
);
14888 /* Pass 2: Check each branch and see if its context differs from that of
14889 the destination label's context. */
14892 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
14893 struct walk_stmt_info
*wi
)
14895 gimple
*context
= (gimple
*) wi
->info
;
14897 gimple
*stmt
= gsi_stmt (*gsi_p
);
14899 *handled_ops_p
= true;
14901 switch (gimple_code (stmt
))
14905 case GIMPLE_OMP_PARALLEL
:
14906 case GIMPLE_OMP_TASK
:
14907 case GIMPLE_OMP_SCOPE
:
14908 case GIMPLE_OMP_SECTIONS
:
14909 case GIMPLE_OMP_SINGLE
:
14910 case GIMPLE_OMP_SECTION
:
14911 case GIMPLE_OMP_MASTER
:
14912 case GIMPLE_OMP_MASKED
:
14913 case GIMPLE_OMP_ORDERED
:
14914 case GIMPLE_OMP_SCAN
:
14915 case GIMPLE_OMP_CRITICAL
:
14916 case GIMPLE_OMP_TARGET
:
14917 case GIMPLE_OMP_TEAMS
:
14918 case GIMPLE_OMP_TASKGROUP
:
14920 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
14921 wi
->info
= context
;
14924 case GIMPLE_OMP_FOR
:
14926 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14928 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
14929 diagnose_sb_2
, NULL
, wi
);
14930 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
14931 wi
->info
= context
;
14936 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
14937 tree lab
= gimple_cond_true_label (cond_stmt
);
14940 n
= splay_tree_lookup (all_labels
,
14941 (splay_tree_key
) lab
);
14942 diagnose_sb_0 (gsi_p
, context
,
14943 n
? (gimple
*) n
->value
: NULL
);
14945 lab
= gimple_cond_false_label (cond_stmt
);
14948 n
= splay_tree_lookup (all_labels
,
14949 (splay_tree_key
) lab
);
14950 diagnose_sb_0 (gsi_p
, context
,
14951 n
? (gimple
*) n
->value
: NULL
);
14958 tree lab
= gimple_goto_dest (stmt
);
14959 if (TREE_CODE (lab
) != LABEL_DECL
)
14962 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
14963 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
14967 case GIMPLE_SWITCH
:
14969 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
14971 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
14973 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
14974 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
14975 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
14981 case GIMPLE_RETURN
:
14982 diagnose_sb_0 (gsi_p
, context
, NULL
);
14992 static unsigned int
14993 diagnose_omp_structured_block_errors (void)
14995 struct walk_stmt_info wi
;
14996 gimple_seq body
= gimple_body (current_function_decl
);
14998 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
15000 memset (&wi
, 0, sizeof (wi
));
15001 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
15003 memset (&wi
, 0, sizeof (wi
));
15004 wi
.want_locations
= true;
15005 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
15007 gimple_set_body (current_function_decl
, body
);
15009 splay_tree_delete (all_labels
);
15017 const pass_data pass_data_diagnose_omp_blocks
=
15019 GIMPLE_PASS
, /* type */
15020 "*diagnose_omp_blocks", /* name */
15021 OPTGROUP_OMP
, /* optinfo_flags */
15022 TV_NONE
, /* tv_id */
15023 PROP_gimple_any
, /* properties_required */
15024 0, /* properties_provided */
15025 0, /* properties_destroyed */
15026 0, /* todo_flags_start */
15027 0, /* todo_flags_finish */
15030 class pass_diagnose_omp_blocks
: public gimple_opt_pass
15033 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
15034 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
15037 /* opt_pass methods: */
15038 bool gate (function
*) final override
15040 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
15042 unsigned int execute (function
*) final override
15044 return diagnose_omp_structured_block_errors ();
15047 }; // class pass_diagnose_omp_blocks
15049 } // anon namespace
15052 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
15054 return new pass_diagnose_omp_blocks (ctxt
);
15058 #include "gt-omp-low.h"