/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "gimple-low.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "stringpool.h"
#include "omp-offload.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
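/* As an illustration only (a sketch, not this pass's code): after lowering
   and expansion, a construct such as

       #pragma omp parallel shared(a)
         use (a);

   roughly becomes an outlined child function plus a call into libgomp:

       static void foo._omp_fn.0 (struct .omp_data_s *ptr) { use (ptr->a); }
       ...
       GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);

   The record names .omp_data_s/.omp_data_o follow what the GIMPLE dumps
   show; the exact call sequence depends on clauses and target.  */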
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.cc (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;

  /* Candidates for adjusting OpenACC privatization level.  */
  vec<tree> oacc_privatization_candidates;
};
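/* Illustrative note on how the maps above cooperate: for
   #pragma omp parallel shared(x) firstprivate(y), scanning builds a
   record type with one field per transferred variable; the encountering
   thread fills a local instance of it (the sender side, .omp_data_o in
   dumps) and the child function reads it through RECEIVER_DECL
   (.omp_data_i), with field_map tying each VAR_DECL to its FIELD_DECL.
   This is a sketch of the mechanism, not a literal dump.  */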
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap make_addressable_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;
static vec<gomp_task *> task_cpyfns;
static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
static bool omp_maybe_offloaded_ctx (omp_context *ctx);
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
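/* WALK_SUBSTMTS is meant to be spliced into a switch inside a
   walk_gimple_stmt callback, e.g. (hypothetical callback body):

       switch (gimple_code (stmt))
	 {
	 WALK_SUBSTMTS;
	 case GIMPLE_OMP_FOR:
	   ...
	 }

   so that container statements are descended into rather than treated
   as handled.  */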
/* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_parallel_or_serial (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
}
/* Return whether CTX represents an OpenACC 'kernels' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* Return whether CTX represents an OpenACC 'kernels' decomposed part.  */

static bool
is_oacc_kernels_decomposed_part (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
}
/* Return true if STMT corresponds to an OpenMP target region.  */

static bool
is_omp_target (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
    {
      int kind = gimple_omp_target_kind (stmt);
      return (kind == GF_OMP_TARGET_KIND_REGION
	      || kind == GF_OMP_TARGET_KIND_DATA
	      || kind == GF_OMP_TARGET_KIND_ENTER_DATA
	      || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
    }
  return false;
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	break;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
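/* Example of the situation handled above (C++; illustrative):

       struct S { int x; void f (); };
       void S::f () { #pragma omp parallel private(x) ... }

   The front end creates an artificial VAR_DECL standing in for the
   member 'x' whose DECL_VALUE_EXPR is a COMPONENT_REF such as this->x;
   the function above digs the underlying 'this' PARM_DECL out of that
   expression.  */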
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
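/* For instance, unshare_and_remap (a + b * 2, b, t) yields a fresh
   tree equivalent to a + t * 2, leaving the original expression
   untouched (illustrative; the operands are GENERIC trees, not source
   syntax).  */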
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}
/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}
/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP);
}
/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	  && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt)));
}
/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}
/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
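/* E.g. for a VLA declared as 'int a[n]' TYPE_SIZE_UNIT is not a
   compile-time constant, so the decl counts as variable sized, while
   'int a[10]' does not.  */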
/* Lookup variables.  The "maybe" form allows the variable to not have
   been entered; otherwise we assert that it must have been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}
static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!make_addressable_vars)
		make_addressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (make_addressable_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
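/* To summarize the logic above with an illustrative example: for a
   scalar 'int x' shared on a parallel, if x is non-addressable and not
   visible from an outer scope it can travel by value in the record
   (copy-in/copy-out); if it is addressable, global, aggregate or
   _Atomic, or shared from a task, only its address is passed.  */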
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;

  if (ctx)
    {
      DECL_CHAIN (copy) = ctx->block_vars;
      ctx->block_vars = copy;
    }
  else
    record_vars (copy);

  /* If VAR is listed in make_addressable_vars, it wasn't
     originally addressable, but was only later made so.
     We don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((make_addressable_vars
	   && bitmap_bit_p (make_addressable_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
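/* The reference built here is what appears in GIMPLE dumps as
   .omp_data_i->var, or *.omp_data_i->var when BY_REF (illustrative
   dump syntax).  */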
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  for (; outer; outer = outer->outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
	continue;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
	  && !maybe_lookup_decl (var, outer))
	continue;
      break;
    }

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || code == OMP_CLAUSE_ALLOCATE
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      else
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_privatize_by_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_privatize_by_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  MASK
   selects which record type(s) the field is installed into and how it
   is keyed and typed; see the bit tests below.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1
	   && omp_privatize_by_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
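/* Informal summary of the MASK bits, as can be read off the code above
   (an aid to the reader, not normative): bits 1 and 2 select installing
   into record_type/field_map and srecord_type/sfield_map respectively
   (3 = both); 4 requests a pointer-to-pointer field for arrays; 8 keys
   the field by &DECL_UID (var); 16 keys it by &DECL_NAME (var) and uses
   the language hook's array-descriptor data type; 32 marks fields for
   'allocate'd firstprivates so a reference type is not stripped.  */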
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;
  delete ctx->allocate_map;

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;

  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
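/* Net effect (informal): the receiver argument ends up typed as a
   restrict-qualified reference to the (possibly remapped) record type,
   const-qualified only for offloaded regions, which lets alias
   analysis treat the incoming data block as unmodified.  */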
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
	&& (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    /* omp_default_mem_alloc is 1 */
	    || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	    || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
      {
	/* The allocate clauses that appear on a target construct or on
	   constructs in a target region must specify an allocator expression
	   unless a requires directive with the dynamic_allocators clause
	   is present in the same compilation unit.  */
	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS) == 0)
	    && omp_maybe_offloaded_ctx (ctx))
	  error_at (OMP_CLAUSE_LOCATION (c), "%<allocate%> clause must"
		    " specify an allocator here");
	if (ctx->allocate_map == NULL)
	  ctx->allocate_map = new hash_map<tree, tree>;
	tree val = integer_zero_node;
	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	  val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
	if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
	  val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
	ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
      }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map && ctx->allocate_map->get (decl))
	    ctx->allocate_map->remove (decl);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_privatize_by_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));
	      /* Likewise, on OpenACC 'kernels' decomposed parts.  */
	      gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map
	      && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		   && (OMP_CLAUSE_REDUCTION_INSCAN (c)
		       || OMP_CLAUSE_REDUCTION_TASK (c)))
		  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		  || is_task_ctx (ctx)))
	    {
	      if (ctx->allocate_map->get (decl))
		ctx->allocate_map->remove (decl);
	    }
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (is_omp_target (ctx->stmt))
		{
		  if (is_variable_sized (t))
		    {
		      gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
		      t = DECL_VALUE_EXPR (t);
		      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
		      t = TREE_OPERAND (t, 0);
		      gcc_assert (DECL_P (t));
		    }
		  tree at = t;
		  if (ctx->outer)
		    scan_omp_op (&at, ctx->outer);
		  tree nt = omp_copy_decl_1 (at, ctx->outer);
		  splay_tree_insert (ctx->field_map,
				     (splay_tree_key) &DECL_CONTEXT (t),
				     (splay_tree_value) nt);
		  if (at != t)
		    splay_tree_insert (ctx->field_map,
				       (splay_tree_key) &DECL_CONTEXT (at),
				       (splay_tree_value) nt);
		  break;
		}
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_omp_target (ctx->stmt))
	    {
	      tree at = decl;
	      if (ctx->outer)
		scan_omp_op (&at, ctx->outer);
	      tree nt = omp_copy_decl_1 (at, ctx->outer);
	      splay_tree_insert (ctx->field_map,
				 (splay_tree_key) &DECL_CONTEXT (decl),
				 (splay_tree_value) nt);
	      if (at != decl)
		splay_tree_insert (ctx->field_map,
				   (splay_tree_key) &DECL_CONTEXT (at),
				   (splay_tree_value) nt);
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR
		      && lang_hooks.decls.omp_array_data (decl, true)))
		{
		  by_ref = !omp_privatize_by_reference (decl);
		  install_var_field (decl, by_ref, 3, ctx);
		}
	      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		{
		  if (TREE_CODE (decl) == INDIRECT_REF)
		    decl = TREE_OPERAND (decl, 0);
		  install_var_field (decl, true, 3, ctx);
		}
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		{
		  if (ctx->allocate_map
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		    {
		      if (ctx->allocate_map->get (decl))
			ctx->allocate_map->remove (decl);
		    }
		  install_var_field (decl, false, 1, ctx);
		}
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_privatize_by_reference (decl)))
		{
		  if (ctx->allocate_map
		      && ctx->allocate_map->get (decl))
		    install_var_field (decl, by_ref, 32 | 1, ctx);
		  else
		    install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  /* For descr arrays on target: firstprivatize data + attach ptr.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	      && is_gimple_omp_offloaded (ctx->stmt)
	      && !is_gimple_omp_oacc (ctx->stmt)
	      && lang_hooks.decls.omp_array_data (decl, true))
	    {
	      install_var_field (decl, false, 16 | 3, ctx);
	      install_var_field (decl, true, 8 | 3, ctx);
	    }
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_privatize_by_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);
	  while (TREE_CODE (decl) == INDIRECT_REF
		 || TREE_CODE (decl) == ARRAY_REF)
	    decl = TREE_OPERAND (decl, 0);
	  goto do_private;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_FILTER:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* If requested, make 'decl' addressable.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c))
	    {
	      gcc_checking_assert (DECL_P (decl));

	      bool decl_addressable = TREE_ADDRESSABLE (decl);
	      if (!decl_addressable)
		{
		  if (!make_addressable_vars)
		    make_addressable_vars = BITMAP_ALLOC (NULL);
		  bitmap_set_bit (make_addressable_vars, DECL_UID (decl));
		  TREE_ADDRESSABLE (decl) = 1;
		}

	      if (dump_enabled_p ())
		{
		  location_t loc = OMP_CLAUSE_LOCATION (c);
		  const dump_user_location_t d_u_loc
		    = dump_user_location_t::from_location_t (loc);
		  /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat"
#endif
		  if (!decl_addressable)
		    dump_printf_loc (MSG_NOTE, d_u_loc,
				     "variable %<%T%>"
				     " made addressable\n",
				     decl);
		  else
		    dump_printf_loc (MSG_NOTE, d_u_loc,
				     "variable %<%T%>"
				     " already made addressable\n",
				     decl);
#if __GNUC__ >= 10
# pragma GCC diagnostic pop
#endif
		}

	      OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c) = 0;
	    }
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt))
	    {
	      /* If this is an offloaded region, an attach operation should
		 only exist when the pointer variable is mapped in a prior
		 clause.  */
	      if (is_gimple_omp_offloaded (ctx->stmt))
		gcc_assert
		  (maybe_lookup_decl (decl, ctx)
		   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
		       && lookup_attribute ("omp declare target",
					    DECL_ATTRIBUTES (decl))));

	      /* By itself, attach/detach is generated as part of pointer
		 variable mapping and should not create new variables in the
		 offloaded region, however sender refs for it must be created
		 for its address to be passed to the runtime.  */
	      tree field
		= build_decl (OMP_CLAUSE_LOCATION (c),
			      FIELD_DECL, NULL_TREE, ptr_type_node);
	      SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
	      insert_field_into_struct (ctx->record_type, field);
	      /* To not clash with a map of the pointer variable itself,
		 attach/detach maps have their field looked up by the *clause*
		 tree expression, not the decl.  */
	      gcc_assert (!splay_tree_lookup (ctx->field_map,
					      (splay_tree_key) c));
	      splay_tree_insert (ctx->field_map, (splay_tree_key) c,
				 (splay_tree_value) field);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !(is_gimple_omp_oacc (ctx->stmt)
			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_ALLOCATE:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
	    {
	      while (TREE_CODE (decl) == INDIRECT_REF
		     || TREE_CODE (decl) == ARRAY_REF)
		decl = TREE_OPERAND (decl, 0);
	    }

	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt)
	      && !is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    omp_context *rctx = ctx;
	    if (is_omp_target (ctx->stmt))
	      rctx = ctx->outer;
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
				       task_copy ? "_omp_cpyfn" : "_omp_fn");
}
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable)
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      if (lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
	{
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    DECL_ATTRIBUTES (decl)
	      = remove_attribute ("omp declare target",
				  copy_list (DECL_ATTRIBUTES (decl)));
	  else
	    target_attr = NULL;
	}
      if (target_attr
	  && is_gimple_omp_offloaded (ctx->stmt)
	  && lookup_attribute ("noclone", DECL_ATTRIBUTES (decl)) == NULL_TREE)
	DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("noclone"),
					    NULL_TREE, DECL_ATTRIBUTES (decl));
      if (target_attr)
	DECL_ATTRIBUTES (decl)
	  = tree_cons (get_identifier (target_attr),
		       NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
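
/* Putting the pieces above together, the decl built here looks roughly like

     static void foo._omp_fn.0 (void *.omp_data_i);

   or, for a task copy function, with the destination argument chained in
   front:

     static void foo._omp_cpyfn.1 (void *.omp_data_o, void *.omp_data_i);

   (illustrative sketch only; `foo' stands for the enclosing function, and
   the receiver's pointer type is refined later by
   fixup_child_record_type).  */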
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

static tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
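
/* Note the in/out protocol on WI->INFO: callers seed it with a pointer to
   the gf_mask kind they are searching for, and on a match the callback
   overwrites it with the found statement; that is how
   add_taskreg_looptemp_clauses below detects success.  */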
/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE)
	      || (msk == GF_OMP_FOR_KIND_FOR
		  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				      OMP_CLAUSE_LASTPRIVATE)))
	    {
	      tree temp = create_tmp_var (type);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__LOOPTEMP_);
	      insert_decl_map (&outer_ctx->cb, temp, temp);
	      OMP_CLAUSE_DECL (c) = temp;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	  if (fd.non_rect
	      && fd.last_nonrect == fd.first_nonrect + 1)
	    if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
		{
		  v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
		  tree type2 = TREE_TYPE (v);
		  for (i = 0; i < 3; i++)
		    {
		      tree temp = create_tmp_var (type2);
		      tree c = build_omp_clause (UNKNOWN_LOCATION,
						 OMP_CLAUSE__LOOPTEMP_);
		      insert_decl_map (&outer_ctx->cb, temp, temp);
		      OMP_CLAUSE_DECL (c) = temp;
		      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		      gimple_omp_taskreg_set_clauses (stmt, c);
		    }
		}
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
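
/* A worked example (hedged, simplified): for a combined construct

     #pragma omp parallel for
     for (i = lb; i < ub; i++) ...

   the walk above finds the inner GIMPLE_OMP_FOR marked combined-into-parallel
   and prepends two _LOOPTEMP_ clauses for the istart/iend temporaries of
   fd.iter_type, plus the extra temporaries described in the comments above
   for collapsed, lastprivate or non-rectangular cases.  */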
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
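
/* E.g. for `#pragma omp parallel shared(a) firstprivate(b)' inside `foo',
   this builds a `.omp_data_s' record whose fields are created by
   scan_sharing_clauses (a pointer field for `a' when use_pointer_for_field
   decides it must be passed by reference, a by-value field for `b') and
   points the statement's child function at the freshly built
   `foo._omp_fn.N'.  Sketch only; the actual field set depends on the
   clauses and variable types.  */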
/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_context returns non-NULL for some
   tree, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
	{
	  if (DECL_HAS_VALUE_EXPR_P (t))
	    t = unshare_expr (DECL_VALUE_EXPR (t));
	  *tp = t;
	}
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any make_addressable_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (make_addressable_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      tree detach_clause
	= omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_DETACH);
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  There are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      if (detach_clause)
	{
	  tree c, field;

	  /* Look for a firstprivate clause with the detach event handle.  */
	  for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	       c; c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
		continue;
	      if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
		  == OMP_CLAUSE_DECL (detach_clause))
		break;
	    }

	  gcc_assert (c);
	  field = lookup_field (OMP_CLAUSE_DECL (c), ctx);

	  /* Move field corresponding to the detach clause first.
	     This is filled by GOMP_task and needs to be in a
	     specific position.  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == field)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = field;
	  if (ctx->srecord_type)
	    {
	      field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == field)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = field;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
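
/* The field shuffling above is ABI-motivated: GOMP_parallel_reductions
   reads the _REDUCTEMP_ block at the start of the data record,
   GOMP_taskloop fills the first two _LOOPTEMP_ fields, and GOMP_task
   writes the detach event handle into the first field, so each must land
   at a fixed offset; variable-sized fields are moved to the end so they
   cannot perturb those offsets.  */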
/* Find the enclosing offload context.  */

static omp_context *
enclosing_target_ctx (omp_context *ctx)
{
  for (; ctx; ctx = ctx->outer)
    if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
      break;

  return ctx;
}
/* Return whether CTX's parent compute construct is an OpenACC 'kernels'
   construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
ctx_in_oacc_kernels_region (omp_context *ctx)
{
  for (;ctx != NULL; ctx = ctx->outer)
    {
      gimple *stmt = ctx->stmt;
      if (gimple_code (stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
	return true;
    }

  return false;
}
/* Check the parallelism clauses inside a OpenACC 'kernels' region.
   (This doesn't include OpenACC 'kernels' decomposed parts.)
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL,  ctx->outer);
  if (!stmt)
    {
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
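
/* For illustration, inside an OpenACC 'kernels' region

     #pragma acc loop gang
     for (...)
       {
	 #pragma acc loop gang
	 for (...) ...
       }

   both levels set GOMP_DIM_MASK (GOMP_DIM_GANG), so THIS_MASK overlaps
   OUTER_MASK on the inner loop and the "inner loop uses same OpenACC
   parallelism" error above triggers.  */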
/* Scan a GIMPLE_OMP_FOR.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      if (!(tgt && is_oacc_kernels (tgt)))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    tree c_op0;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		c_op0 = OMP_CLAUSE_GANG_EXPR (c);
		break;

	      case OMP_CLAUSE_WORKER:
		c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
		break;

	      case OMP_CLAUSE_VECTOR:
		c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
		break;

	      default:
		continue;
	      }

	    if (c_op0)
	      {
		/* By construction, this is impossible for OpenACC 'kernels'
		   decomposed parts.  */
		gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));

		error_at (OMP_CLAUSE_LOCATION (c),
			  "argument not permitted on %qs clause",
			  omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		if (tgt)
		  inform (gimple_location (tgt->stmt),
			  "enclosing parent compute construct");
		else if (oacc_get_fn_attrib (current_function_decl))
		  inform (DECL_SOURCE_LOCATION (current_function_decl),
			  "enclosing routine");
		else
		  gcc_unreachable ();
	      }
	  }

      if (tgt && is_oacc_kernels (tgt))
	check_oacc_kernel_gwv (stmt, ctx);

      /* Collect all variables named in reductions on this loop.  Ensure
	 that, if this loop has a reduction on some variable v, and there is
	 a reduction on v somewhere in an outer context, then there is a
	 reduction on v on all intervening loops as well.  */
      tree local_reduction_clauses = NULL;
      for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	    local_reduction_clauses
	      = tree_cons (NULL, c, local_reduction_clauses);
	}
      if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
	ctx->outer_reduction_clauses
	  = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
		     ctx->outer->outer_reduction_clauses);
      tree outer_reduction_clauses = ctx->outer_reduction_clauses;
      tree local_iter = local_reduction_clauses;
      for (; local_iter; local_iter = TREE_CHAIN (local_iter))
	{
	  tree local_clause = TREE_VALUE (local_iter);
	  tree local_var = OMP_CLAUSE_DECL (local_clause);
	  tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
	  bool have_outer_reduction = false;
	  tree ctx_iter = outer_reduction_clauses;
	  for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
	    {
	      tree outer_clause = TREE_VALUE (ctx_iter);
	      tree outer_var = OMP_CLAUSE_DECL (outer_clause);
	      tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
	      if (outer_var == local_var && outer_op != local_op)
		{
		  warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
			      "conflicting reduction operations for %qE",
			      local_var);
		  inform (OMP_CLAUSE_LOCATION (outer_clause),
			  "location of the previous reduction for %qE",
			  outer_var);
		}
	      if (outer_var == local_var)
		{
		  have_outer_reduction = true;
		  break;
		}
	    }
	  if (have_outer_reduction)
	    {
	      /* There is a reduction on outer_var both on this loop and on
		 some enclosing loop.  Walk up the context tree until such a
		 loop with a reduction on outer_var is found, and complain
		 about all intervening loops that do not have such a
		 reduction.  */
	      struct omp_context *curr_loop = ctx->outer;
	      bool found = false;
	      while (curr_loop != NULL)
		{
		  tree curr_iter = curr_loop->local_reduction_clauses;
		  for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
		    {
		      tree curr_clause = TREE_VALUE (curr_iter);
		      tree curr_var = OMP_CLAUSE_DECL (curr_clause);
		      if (curr_var == local_var)
			{
			  found = true;
			  break;
			}
		    }
		  if (!found)
		    warning_at (gimple_location (curr_loop->stmt), 0,
				"nested loop in reduction needs "
				"reduction clause for %qE",
				local_var);
		  else
		    break;
		  curr_loop = curr_loop->outer;
		}
	    }
	}
      ctx->local_reduction_clauses = local_reduction_clauses;
      ctx->outer_reduction_clauses
	= chainon (unshare_expr (ctx->local_reduction_clauses),
		   ctx->outer_reduction_clauses);

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
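
/* Illustration of the reduction consistency checks above:

     #pragma acc loop reduction(+:v)
     for (...)
       #pragma acc loop reduction(*:v)
       for (...) ...

   yields the "conflicting reduction operations" warning, while an
   intervening loop that merely omits `reduction(+:v)' gets the
   "nested loop in reduction needs reduction clause" warning.  */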
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);

  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
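
/* The bind built above dispatches between the two copies roughly as

     _cond = GOMP_USE_SIMT ();
     if (_cond != 0) goto simt_copy; else goto simd_copy;

   where the SIMT copy carries the extra _SIMT_ clause (variable and label
   names here are illustrative).  The IFN_GOMP_USE_SIMT call is resolved
   later, once it is known whether the code runs on a SIMT target.  */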
static tree omp_find_scan (gimple_stmt_iterator *, bool *,
			   struct walk_stmt_info *);
static omp_context *maybe_lookup_ctx (gimple *);
/* Duplicate #pragma omp simd, one for the scan input phase loop and one
   for scan phase loop.  */

static void
scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
		    omp_context *outer_ctx)
{
  /* The only change between inclusive and exclusive scan will be
     within the first simd loop, so just use inclusive in the
     worksharing loop.  */
  outer_ctx->scan_inclusive = true;
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
  OMP_CLAUSE_DECL (c) = integer_zero_node;

  gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
  gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
  gsi_replace (gsi, input_stmt, false);
  gimple_seq input_body = NULL;
  gimple_seq_add_stmt (&input_body, stmt);
  gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);

  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gsi_next (&input1_gsi);
  gimple *scan_stmt1 = gsi_stmt (input1_gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
  c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt1, scan_stmt1);

  gimple_seq input_body1 = gimple_omp_body (input_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);

  gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (scan_body);

  gimple_omp_set_body (input_stmt1, input_body1);
  gimple_omp_set_body (scan_stmt1, NULL);

  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
		       NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi_next (&input2_gsi);
  gimple *scan_stmt2 = gsi_stmt (input2_gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt2, scan_stmt2);

  gimple_omp_set_body (input_stmt2, NULL);

  gimple_omp_set_body (input_stmt, input_body);
  gimple_omp_set_body (scan_stmt, scan_body);

  omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (input_stmt), ctx);

  ctx = new_omp_context (scan_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);

  maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
}
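
/* This duplication serves OpenMP inscan reductions; hedged example:

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
	 r += a[i];
	 #pragma omp scan inclusive(r)
	 b[i] = r;
       }

   becomes an input-phase loop and a scan-phase loop wrapped in
   GIMPLE_OMP_SCAN statements; the real per-phase rewriting happens in the
   later lowering and expansion passes.  */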
/* Scan an OpenMP sections directive.  */

static void
scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;

  ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
}
/* Scan an OpenMP single directive.  */

static void
scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}
/* Scan a GIMPLE_OMP_TARGET.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }

  if (ctx->teams_nested_p && ctx->nonteams_nested_p)
    {
      error_at (gimple_location (stmt),
		"%<target%> construct with nested %<teams%> construct "
		"contains directives outside of the %<teams%> construct");
      gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
    }
}
/* Scan an OpenMP teams directive.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  if (!gimple_omp_teams_host (stmt))
    {
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  taskreg_contexts.safe_push (ctx);
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Check nesting restrictions.  */
static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
      || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
    /* ..., except for the atomic codes that OpenACC shares with OpenMP.  */
    ;
  else if (!(is_gimple_omp (stmt)
	     && is_gimple_omp_oacc (stmt)))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
	{
	  c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
			       OMP_CLAUSE_DEVICE);
	  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs are not allowed in target region "
			"with %<ancestor%>");
	      return false;
	    }

	  if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
	    ctx->teams_nested_p = true;
	  else
	    ctx->nonteams_nested_p = true;
	}
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
	  && ctx->outer
	  && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	ctx = ctx->outer;
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	  && !ctx->loop_p)
	{
	  c = NULL_TREE;
	  if (ctx->order_concurrent
	      && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs other than %<parallel%>, %<loop%>"
			" or %<simd%> may not be nested inside a region with"
			" the %<order(concurrent)%> clause");
	      return false;
	    }
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<%s simd%> region",
				lang_GNU_Fortran () ? "do" : "for");
		      return false;
		    }
		  return true;
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
		   || gimple_code (stmt) == GIMPLE_OMP_SCAN)
	    return true;
	  else if (gimple_code (stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	    return true;
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than "
		    "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
		    "not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
		   && omp_find_clause (gimple_omp_for_clauses (stmt),
				       OMP_CLAUSE_BIND) == NULL_TREE))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%>, %<parallel%> or %<loop%> "
			"regions are allowed to be strictly nested inside "
			"%<teams%> region");
	      return false;
	    }
	}
      else if (ctx->order_concurrent
	       && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
	       && (gimple_code (stmt) != GIMPLE_OMP_FOR
		   || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
	       && gimple_code (stmt) != GIMPLE_OMP_SCAN)
	{
	  if (ctx->loop_p)
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a %<loop%> region");
	  else
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a region with "
		      "the %<order(concurrent)%> clause");
	  return false;
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      /* For now, hope this will change and loop bind(parallel) will not
	 be allowed in lots of contexts.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	  && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		  case GF_OMP_TARGET_KIND_OACC_SERIAL:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
		    ok = true;
		    break;

		  default:
		    break;
		  }

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      return true;
    case GIMPLE_CALL:
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "cancel"
	      : "cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (!is_task_ctx (ctx)
		  && (!is_taskloop_ctx (ctx)
		      || ctx->outer == NULL
		      || !is_task_ctx (ctx->outer)))
		bad = "task";
	      else
		{
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			case GIMPLE_OMP_TASK:
			  if (gimple_omp_task_taskloop_p (octx->stmt)
			      && octx->outer
			      && is_taskloop_ctx (octx->outer))
			    {
			      tree clauses
				= gimple_omp_for_clauses (octx->outer->stmt);
			      if (!omp_find_clause (clauses,
						    OMP_CLAUSE_NOGROUP))
				break;
			    }
			  continue;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_MASKED:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<loop%>, %<critical%>, "
			  "%<ordered%>, %<master%>, %<masked%>, explicit "
			  "%<task%> or %<taskloop%> region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
		      "%<master%>, %<masked%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%qs region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%> or "
		      "%<taskloop%> region",
		      gimple_code (stmt) == GIMPLE_OMP_MASTER
		      ? "master" : "masked");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_SCOPE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_MASKED:
	    error_at (gimple_location (stmt),
		      "%<scope%> region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%>, "
		      "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
		      "or %<masked%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
	  {
	    enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<%s(%s)%> is only allowed in %<omp ordered%>",
		      OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross",
		      kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS)
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "invalid depend kind in omp %<ordered%> %<depend%>");
		  return false;
		}
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }

	  tree oclause;
	  /* Look for containing ordered(N) loop.  */
	  if (ctx == NULL
	      || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
	      || (oclause
		  = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				     OMP_CLAUSE_ORDERED)) == NULL_TREE)
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%<ordered%> construct with %<depend%> clause "
			"must be closely nested inside an %<ordered%> loop");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region");
	      return false;
	    }
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    tree o;
	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED);
	    if (o == NULL_TREE)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    if (!gimple_omp_ordered_standalone_p (stmt))
	      {
		if (OMP_CLAUSE_ORDERED_DOACROSS (o))
		  {
		    error_at (gimple_location (stmt),
			      "%<ordered%> construct without %<doacross%> or "
			      "%<depend%> clauses must not have the same "
			      "binding region as %<ordered%> construct with "
			      "those clauses");
		    return false;
		  }
		else if (OMP_CLAUSE_ORDERED_EXPR (o))
		  {
		    tree co
		      = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					 OMP_CLAUSE_COLLAPSE);
		    HOST_WIDE_INT
		      o_n = tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (o));
		    HOST_WIDE_INT c_n = 1;
		    if (co)
		      c_n = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co));
		    if (o_n != c_n)
		      {
			error_at (gimple_location (stmt),
				  "%<ordered%> construct without %<doacross%> "
				  "or %<depend%> clauses binds to loop where "
				  "%<collapse%> argument %wd is different from "
				  "%<ordered%> argument %wd", c_n, o_n);
			return false;
		      }
		  }
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
	      = dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
	break;
      else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	       || (gimple_omp_target_kind (ctx->stmt)
		   != GF_OMP_TARGET_KIND_REGION))
	{
	  /* Teams construct can appear either strictly nested inside of
	     target construct with no intervening stmts, or can be encountered
	     only by initial task (so must not appear inside any OpenMP
	     construct.  */
	  error_at (gimple_location (stmt),
		    "%<teams%> construct must be closely nested inside of "
		    "%<target%> construct or not nested in any OpenMP "
		    "construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
	  {
	    enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
	      stmt_name = "enter data"; break;
	    case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
	      stmt_name = "exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL:
	      ctx_stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      ctx_stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		{
		  if ((gimple_omp_target_kind (ctx->stmt)
		       == GF_OMP_TARGET_KIND_REGION)
		      && (gimple_omp_target_kind (stmt)
			  == GF_OMP_TARGET_KIND_REGION))
		    {
		      c = omp_find_clause (gimple_omp_target_clauses (stmt),
					   OMP_CLAUSE_DEVICE);
		      if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
			break;
		    }
		  warning_at (gimple_location (stmt), 0,
			      "%qs construct inside of %qs region",
			      stmt_name, ctx_stmt_name);
		}
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
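
/* Typical rejects, for orientation: a GOMP_barrier call closely nested in
   a %<critical%> region, a worksharing region closely nested in another
   worksharing region, or a %<teams%> construct that is not closely nested
   in %<target%>.  On failure the caller (scan_omp_1_stmt) replaces the
   offending statement with a GIMPLE_NOP so scanning can continue.  */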
/* Helper function for scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
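/* As an illustration (not from the original sources): while scanning the
   body of "#pragma omp parallel private (x)", each use of the decl X
   reaches the VAR_DECL case above and is remapped through CTX->cb to the
   privatized copy created for the parallel region's child function.  */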
/* Return true if FNDECL is a setjmp or a longjmp.  */

static bool
setjmp_or_longjmp_p (const_tree fndecl)
{
  if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
      || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
    return true;

  tree declname = DECL_NAME (fndecl);
  if (!declname
      || (DECL_CONTEXT (fndecl) != NULL_TREE
	  && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
      || !TREE_PUBLIC (fndecl))
    return false;

  const char *name = IDENTIFIER_POINTER (declname);
  return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
}
/* Return true if FNDECL is an omp_* runtime API call.  */

static bool
omp_runtime_api_call (const_tree fndecl)
{
  tree declname = DECL_NAME (fndecl);
  if (!declname
      || (DECL_CONTEXT (fndecl) != NULL_TREE
	  && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
      || !TREE_PUBLIC (fndecl))
    return false;

  const char *name = IDENTIFIER_POINTER (declname);
  if (!startswith (name, "omp_"))
    return false;

  static const char *omp_runtime_apis[] =
    {
      /* This array has 3 sections.  First omp_* calls that don't
	 have any suffixes.  */
      "target_associate_ptr",
      "target_disassociate_ptr",
      "target_is_accessible",
      "target_is_present",
      "target_memcpy_async",
      "target_memcpy_rect",
      "target_memcpy_rect_async",
      NULL,
      /* Now omp_* calls that are available as omp_* and omp_*_; however, the
	 DECL_NAME is always omp_* without trailing underscore.  */
      "destroy_allocator",
      "destroy_nest_lock",
      "get_affinity_format",
      "get_default_allocator",
      "get_default_device",
      "get_initial_device",
      "get_max_active_levels",
      "get_max_task_priority",
      "get_partition_num_places",
      "get_supported_active_levels",
      "get_teams_thread_limit",
      "is_initial_device",
      "pause_resource_all",
      "set_affinity_format",
      "set_default_allocator",
      NULL,
      /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
	 as DECL_NAME only omp_* and omp_*_8 appear.  */
      "get_ancestor_thread_num",
      "get_partition_place_nums",
      "get_place_num_procs",
      "get_place_proc_ids",
      "set_default_device",
      "set_max_active_levels",
      "set_teams_thread_limit"
    };

  int mode = 0;
  for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
    {
      if (omp_runtime_apis[i] == NULL)
	{
	  mode++;
	  continue;
	}
      size_t len = strlen (omp_runtime_apis[i]);
      if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
	  && (name[4 + len] == '\0'
	      || (mode > 1 && strcmp (name + 4 + len, "_8") == 0)))
	return true;
    }
  return false;
}
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  if (ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	      && setjmp_or_longjmp_p (fndecl)
	      && !ctx->loop_p)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside %<simd%> construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	  else if (ctx)
	    {
	      omp_context *octx = ctx;
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
		octx = ctx->outer;
	      if (octx->order_concurrent && omp_runtime_api_call (fndecl))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD in a region with "
			    "%<order(concurrent)%> clause", fndecl);
		}
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && omp_runtime_api_call (fndecl)
		  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
		       != strlen ("omp_get_num_teams"))
		      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
				 "omp_get_num_teams") != 0)
		  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
		       != strlen ("omp_get_team_num"))
		      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
				 "omp_get_team_num") != 0))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD strictly nested in a "
			    "%<teams%> region", fndecl);
		}
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (ctx->stmt)
		      == GF_OMP_TARGET_KIND_REGION)
		  && omp_runtime_api_call (fndecl))
		{
		  tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
		  tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
		  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
		    error_at (gimple_location (stmt),
			      "OpenMP runtime API call %qD in a region with "
			      "%<device(ancestor)%> clause", fndecl);
		}
	    }
	}
    }
  if (remove)
    {
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && gimple_omp_for_combined_into_p (stmt)
	  && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
	{
	  tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
	  tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
	  if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
	    {
	      scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
	      break;
	    }
	}
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ()
	  && gimple_omp_for_collapse (stmt) == 1)
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCOPE:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_MASKED:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      if (is_gimple_omp_offloaded (stmt))
	{
	  taskreg_nesting_level++;
	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
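/* For example (illustrative scenario), an invalidly nested
   "#pragma omp barrier" directly inside a "#pragma omp simd" body is
   diagnosed by the checks above and then replaced with a GIMPLE_NOP via
   gsi_replace, so later lowering stages never see the rejected
   statement.  */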
/* Scan all the statements starting at the current statement.  CTX
   contains context information about the OMP directives and
   clauses found during the scan.  */

static void
scan_omp (gimple_seq *body_p, omp_context *ctx)
{
  location_t saved_location;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  saved_location = input_location;
  walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
  input_location = saved_location;
}
/* Re-gimplification and code generation routines.  */

/* Remove omp_member_access_dummy_var variables from gimple_bind_vars
   of BIND if in a method.  */

static void
maybe_remove_omp_member_access_dummy_vars (gbind *bind)
{
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    {
      tree vars = gimple_bind_vars (bind);
      for (tree *pvar = &vars; *pvar; )
	if (omp_member_access_dummy_var (*pvar))
	  *pvar = DECL_CHAIN (*pvar);
	else
	  pvar = &DECL_CHAIN (*pvar);
      gimple_bind_set_vars (bind, vars);
    }
}

/* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
   block and its subblocks.  */

static void
remove_member_access_dummy_vars (tree block)
{
  for (tree *pvar = &BLOCK_VARS (block); *pvar; )
    if (omp_member_access_dummy_var (*pvar))
      *pvar = DECL_CHAIN (*pvar);
    else
      pvar = &DECL_CHAIN (*pvar);

  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    remove_member_access_dummy_vars (block);
}
/* If a context was created for STMT when it was scanned, return it.  */

static omp_context *
maybe_lookup_ctx (gimple *stmt)
{
  splay_tree_node n;
  n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
  return n ? (omp_context *) n->value : NULL;
}
/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context.  Suppose that we have the
   following parallel nesting (variable UIDs shown for clarity):

	iD.1562 = 0;
	#omp parallel shared(iD.1562)		-> outer parallel
	  iD.1562 = iD.1562 + 1;

	  #omp parallel shared (iD.1562)	-> inner parallel
	     iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

	outer parallel		.omp_data_s.1.i -> iD.1562
	inner parallel		.omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it.  So, in
   this case we would have:

	iD.1562 = 0;
	.omp_data_o.1.i = iD.1562;
	#omp parallel shared(iD.1562)		-> outer parallel
	  .omp_data_i.1 = &.omp_data_o.1
	  .omp_data_i.1->i = .omp_data_i.1->i + 1;

	  .omp_data_o.2.i = iD.1562;		-> **
	  #omp parallel shared(iD.1562)		-> inner parallel
	    .omp_data_i.2 = &.omp_data_o.2
	    .omp_data_i.2->i = .omp_data_i.2->i - 1;

	** This is a problem.  The symbol iD.1562 cannot be referenced
	   inside the body of the outer parallel region.  But since we are
	   emitting this copy operation while expanding the inner parallel
	   directive, we need to access the CTX structure of the outer
	   parallel directive to get the correct mapping:

	  .omp_data_o.2.i = .omp_data_i.1->i

   Since there may be other workshare or parallel directives enclosing
   the parallel directive, it may be necessary to walk up the context
   parent chain.  This is not a problem in general because nested
   parallelism happens only rarely.  */

static tree
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  gcc_assert (!ctx->is_nested || t || is_global_var (decl));

  return t ? t : decl;
}
/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
   in outer contexts.  */

static tree
maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t = NULL;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  return t ? t : decl;
}
/* Construct the initialization value for reduction operation OP.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE min;
	  if (HONOR_INFINITIES (type))
	    real_arithmetic (&min, NEGATE_EXPR, &dconstinf, NULL);
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    max = dconstinf;
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}

/* Construct the initialization value for reduction CLAUSE.  */

tree
omp_reduction_init (tree clause, tree type)
{
  return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
				OMP_CLAUSE_REDUCTION_CODE (clause), type);
}
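/* A few concrete initial values produced above (illustrative): for
   "reduction (max:x)" with "int x" the private copy starts at INT_MIN
   (TYPE_MIN_VALUE); for "reduction (*:x)" it starts at 1; for
   "reduction (&:x)" at ~0 (all ones); and for "reduction (max:d)" with
   "double d" at -inf, or at the most negative finite value when
   infinities are not honored for the type.  */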
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_modes modes;
  targetm.vectorize.autovectorize_vector_modes (&modes, true);
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	machine_mode alt_vmode;
	for (unsigned int j = 0; j < modes.length (); ++j)
	  if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
	      && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
	    vmode = alt_vmode;

	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type_for_mode (type, vmode);
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

class omplow_simd_context {
public:
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  tree idx;
  tree lane;
  tree lastlane;
  vec<tree, va_heap> simt_eargs;
  gimple_seq simt_dlist;
  poly_uint64_pod max_vf;
  bool is_simt;
};
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
	{
	  for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
	       c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		continue;

	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* UDR reductions are not supported yet for SIMT, disable
		     SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}

	      if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
		  && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
		{
		  /* Doing boolean operations on non-integral types is
		     for conformance only, it's not worth supporting this
		     for SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
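/* Rough sketch of the result (temporary names are made up): for a
   non-SIMT loop

     #pragma omp simd
     for (i = 0; i < n; i++) { ... lane-private use of A ... }

   the privatized A is backed by an "omp simd array"

     T D.1234[max_vf];

   IVAR becomes D.1234[sctx->idx], LVAR becomes D.1234[sctx->lane], and
   A's DECL_VALUE_EXPR is set to LVAR so existing uses transparently
   address the per-lane element.  */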
/* Helper function of lower_rec_input_clauses.  For a reference
   in simd reduction, add an underlying variable it will reference.  */

static void
handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
{
  tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
  if (TREE_CONSTANT (z))
    {
      z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
			      get_name (new_vard));
      gimple_add_tmp_var (z);
      TREE_ADDRESSABLE (z) = 1;
      z = build_fold_addr_expr_loc (loc, z);
      gimplify_assign (new_vard, z, ilist);
    }
}
/* Helper function for lower_rec_input_clauses.  Emit into the ILIST
   sequence code computing (type) (tskred_temp[idx]).  */

static tree
task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
		     unsigned idx)
{
  unsigned HOST_WIDE_INT sz
    = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
  tree r = build2 (MEM_REF, pointer_sized_int_node,
		   tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
					       sz * idx));
  tree v = create_tmp_var (pointer_sized_int_node);
  gimple *g = gimple_build_assign (v, r);
  gimple_seq_add_stmt (ilist, g);
  if (!useless_type_conversion_p (type, pointer_sized_int_node))
    {
      v = create_tmp_var (type);
      g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
      gimple_seq_add_stmt (ilist, g);
    }
  return v;
}
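/* The emitted sequence is simply (with made-up temporary names):

     _1 = MEM[(__UINTPTR_TYPE__ *) tskred_temp + sz * idx];
     _2 = (type) _1;   // only when a conversion is required

   and the function returns the last temporary.  */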
/* Lower early initialization of privatized variable NEW_VAR
   if it needs an allocator (has allocate clause).  */

static bool
lower_private_allocate (tree var, tree new_var, tree &allocator,
			tree &allocate_ptr, gimple_seq *ilist,
			omp_context *ctx, bool is_ref, tree size)
{
  if (allocator)
    return false;
  gcc_assert (allocate_ptr == NULL_TREE);
  if (ctx->allocate_map
      && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
    if (tree *allocatorp = ctx->allocate_map->get (var))
      allocator = *allocatorp;
  if (allocator == NULL_TREE)
    return false;
  if (!is_ref && omp_privatize_by_reference (var))
    {
      allocator = NULL_TREE;
      return false;
    }

  unsigned HOST_WIDE_INT ialign = 0;
  if (TREE_CODE (allocator) == TREE_LIST)
    {
      ialign = tree_to_uhwi (TREE_VALUE (allocator));
      allocator = TREE_PURPOSE (allocator);
    }
  if (TREE_CODE (allocator) != INTEGER_CST)
    allocator = build_outer_var_ref (allocator, ctx, OMP_CLAUSE_ALLOCATE);
  allocator = fold_convert (pointer_sized_int_node, allocator);
  if (TREE_CODE (allocator) != INTEGER_CST)
    {
      tree var = create_tmp_var (TREE_TYPE (allocator));
      gimplify_assign (var, allocator, ilist);
      allocator = var;
    }

  tree ptr_type, align, sz = size;
  if (TYPE_P (new_var))
    {
      ptr_type = build_pointer_type (new_var);
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
    }
  else if (is_ref)
    {
      ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
    }
  else
    {
      ptr_type = build_pointer_type (TREE_TYPE (new_var));
      ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
      if (sz == NULL_TREE)
	sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
    }
  align = build_int_cst (size_type_node, ialign);
  if (TREE_CODE (sz) != INTEGER_CST)
    {
      tree szvar = create_tmp_var (size_type_node);
      gimplify_assign (szvar, sz, ilist);
      sz = szvar;
    }
  allocate_ptr = create_tmp_var (ptr_type);
  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
  gimple *g = gimple_build_call (a, 3, align, sz, allocator);
  gimple_call_set_lhs (g, allocate_ptr);
  gimple_seq_add_stmt (ilist, g);

  if (!is_ref)
    {
      tree x = build_simple_mem_ref (allocate_ptr);
      TREE_THIS_NOTRAP (x) = 1;
      SET_DECL_VALUE_EXPR (new_var, x);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
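/* Sketch of the effect (assuming an allocate clause like the one below):
   for

     #pragma omp parallel firstprivate (x) \
			  allocate (omp_low_lat_mem_alloc : x)

   the privatized X is given backing storage via

     ptr = __builtin_GOMP_alloc (align, sizeof (x), allocator);

   and X's DECL_VALUE_EXPR is set to *ptr; the matching
   __builtin_GOMP_free is emitted into the destructor sequence by the
   callers.  */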
/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
   from the receiver (aka child) side and initializers for REFERENCE_TYPE
   private variables.  Initialization statements go in ILIST, while calls
   to destructors go in DLIST.  */

static void
lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
			 omp_context *ctx, struct omp_for_data *fd)
{
  tree c, copyin_seq, x, ptr;
  bool copyin_by_ref = false;
  bool lastprivate_firstprivate = false;
  bool reduction_omp_orig_ref = false;
  int pass;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
  omplow_simd_context sctx = omplow_simd_context ();
  tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
  tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
  gimple_seq llist[4] = { };
  tree nonconst_simd_if = NULL_TREE;

  copyin_seq = NULL;
  sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);

  /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
     with data sharing clauses referencing variable sized vars.  That
     is unnecessarily hard to support and very unlikely to result in
     vectorized code anyway.  */
  if (is_simd)
    for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
	    sctx.max_vf = 1;
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LASTPRIVATE:
	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
	    sctx.max_vf = 1;
	  else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
	      if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
		sctx.max_vf = 1;
	    }
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
	    sctx.max_vf = 1;
	  else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
	      if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
		sctx.max_vf = 1;
	    }
	  break;
	case OMP_CLAUSE_IF:
	  if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
	    sctx.max_vf = 1;
	  else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
	    nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
	  break;
	case OMP_CLAUSE_SIMDLEN:
	  if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
	    sctx.max_vf = 1;
	  break;
	case OMP_CLAUSE__CONDTEMP_:
	  /* FIXME: lastprivate(conditional:) not handled for SIMT yet.  */
	  if (sctx.is_simt)
	    sctx.max_vf = 1;
	  break;
	default:
	  continue;
	}
  /* Add a placeholder for simduid.  */
  if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
    sctx.simt_eargs.safe_push (NULL_TREE);

  unsigned task_reduction_cnt = 0;
  unsigned task_reduction_cntorig = 0;
  unsigned task_reduction_cnt_full = 0;
  unsigned task_reduction_cntorig_full = 0;
  unsigned task_reduction_other_cnt = 0;
  tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
  tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
  /* Do all the fixed sized types in the first pass, and the variable sized
     types in the second pass.  This makes sure that the scalar arguments to
     the variable sized types are processed before we use them in the
     variable sized operations.  For task reductions we use 4 passes, in the
     first two we ignore them, in the third one gather arguments for
     GOMP_task_reduction_remap call and in the last pass actually handle
     the task reductions.  */
  for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
			 ? 4 : 2); ++pass)
    {
      if (pass == 2 && task_reduction_cnt)
	{
	  tskred_atype
	    = build_array_type_nelts (ptr_type_node, task_reduction_cnt
						     + task_reduction_cntorig);
	  tskred_avar = create_tmp_var_raw (tskred_atype);
	  gimple_add_tmp_var (tskred_avar);
	  TREE_ADDRESSABLE (tskred_avar) = 1;
	  task_reduction_cnt_full = task_reduction_cnt;
	  task_reduction_cntorig_full = task_reduction_cntorig;
	}
      else if (pass == 3 && task_reduction_cnt)
	{
	  x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
	  gimple *g
	    = gimple_build_call (x, 3, size_int (task_reduction_cnt),
				 size_int (task_reduction_cntorig),
				 build_fold_addr_expr (tskred_avar));
	  gimple_seq_add_stmt (ilist, g);
	}
      if (pass == 3 && task_reduction_other_cnt)
	{
	  /* For reduction clauses, build
	     tskred_base = (void *) tskred_temp[2]
			   + omp_get_thread_num () * tskred_temp[1]
	     or if tskred_temp[1] is known to be constant, that constant
	     directly.  This is the start of the private reduction copy block
	     for the current thread.  */
	  tree v = create_tmp_var (integer_type_node);
	  x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
	  gimple *g = gimple_build_call (x, 0);
	  gimple_call_set_lhs (g, v);
	  gimple_seq_add_stmt (ilist, g);
	  c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tskred_temp = OMP_CLAUSE_DECL (c);
	  if (is_taskreg_ctx (ctx))
	    tskred_temp = lookup_decl (tskred_temp, ctx);
	  tree v2 = create_tmp_var (sizetype);
	  g = gimple_build_assign (v2, NOP_EXPR, v);
	  gimple_seq_add_stmt (ilist, g);
	  if (ctx->task_reductions[0])
	    v = fold_convert (sizetype, ctx->task_reductions[0]);
	  else
	    v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
	  tree v3 = create_tmp_var (sizetype);
	  g = gimple_build_assign (v3, MULT_EXPR, v2, v);
	  gimple_seq_add_stmt (ilist, g);
	  v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
	  tskred_base = create_tmp_var (ptr_type_node);
	  g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
	  gimple_seq_add_stmt (ilist, g);
	}
      task_reduction_cnt = 0;
      task_reduction_cntorig = 0;
      task_reduction_other_cnt = 0;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	{
	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
	  tree var, new_var;
	  bool by_ref;
	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	  bool task_reduction_p = false;
	  bool task_reduction_needs_orig_p = false;
	  tree cond = NULL_TREE;
	  tree allocator, allocate_ptr;

	  switch (c_kind)
	    {
	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
		continue;
	      break;
	    case OMP_CLAUSE_SHARED:
	      /* Ignore shared directives in teams construct inside
		 of target construct.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && !is_host_teams_ctx (ctx))
		continue;
	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
		{
		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
			      || is_global_var (OMP_CLAUSE_DECL (c)));
		  continue;
		}
	      /* FALLTHRU */
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_COPYIN:
	      break;
	    case OMP_CLAUSE_LINEAR:
	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		lastprivate_firstprivate = true;
	      break;
	    case OMP_CLAUSE_REDUCTION:
	    case OMP_CLAUSE_IN_REDUCTION:
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		  || is_task_ctx (ctx)
		  || OMP_CLAUSE_REDUCTION_TASK (c))
		{
		  task_reduction_p = true;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
		    {
		      task_reduction_other_cnt++;
		      if (pass == 2)
			continue;
		    }
		  else
		    task_reduction_cnt++;
		  if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		    {
		      var = OMP_CLAUSE_DECL (c);
		      /* If var is a global variable that isn't privatized
			 in outer contexts, we don't need to look up the
			 original address, it is always the address of the
			 global variable itself.  */
		      if (!DECL_P (var)
			  || omp_privatize_by_reference (var)
			  || !is_global_var
				(maybe_lookup_decl_in_outer_ctx (var, ctx)))
			{
			  task_reduction_needs_orig_p = true;
			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
			    task_reduction_cntorig++;
			}
		    }
		}
	      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		reduction_omp_orig_ref = true;
	      break;
	    case OMP_CLAUSE__REDUCTEMP_:
	      if (!is_taskreg_ctx (ctx))
		continue;
	      /* FALLTHRU */
	    case OMP_CLAUSE__LOOPTEMP_:
	      /* Handle _looptemp_/_reductemp_ clauses only on
		 parallel/task.  */
	      if (fd)
		continue;
	      break;
	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		{
		  lastprivate_firstprivate = true;
		  if (pass != 0 || is_taskloop_ctx (ctx))
		    continue;
		}
	      /* Even without corresponding firstprivate, if
		 decl is Fortran allocatable, it needs outer var
		 reference.  */
	      else if (pass == 0
		       && lang_hooks.decls.omp_private_outer_ref
							(OMP_CLAUSE_DECL (c)))
		lastprivate_firstprivate = true;
	      break;
	    case OMP_CLAUSE_ALIGNED:
	      if (pass != 1)
		continue;
	      var = OMP_CLAUSE_DECL (c);
	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
		  && !is_global_var (var))
		{
		  new_var = maybe_lookup_decl (var, ctx);
		  if (new_var == NULL_TREE)
		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
		  tree alarg = omp_clause_aligned_alignment (c);
		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
		  gimplify_and_add (x, ilist);
		}
	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		       && is_global_var (var))
		{
		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
		  new_var = lookup_decl (var, ctx);
		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  t = build_fold_addr_expr_loc (clause_loc, t);
		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
		  tree alarg = omp_clause_aligned_alignment (c);
		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
		  t = fold_convert_loc (clause_loc, ptype, t);
		  x = create_tmp_var (ptype);
		  t = build2 (MODIFY_EXPR, ptype, x, t);
		  gimplify_and_add (t, ilist);
		  t = build_simple_mem_ref_loc (clause_loc, x);
		  SET_DECL_VALUE_EXPR (new_var, t);
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      continue;
	    case OMP_CLAUSE__CONDTEMP_:
	      if (is_parallel_ctx (ctx)
		  || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
		break;
	      continue;
	    default:
	      continue;
	    }

	  if (task_reduction_p != (pass >= 2))
	    continue;

	  allocator = NULL_TREE;
	  allocate_ptr = NULL_TREE;
	  new_var = var = OMP_CLAUSE_DECL (c);
	  if ((c_kind == OMP_CLAUSE_REDUCTION
	       || c_kind == OMP_CLAUSE_IN_REDUCTION)
	      && TREE_CODE (var) == MEM_REF)
	    {
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == INDIRECT_REF
		  || TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      new_var = var;
	    }
	  if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
	    {
	      splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
	      new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
	    }
	  else if (c_kind != OMP_CLAUSE_COPYIN)
	    new_var = lookup_decl (var, ctx);

	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
	    {
	      if (pass != 0)
		continue;
	    }
	  /* C/C++ array section reductions.  */
	  else if ((c_kind == OMP_CLAUSE_REDUCTION
		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
		   && var != OMP_CLAUSE_DECL (c))
	    {
	      if (pass == 0)
		continue;

	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);

	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (orig_var, 1);
		  if (is_omp_target (ctx->stmt))
		    b = NULL_TREE;
		  else
		    b = maybe_lookup_decl (b, ctx);
		  if (b == NULL)
		    {
		      b = TREE_OPERAND (orig_var, 1);
		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		    }
		  if (integer_zerop (bias))
		    bias = b;
		  else
		    {
		      bias = fold_convert_loc (clause_loc,
					       TREE_TYPE (b), bias);
		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					      TREE_TYPE (b), b, bias);
		    }
		  orig_var = TREE_OPERAND (orig_var, 0);
		}
	      if (pass == 2)
		{
		  tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  if (is_global_var (out)
		      && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
		      && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
			  || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
			      != POINTER_TYPE)))
		    x = var;
		  else if (is_omp_target (ctx->stmt))
		    x = out;
		  else
		    {
		      bool by_ref = use_pointer_for_field (var, NULL);
		      x = build_receiver_ref (var, by_ref, ctx);
		      if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
			  && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
			      == POINTER_TYPE))
			x = build_fold_addr_expr (x);
		    }
		  if (TREE_CODE (orig_var) == INDIRECT_REF)
		    x = build_simple_mem_ref (x);
		  else if (TREE_CODE (orig_var) == ADDR_EXPR)
		    {
		      if (var == TREE_OPERAND (orig_var, 0))
			x = build_fold_addr_expr (x);
		    }
		  bias = fold_convert (sizetype, bias);
		  x = fold_convert (ptr_type_node, x);
		  x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				       TREE_TYPE (x), x, bias);
		  unsigned cnt = task_reduction_cnt - 1;
		  if (!task_reduction_needs_orig_p)
		    cnt += (task_reduction_cntorig_full
			    - task_reduction_cntorig);
		  else
		    cnt = task_reduction_cntorig - 1;
		  tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				   size_int (cnt), NULL_TREE, NULL_TREE);
		  gimplify_assign (r, x, ilist);
		  continue;
		}

	      if (TREE_CODE (orig_var) == INDIRECT_REF
		  || TREE_CODE (orig_var) == ADDR_EXPR)
		orig_var = TREE_OPERAND (orig_var, 0);
	      tree d = OMP_CLAUSE_DECL (c);
	      tree type = TREE_TYPE (d);
	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      tree sz = v;
	      const char *name = get_name (orig_var);
	      if (pass != 3 && !TREE_CONSTANT (v))
		{
		  tree t;
		  if (is_omp_target (ctx->stmt))
		    t = NULL_TREE;
		  else
		    t = maybe_lookup_decl (v, ctx);
		  if (t)
		    v = t;
		  else
		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
				       TREE_TYPE (v), v,
				       build_int_cst (TREE_TYPE (v), 1));
		  sz = fold_build2_loc (clause_loc, MULT_EXPR,
					TREE_TYPE (v), t,
					TYPE_SIZE_UNIT (TREE_TYPE (type)));
		}
	      if (pass == 3)
		{
		  tree xv = create_tmp_var (ptr_type_node);
		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		    {
		      unsigned cnt = task_reduction_cnt - 1;
		      if (!task_reduction_needs_orig_p)
			cnt += (task_reduction_cntorig_full
				- task_reduction_cntorig);
		      else
			cnt = task_reduction_cntorig - 1;
		      x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				  size_int (cnt), NULL_TREE, NULL_TREE);

		      gimple *g = gimple_build_assign (xv, x);
		      gimple_seq_add_stmt (ilist, g);
		    }
		  else
		    {
		      unsigned int idx = *ctx->task_reduction_map->get (c);
		      tree off;
		      if (ctx->task_reductions[1 + idx])
			off = fold_convert (sizetype,
					    ctx->task_reductions[1 + idx]);
		      else
			off = task_reduction_read (ilist, tskred_temp, sizetype,
						   7 + 3 * idx + 1);
		      gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
						       tskred_base, off);
		      gimple_seq_add_stmt (ilist, g);
		    }
		  x = fold_convert (build_pointer_type (boolean_type_node),
				    xv);
		  if (TREE_CONSTANT (v))
		    x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
				     TYPE_SIZE_UNIT (type));
		  else
		    {
		      tree t;
		      if (is_omp_target (ctx->stmt))
			t = NULL_TREE;
		      else
			t = maybe_lookup_decl (v, ctx);
		      if (t)
			v = t;
		      else
			v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		      gimplify_expr (&v, ilist, NULL, is_gimple_val,
				     fb_rvalue);
		      t = fold_build2_loc (clause_loc, PLUS_EXPR,
					   TREE_TYPE (v), v,
					   build_int_cst (TREE_TYPE (v), 1));
		      t = fold_build2_loc (clause_loc, MULT_EXPR,
					   TREE_TYPE (v), t,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		      x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
		    }
		  cond = create_tmp_var (TREE_TYPE (x));
		  gimplify_assign (cond, x, ilist);
		  x = xv;
		}
	      else if (lower_private_allocate (var, type, allocator,
					       allocate_ptr, ilist, ctx,
					       true,
					       TREE_CONSTANT (v)
					       ? TYPE_SIZE_UNIT (type)
					       : sz))
		x = allocate_ptr;
	      else if (TREE_CONSTANT (v))
		{
		  x = create_tmp_var_raw (type, name);
		  gimple_add_tmp_var (x);
		  TREE_ADDRESSABLE (x) = 1;
		  x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else
		{
		  tree atmp
		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
		  x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
		}

	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      x = fold_convert_loc (clause_loc, ptype, x);
	      tree y = create_tmp_var (ptype, name);
	      gimplify_assign (y, x, ilist);
	      x = y;
	      tree yb = y;

	      if (!integer_zerop (bias))
		{
		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
					   bias);
		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
					 x);
		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
					pointer_sized_int_node, yb, bias);
		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
		  yb = create_tmp_var (ptype, name);
		  gimplify_assign (yb, x, ilist);
		  x = yb;
		}

	      d = TREE_OPERAND (d, 0);
	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
		d = TREE_OPERAND (d, 0);
	      if (TREE_CODE (d) == ADDR_EXPR)
		{
		  if (orig_var != var)
		    {
		      gcc_assert (is_variable_sized (orig_var));
		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
					    x);
		      gimplify_assign (new_var, x, ilist);
		      tree new_orig_var = lookup_decl (orig_var, ctx);
		      tree t = build_fold_indirect_ref (new_var);
		      DECL_IGNORED_P (new_var) = 0;
		      TREE_THIS_NOTRAP (t) = 1;
		      SET_DECL_VALUE_EXPR (new_orig_var, t);
		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
		    }
		  else
		    {
		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
				  build_int_cst (ptype, 0));
		      SET_DECL_VALUE_EXPR (new_var, x);
		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		    }
		}
	      else
		{
		  gcc_assert (orig_var == var);
		  if (TREE_CODE (d) == INDIRECT_REF)
		    {
		      x = create_tmp_var (ptype, name);
		      TREE_ADDRESSABLE (x) = 1;
		      gimplify_assign (x, yb, ilist);
		      x = build_fold_addr_expr_loc (clause_loc, x);
		    }
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  gimplify_assign (new_var, x, ilist);
		}
	      /* GOMP_taskgroup_reduction_register memsets the whole
		 array to zero.  If the initializer is zero, we don't
		 need to initialize it again, just mark it as ever
		 used unconditionally, i.e. cond = true.  */
	      if (cond
		  && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
		  && initializer_zerop (omp_reduction_init (c,
							    TREE_TYPE (type))))
		{
		  gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
						   boolean_true_node);
		  gimple_seq_add_stmt (ilist, g);
		  continue;
		}
	      tree end = create_artificial_label (UNKNOWN_LOCATION);
	      if (cond)
		{
		  gimple *g;
		  if (!is_parallel_ctx (ctx))
		    {
		      tree condv = create_tmp_var (boolean_type_node);
		      g = gimple_build_assign (condv,
					       build_simple_mem_ref (cond));
		      gimple_seq_add_stmt (ilist, g);
		      tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
		      g = gimple_build_cond (NE_EXPR, condv,
					     boolean_false_node, end, lab1);
		      gimple_seq_add_stmt (ilist, g);
		      gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
		    }
		  g = gimple_build_assign (build_simple_mem_ref (cond),
					   boolean_true_node);
		  gimple_seq_add_stmt (ilist, g);
		}

	      tree y1 = create_tmp_var (ptype);
	      gimplify_assign (y1, y, ilist);
	      tree i2 = NULL_TREE, y2 = NULL_TREE;
	      tree body2 = NULL_TREE, end2 = NULL_TREE;
	      tree y3 = NULL_TREE, y4 = NULL_TREE;
	      if (task_reduction_needs_orig_p)
		{
		  y3 = create_tmp_var (ptype);
		  tree ref;
		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		    ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				  size_int (task_reduction_cnt_full
					    + task_reduction_cntorig - 1),
				  NULL_TREE, NULL_TREE);
		  else
		    {
		      unsigned int idx = *ctx->task_reduction_map->get (c);
		      ref = task_reduction_read (ilist, tskred_temp, ptype,
						 7 + 3 * idx);
		    }
		  gimplify_assign (y3, ref, ilist);
		}
	      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
		{
		  if (pass != 3)
		    {
		      y2 = create_tmp_var (ptype);
		      gimplify_assign (y2, y, ilist);
		    }
		  if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		    {
		      tree ref = build_outer_var_ref (var, ctx);
		      /* For ref build_outer_var_ref already performs this.  */
		      if (TREE_CODE (d) == INDIRECT_REF)
			gcc_assert (omp_privatize_by_reference (var));
		      else if (TREE_CODE (d) == ADDR_EXPR)
			ref = build_fold_addr_expr (ref);
		      else if (omp_privatize_by_reference (var))
			ref = build_fold_addr_expr (ref);
		      ref = fold_convert_loc (clause_loc, ptype, ref);
		      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
			  && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
			{
			  y3 = create_tmp_var (ptype);
			  gimplify_assign (y3, unshare_expr (ref), ilist);
			}
		      if (is_simd)
			{
			  y4 = create_tmp_var (ptype);
			  gimplify_assign (y4, ref, dlist);
			}
		    }
		}
	      tree i = create_tmp_var (TREE_TYPE (v));
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
	      if (y2)
		{
		  i2 = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
		  body2 = create_artificial_label (UNKNOWN_LOCATION);
		  end2 = create_artificial_label (UNKNOWN_LOCATION);
		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
		}
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  SET_DECL_VALUE_EXPR (decl_placeholder,
				       build_simple_mem_ref (y1));
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  SET_DECL_VALUE_EXPR (placeholder,
				       y3 ? build_simple_mem_ref (y3)
				       : error_mark_node);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  x = lang_hooks.decls.omp_clause_default_ctor
			(c, build_simple_mem_ref (y1),
			 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
		  if (x)
		    gimplify_and_add (x, ilist);
		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		    {
		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (ilist, tseq);
		    }
		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		  if (is_simd)
		    {
		      SET_DECL_VALUE_EXPR (decl_placeholder,
					   build_simple_mem_ref (y2));
		      SET_DECL_VALUE_EXPR (placeholder,
					   build_simple_mem_ref (y4));
		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (dlist, tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    }
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
		  if (y2)
		    {
		      x = lang_hooks.decls.omp_clause_dtor
						(c, build_simple_mem_ref (y2));
		      if (x)
			gimplify_and_add (x, dlist);
		    }
		}
	      else
		{
		  x = omp_reduction_init (c, TREE_TYPE (type));
		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);

		  /* reduction(-:var) sums up the partial results, so it
		     acts identically to reduction(+:var).  */
		  if (code == MINUS_EXPR)
		    code = PLUS_EXPR;

		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
		  if (is_simd)
		    {
		      x = build2 (code, TREE_TYPE (type),
				  build_simple_mem_ref (y4),
				  build_simple_mem_ref (y2));
		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
		    }
		}
	      gimple *g
		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (ilist, g);
	      if (y3)
		{
		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  gimple_seq_add_stmt (ilist, g);
		}
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (ilist, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
	      gimple_seq_add_stmt (ilist, g);
	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
	      if (y2)
		{
		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  gimple_seq_add_stmt (dlist, g);
		  if (y4)
		    {
		      g = gimple_build_assign
			    (y4, POINTER_PLUS_EXPR, y4,
			     TYPE_SIZE_UNIT (TREE_TYPE (type)));
		      gimple_seq_add_stmt (dlist, g);
		    }
		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
					   build_int_cst (TREE_TYPE (i2), 1));
		  gimple_seq_add_stmt (dlist, g);
		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
		  gimple_seq_add_stmt (dlist, g);
		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
		}
	      if (allocator)
		{
		  tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
		  g = gimple_build_call (f, 2, allocate_ptr, allocator);
		  gimple_seq_add_stmt (dlist, g);
		}
	      continue;
	    }
	  else if (pass == 2)
	    {
	      tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (is_global_var (out))
		x = var;
	      else if (is_omp_target (ctx->stmt))
		x = out;
	      else
		{
		  bool by_ref = use_pointer_for_field (var, ctx);
		  x = build_receiver_ref (var, by_ref, ctx);
		}
	      if (!omp_privatize_by_reference (var))
		x = build_fold_addr_expr (x);
	      x = fold_convert (ptr_type_node, x);
	      unsigned cnt = task_reduction_cnt - 1;
	      if (!task_reduction_needs_orig_p)
		cnt += task_reduction_cntorig_full - task_reduction_cntorig;
	      else
		cnt = task_reduction_cntorig - 1;
	      tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
			       size_int (cnt), NULL_TREE, NULL_TREE);
	      gimplify_assign (r, x, ilist);
	      continue;
	    }
	  else if (pass == 3)
	    {
	      tree type = TREE_TYPE (new_var);
	      if (!omp_privatize_by_reference (var))
		type = build_pointer_type (type);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		{
		  unsigned cnt = task_reduction_cnt - 1;
		  if (!task_reduction_needs_orig_p)
		    cnt += (task_reduction_cntorig_full
			    - task_reduction_cntorig);
		  else
		    cnt = task_reduction_cntorig - 1;
		  x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
			      size_int (cnt), NULL_TREE, NULL_TREE);
		}
	      else
		{
		  unsigned int idx = *ctx->task_reduction_map->get (c);
		  tree off;
		  if (ctx->task_reductions[1 + idx])
		    off = fold_convert (sizetype,
					ctx->task_reductions[1 + idx]);
		  else
		    off = task_reduction_read (ilist, tskred_temp, sizetype,
					       7 + 3 * idx + 1);
		  x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
				   tskred_base, off);
		}
	      x = fold_convert (type, x);
	      tree t;
	      if (omp_privatize_by_reference (var))
		{
		  gimplify_assign (new_var, x, ilist);
		  t = new_var;
		  new_var = build_simple_mem_ref (new_var);
		}
	      else
		{
		  t = create_tmp_var (type);
		  gimplify_assign (t, x, ilist);
		  SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      t = fold_convert (build_pointer_type (boolean_type_node), t);
	      t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
			       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      cond = create_tmp_var (TREE_TYPE (t));
	      gimplify_assign (cond, t, ilist);
	    }
	  else if (is_variable_sized (var))
	    {
	      /* For variable sized types, we need to allocate the
		 actual storage here.  Call alloca and store the
		 result in the pointer decl that we created elsewhere.  */
	      if (pass == 0)
		continue;

	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
		{
		  tree tmp;

		  ptr = DECL_VALUE_EXPR (new_var);
		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
		  ptr = TREE_OPERAND (ptr, 0);
		  gcc_assert (DECL_P (ptr));
		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));

		  if (lower_private_allocate (var, new_var, allocator,
					      allocate_ptr, ilist, ctx,
					      false, x))
		    tmp = allocate_ptr;
		  else
		    {
		      /* void *tmp = __builtin_alloca */
		      tree atmp
			= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		      gcall *stmt
			= gimple_build_call (atmp, 2, x,
					     size_int (DECL_ALIGN (var)));
		      cfun->calls_alloca = 1;
		      tmp = create_tmp_var_raw (ptr_type_node);
		      gimple_add_tmp_var (tmp);
		      gimple_call_set_lhs (stmt, tmp);

		      gimple_seq_add_stmt (ilist, stmt);
		    }

		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
		  gimplify_assign (ptr, x, ilist);
		}
	    }
	  else if (omp_privatize_by_reference (var)
		   && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
		       || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
	    {
	      /* For references that are being privatized for Fortran,
		 allocate new backing storage for the new pointer
		 variable.  This allows us to avoid changing all the
		 code that expects a pointer to something that expects
		 a direct variable.  */
	      if (pass == 0)
		continue;

	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
		{
		  x = build_receiver_ref (var, false, ctx);
		  if (ctx->allocate_map)
		    if (tree *allocatep = ctx->allocate_map->get (var))
		      {
			allocator = *allocatep;
			if (TREE_CODE (allocator) == TREE_LIST)
			  allocator = TREE_PURPOSE (allocator);
			if (TREE_CODE (allocator) != INTEGER_CST)
			  allocator = build_outer_var_ref (allocator, ctx);
			allocator = fold_convert (pointer_sized_int_node,
						  allocator);
			allocate_ptr = unshare_expr (x);
		      }
		  if (allocator == NULL_TREE)
		    x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else if (lower_private_allocate (var, new_var, allocator,
					       allocate_ptr,
					       ilist, ctx, true, x))
		x = allocate_ptr;
	      else if (TREE_CONSTANT (x))
		{
		  /* For reduction in SIMD loop, defer adding the
		     initialization of the reference, because if we decide
		     to use SIMD array for it, the initialization could cause
		     expansion ICE.  Ditto for other privatization clauses.  */
		  if (is_simd)
		    x = NULL_TREE;
		  else
		    {
		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					      get_name (var));
		      gimple_add_tmp_var (x);
		      TREE_ADDRESSABLE (x) = 1;
		      x = build_fold_addr_expr_loc (clause_loc, x);
		    }
		}
	      else
		{
		  tree atmp
		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
		  tree al = size_int (TYPE_ALIGN (rtype));
		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		}

	      if (x)
		{
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  gimplify_assign (new_var, x, ilist);
		}

	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	    }
	  else if ((c_kind == OMP_CLAUSE_REDUCTION
		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      if (pass == 0)
		continue;
	    }
	  else if (pass != 0)
	    continue;
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	      /* Ignore shared directives in teams construct inside
		 target construct.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && !is_host_teams_ctx (ctx))
		continue;
	      /* Shared global vars are just accessed directly.  */
	      if (is_global_var (new_var))
		break;
	      /* For taskloop firstprivate/lastprivate, represented
		 as firstprivate and shared clause on the task, new_var
		 is the firstprivate var.  */
	      if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
		break;
	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
		 needs to be delayed until after fixup_child_record_type so
		 that we get the correct type during the dereference.  */
	      by_ref = use_pointer_for_field (var, ctx);
	      x = build_receiver_ref (var, by_ref, ctx);
	      SET_DECL_VALUE_EXPR (new_var, x);
	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;

	      /* ??? If VAR is not passed by reference, and the variable
		 hasn't been initialized yet, then we'll get a warning for
		 the store into the omp_data_s structure.  Ideally, we'd be
		 able to notice this and not store anything at all, but
		 we're generating code too early.  Suppress the warning.  */
	      if (!by_ref)
		suppress_warning (var, OPT_Wuninitialized);
	      break;

	    case OMP_CLAUSE__CONDTEMP_:
	      if (is_parallel_ctx (ctx))
		{
		  x = build_receiver_ref (var, false, ctx);
		  SET_DECL_VALUE_EXPR (new_var, x);
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
		{
		  x = build_zero_cst (TREE_TYPE (var));
		  goto do_private;
		}
	      break;

	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		break;
	      /* FALLTHRU */

	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
		x = build_outer_var_ref (var, ctx);
	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
		{
		  if (is_task_ctx (ctx))
		    x = build_receiver_ref (var, false, ctx);
		  else
		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
		}
	      else
		x = NULL;
	    do_private:
	      tree nx;
	      bool copy_ctor;
	      copy_ctor = false;
	      lower_private_allocate (var, new_var, allocator, allocate_ptr,
				      ilist, ctx, false, NULL_TREE);
	      nx = unshare_expr (new_var);
	      if (is_simd
		  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
		copy_ctor = true;
	      if (copy_ctor)
		nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
	      else
		nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
	      if (is_simd)
		{
		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
		  if ((TREE_ADDRESSABLE (new_var) || nx || y
		       || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
			   && (gimple_omp_for_collapse (ctx->stmt) != 1
			       || (gimple_omp_for_index (ctx->stmt, 0)
				   != new_var)))
		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
		       || omp_privatize_by_reference (var))
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar))
		    {
		      if (omp_privatize_by_reference (var))
			{
			  gcc_assert (TREE_CODE (new_var) == MEM_REF);
			  tree new_vard = TREE_OPERAND (new_var, 0);
			  gcc_assert (DECL_P (new_vard));
			  SET_DECL_VALUE_EXPR (new_vard,
					       build_fold_addr_expr (lvar));
			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
			}

		      if (nx)
			{
			  tree iv = unshare_expr (ivar);
			  if (copy_ctor)
			    x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
								       x);
			  else
			    x = lang_hooks.decls.omp_clause_default_ctor (c,
									  iv,
									  x);
			}
		      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
			{
			  x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
				      unshare_expr (ivar), x);
			  nx = x;
			}
		      if (x)
			gimplify_and_add (x, &llist[0]);
		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
			  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
			{
			  tree v = new_var;
			  if (!DECL_P (v))
			    {
			      gcc_assert (TREE_CODE (v) == MEM_REF);
			      v = TREE_OPERAND (v, 0);
			      gcc_assert (DECL_P (v));
			    }
			  v = *ctx->lastprivate_conditional_map->get (v);
			  tree t = create_tmp_var (TREE_TYPE (v));
			  tree z = build_zero_cst (TREE_TYPE (v));
			  tree orig_v
			    = build_outer_var_ref (var, ctx,
						   OMP_CLAUSE_LASTPRIVATE);
			  gimple_seq_add_stmt (dlist,
					       gimple_build_assign (t, z));
			  gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
			  tree civar = DECL_VALUE_EXPR (v);
			  gcc_assert (TREE_CODE (civar) == ARRAY_REF);
			  civar = unshare_expr (civar);
			  TREE_OPERAND (civar, 1) = sctx.idx;
			  x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
				      unshare_expr (civar));
			  x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
				      build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
					      orig_v, unshare_expr (ivar)));
			  tree cond = build2 (LT_EXPR, boolean_type_node, t,
					      civar);
			  x = build3 (COND_EXPR, void_type_node, cond, x,
				      void_node);
			  gimple_seq tseq = NULL;
			  gimplify_and_add (x, &tseq);
			  if (ctx->outer)
			    lower_omp (&tseq, ctx->outer);
			  gimple_seq_add_seq (&llist[1], tseq);
			}
		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
			  && ctx->for_simd_scan_phase)
			{
			  x = unshare_expr (ivar);
			  tree orig_v
			    = build_outer_var_ref (var, ctx,
						   OMP_CLAUSE_LASTPRIVATE);
			  x = lang_hooks.decls.omp_clause_assign_op (c, x,
								     orig_v);
			  gimplify_and_add (x, &llist[0]);
			}
		      if (y)
			{
			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
			  if (y)
			    gimplify_and_add (y, &llist[1]);
			}
		      break;
		    }
		  if (omp_privatize_by_reference (var))
		    {
		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
		      tree new_vard = TREE_OPERAND (new_var, 0);
		      gcc_assert (DECL_P (new_vard));
		      tree type = TREE_TYPE (TREE_TYPE (new_vard));
		      x = TYPE_SIZE_UNIT (type);
		      if (TREE_CONSTANT (x))
			{
			  x = create_tmp_var_raw (type, get_name (var));
			  gimple_add_tmp_var (x);
			  TREE_ADDRESSABLE (x) = 1;
			  x = build_fold_addr_expr_loc (clause_loc, x);
			  x = fold_convert_loc (clause_loc,
						TREE_TYPE (new_vard), x);
			  gimplify_assign (new_vard, x, ilist);
			}
		    }
		}
	      if (nx)
		gimplify_and_add (nx, ilist);
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  && is_simd
		  && ctx->for_simd_scan_phase)
		{
		  tree orig_v = build_outer_var_ref (var, ctx,
						     OMP_CLAUSE_LASTPRIVATE);
		  x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
							     orig_v);
		  gimplify_and_add (x, ilist);
		}
	      /* FALLTHRU */

	    do_dtor:
	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		gimplify_and_add (x, dlist);
	      if (allocator)
		{
		  if (!is_gimple_val (allocator))
		    {
		      tree avar = create_tmp_var (TREE_TYPE (allocator));
		      gimplify_assign (avar, allocator, dlist);
		      allocator = avar;
		    }
		  if (!is_gimple_val (allocate_ptr))
		    {
		      tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
		      gimplify_assign (apvar, allocate_ptr, dlist);
		      allocate_ptr = apvar;
		    }
		  tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
		  gimple *g
		    = gimple_build_call (f, 2, allocate_ptr, allocator);
		  gimple_seq_add_stmt (dlist, g);
		}
	      break;

	    case OMP_CLAUSE_LINEAR:
	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		goto do_firstprivate;
	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
))
6144 x
= build_outer_var_ref (var
, ctx
);
6147 case OMP_CLAUSE_FIRSTPRIVATE
:
6148 if (is_task_ctx (ctx
))
6150 if ((omp_privatize_by_reference (var
)
6151 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
6152 || is_variable_sized (var
))
6154 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
6156 || use_pointer_for_field (var
, NULL
))
6158 x
= build_receiver_ref (var
, false, ctx
);
6159 if (ctx
->allocate_map
)
6160 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
6162 allocator
= *allocatep
;
6163 if (TREE_CODE (allocator
) == TREE_LIST
)
6164 allocator
= TREE_PURPOSE (allocator
);
6165 if (TREE_CODE (allocator
) != INTEGER_CST
)
6166 allocator
= build_outer_var_ref (allocator
, ctx
);
6167 allocator
= fold_convert (pointer_sized_int_node
,
6169 allocate_ptr
= unshare_expr (x
);
6170 x
= build_simple_mem_ref (x
);
6171 TREE_THIS_NOTRAP (x
) = 1;
6173 SET_DECL_VALUE_EXPR (new_var
, x
);
6174 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
6178 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
6179 && omp_privatize_by_reference (var
))
6181 x
= build_outer_var_ref (var
, ctx
);
6182 gcc_assert (TREE_CODE (x
) == MEM_REF
6183 && integer_zerop (TREE_OPERAND (x
, 1)));
6184 x
= TREE_OPERAND (x
, 0);
6185 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6186 (c
, unshare_expr (new_var
), x
);
6187 gimplify_and_add (x
, ilist
);
6191 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
6192 ilist
, ctx
, false, NULL_TREE
);
6193 x
= build_outer_var_ref (var
, ctx
);
6196 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6197 && gimple_omp_for_combined_into_p (ctx
->stmt
))
6199 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6201 t
= build_outer_var_ref (t
, ctx
);
6202 tree stept
= TREE_TYPE (t
);
6203 tree ct
= omp_find_clause (clauses
,
6204 OMP_CLAUSE__LOOPTEMP_
);
6206 tree l
= OMP_CLAUSE_DECL (ct
);
6207 tree n1
= fd
->loop
.n1
;
6208 tree step
= fd
->loop
.step
;
6209 tree itype
= TREE_TYPE (l
);
6210 if (POINTER_TYPE_P (itype
))
6211 itype
= signed_type_for (itype
);
6212 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
6213 if (TYPE_UNSIGNED (itype
)
6214 && fd
->loop
.cond_code
== GT_EXPR
)
6215 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
6216 fold_build1 (NEGATE_EXPR
, itype
, l
),
6217 fold_build1 (NEGATE_EXPR
,
6220 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
6221 t
= fold_build2 (MULT_EXPR
, stept
,
6222 fold_convert (stept
, l
), t
);
6224 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
6226 if (omp_privatize_by_reference (var
))
6228 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6229 tree new_vard
= TREE_OPERAND (new_var
, 0);
6230 gcc_assert (DECL_P (new_vard
));
6231 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6232 nx
= TYPE_SIZE_UNIT (type
);
6233 if (TREE_CONSTANT (nx
))
6235 nx
= create_tmp_var_raw (type
,
6237 gimple_add_tmp_var (nx
);
6238 TREE_ADDRESSABLE (nx
) = 1;
6239 nx
= build_fold_addr_expr_loc (clause_loc
,
6241 nx
= fold_convert_loc (clause_loc
,
6242 TREE_TYPE (new_vard
),
6244 gimplify_assign (new_vard
, nx
, ilist
);
6248 x
= lang_hooks
.decls
.omp_clause_linear_ctor
6250 gimplify_and_add (x
, ilist
);
6254 if (POINTER_TYPE_P (TREE_TYPE (x
)))
6255 x
= fold_build_pointer_plus (x
, t
);
6257 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
,
6258 fold_convert (TREE_TYPE (x
), t
));
6261 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
6262 || TREE_ADDRESSABLE (new_var
)
6263 || omp_privatize_by_reference (var
))
6264 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6267 if (omp_privatize_by_reference (var
))
6269 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6270 tree new_vard
= TREE_OPERAND (new_var
, 0);
6271 gcc_assert (DECL_P (new_vard
));
6272 SET_DECL_VALUE_EXPR (new_vard
,
6273 build_fold_addr_expr (lvar
));
6274 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6276 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
6278 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
6279 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
6280 gimplify_and_add (x
, ilist
);
6281 gimple_stmt_iterator gsi
6282 = gsi_start (*gimple_omp_body_ptr (ctx
->stmt
));
6284 = gimple_build_assign (unshare_expr (lvar
), iv
);
6285 gsi_insert_before_without_update (&gsi
, g
,
6287 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6288 enum tree_code code
= PLUS_EXPR
;
6289 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
6290 code
= POINTER_PLUS_EXPR
;
6291 g
= gimple_build_assign (iv
, code
, iv
, t
);
6292 gsi_insert_before_without_update (&gsi
, g
,
6296 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6297 (c
, unshare_expr (ivar
), x
);
6298 gimplify_and_add (x
, &llist
[0]);
6299 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6301 gimplify_and_add (x
, &llist
[1]);
6304 if (omp_privatize_by_reference (var
))
6306 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6307 tree new_vard
= TREE_OPERAND (new_var
, 0);
6308 gcc_assert (DECL_P (new_vard
));
6309 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6310 nx
= TYPE_SIZE_UNIT (type
);
6311 if (TREE_CONSTANT (nx
))
6313 nx
= create_tmp_var_raw (type
, get_name (var
));
6314 gimple_add_tmp_var (nx
);
6315 TREE_ADDRESSABLE (nx
) = 1;
6316 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
6317 nx
= fold_convert_loc (clause_loc
,
6318 TREE_TYPE (new_vard
), nx
);
6319 gimplify_assign (new_vard
, nx
, ilist
);
6323 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6324 (c
, unshare_expr (new_var
), x
);
6325 gimplify_and_add (x
, ilist
);
6328 case OMP_CLAUSE__LOOPTEMP_
:
6329 case OMP_CLAUSE__REDUCTEMP_
:
6330 gcc_assert (is_taskreg_ctx (ctx
));
6331 x
= build_outer_var_ref (var
, ctx
);
6332 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
6333 gimplify_and_add (x
, ilist
);
6336 case OMP_CLAUSE_COPYIN
:
6337 by_ref
= use_pointer_for_field (var
, NULL
);
6338 x
= build_receiver_ref (var
, by_ref
, ctx
);
6339 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
6340 append_to_statement_list (x
, ©in_seq
);
6341 copyin_by_ref
|= by_ref
;
6344 case OMP_CLAUSE_REDUCTION
:
6345 case OMP_CLAUSE_IN_REDUCTION
:
6346 /* OpenACC reductions are initialized using the
6347 GOACC_REDUCTION internal function. */
6348 if (is_gimple_omp_oacc (ctx
->stmt
))
6350 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6352 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6354 tree ptype
= TREE_TYPE (placeholder
);
6357 x
= error_mark_node
;
6358 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
6359 && !task_reduction_needs_orig_p
)
6361 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
6363 tree pptype
= build_pointer_type (ptype
);
6364 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
6365 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
6366 size_int (task_reduction_cnt_full
6367 + task_reduction_cntorig
- 1),
6368 NULL_TREE
, NULL_TREE
);
6372 = *ctx
->task_reduction_map
->get (c
);
6373 x
= task_reduction_read (ilist
, tskred_temp
,
6374 pptype
, 7 + 3 * idx
);
6376 x
= fold_convert (pptype
, x
);
6377 x
= build_simple_mem_ref (x
);
6382 lower_private_allocate (var
, new_var
, allocator
,
6383 allocate_ptr
, ilist
, ctx
, false,
6385 x
= build_outer_var_ref (var
, ctx
);
6387 if (omp_privatize_by_reference (var
)
6388 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
6389 x
= build_fold_addr_expr_loc (clause_loc
, x
);
6391 SET_DECL_VALUE_EXPR (placeholder
, x
);
6392 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6393 tree new_vard
= new_var
;
6394 if (omp_privatize_by_reference (var
))
6396 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6397 new_vard
= TREE_OPERAND (new_var
, 0);
6398 gcc_assert (DECL_P (new_vard
));
6400 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6402 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6403 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6406 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6410 if (new_vard
== new_var
)
6412 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
6413 SET_DECL_VALUE_EXPR (new_var
, ivar
);
6417 SET_DECL_VALUE_EXPR (new_vard
,
6418 build_fold_addr_expr (ivar
));
6419 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6421 x
= lang_hooks
.decls
.omp_clause_default_ctor
6422 (c
, unshare_expr (ivar
),
6423 build_outer_var_ref (var
, ctx
));
6424 if (rvarp
&& ctx
->for_simd_scan_phase
)
6427 gimplify_and_add (x
, &llist
[0]);
6428 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6430 gimplify_and_add (x
, &llist
[1]);
6437 gimplify_and_add (x
, &llist
[0]);
6439 tree ivar2
= unshare_expr (lvar
);
6440 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6441 x
= lang_hooks
.decls
.omp_clause_default_ctor
6442 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
6443 gimplify_and_add (x
, &llist
[0]);
6447 x
= lang_hooks
.decls
.omp_clause_default_ctor
6448 (c
, unshare_expr (rvar2
),
6449 build_outer_var_ref (var
, ctx
));
6450 gimplify_and_add (x
, &llist
[0]);
6453 /* For types that need construction, add another
6454 private var which will be default constructed
6455 and optionally initialized with
6456 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6457 loop we want to assign this value instead of
6458 constructing and destructing it in each
6460 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
6461 gimple_add_tmp_var (nv
);
6462 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
6466 x
= lang_hooks
.decls
.omp_clause_default_ctor
6467 (c
, nv
, build_outer_var_ref (var
, ctx
));
6468 gimplify_and_add (x
, ilist
);
6470 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6472 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6473 x
= DECL_VALUE_EXPR (new_vard
);
6475 if (new_vard
!= new_var
)
6476 vexpr
= build_fold_addr_expr (nv
);
6477 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6478 lower_omp (&tseq
, ctx
);
6479 SET_DECL_VALUE_EXPR (new_vard
, x
);
6480 gimple_seq_add_seq (ilist
, tseq
);
6481 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6484 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6486 gimplify_and_add (x
, dlist
);
6489 tree ref
= build_outer_var_ref (var
, ctx
);
6490 x
= unshare_expr (ivar
);
6491 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
6493 gimplify_and_add (x
, &llist
[0]);
6495 ref
= build_outer_var_ref (var
, ctx
);
6496 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
6498 gimplify_and_add (x
, &llist
[3]);
6500 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6501 if (new_vard
== new_var
)
6502 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6504 SET_DECL_VALUE_EXPR (new_vard
,
6505 build_fold_addr_expr (lvar
));
6507 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6509 gimplify_and_add (x
, &llist
[1]);
6511 tree ivar2
= unshare_expr (lvar
);
6512 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6513 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
6515 gimplify_and_add (x
, &llist
[1]);
6519 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
6521 gimplify_and_add (x
, &llist
[1]);
6526 gimplify_and_add (x
, &llist
[0]);
6527 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6529 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6530 lower_omp (&tseq
, ctx
);
6531 gimple_seq_add_seq (&llist
[0], tseq
);
6533 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6534 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6535 lower_omp (&tseq
, ctx
);
6536 gimple_seq_add_seq (&llist
[1], tseq
);
6537 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6538 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6539 if (new_vard
== new_var
)
6540 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6542 SET_DECL_VALUE_EXPR (new_vard
,
6543 build_fold_addr_expr (lvar
));
6544 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6546 gimplify_and_add (x
, &llist
[1]);
6549 /* If this is a reference to constant size reduction var
6550 with placeholder, we haven't emitted the initializer
6551 for it because it is undesirable if SIMD arrays are used.
6552 But if they aren't used, we need to emit the deferred
6553 initialization now. */
6554 else if (omp_privatize_by_reference (var
) && is_simd
)
6555 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6557 tree lab2
= NULL_TREE
;
6561 if (!is_parallel_ctx (ctx
))
6563 tree condv
= create_tmp_var (boolean_type_node
);
6564 tree m
= build_simple_mem_ref (cond
);
6565 g
= gimple_build_assign (condv
, m
);
6566 gimple_seq_add_stmt (ilist
, g
);
6568 = create_artificial_label (UNKNOWN_LOCATION
);
6569 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6570 g
= gimple_build_cond (NE_EXPR
, condv
,
6573 gimple_seq_add_stmt (ilist
, g
);
6574 gimple_seq_add_stmt (ilist
,
6575 gimple_build_label (lab1
));
6577 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6579 gimple_seq_add_stmt (ilist
, g
);
6581 x
= lang_hooks
.decls
.omp_clause_default_ctor
6582 (c
, unshare_expr (new_var
),
6584 : build_outer_var_ref (var
, ctx
));
6586 gimplify_and_add (x
, ilist
);
6588 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6589 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6591 if (ctx
->for_simd_scan_phase
)
6594 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
6596 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
6597 gimple_add_tmp_var (nv
);
6598 ctx
->cb
.decl_map
->put (new_vard
, nv
);
6599 x
= lang_hooks
.decls
.omp_clause_default_ctor
6600 (c
, nv
, build_outer_var_ref (var
, ctx
));
6602 gimplify_and_add (x
, ilist
);
6603 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6605 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6607 if (new_vard
!= new_var
)
6608 vexpr
= build_fold_addr_expr (nv
);
6609 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6610 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6611 lower_omp (&tseq
, ctx
);
6612 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
6613 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
6614 gimple_seq_add_seq (ilist
, tseq
);
6616 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6617 if (is_simd
&& ctx
->scan_exclusive
)
6620 = create_tmp_var_raw (TREE_TYPE (new_var
));
6621 gimple_add_tmp_var (nv2
);
6622 ctx
->cb
.decl_map
->put (nv
, nv2
);
6623 x
= lang_hooks
.decls
.omp_clause_default_ctor
6624 (c
, nv2
, build_outer_var_ref (var
, ctx
));
6625 gimplify_and_add (x
, ilist
);
6626 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6628 gimplify_and_add (x
, dlist
);
6630 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6632 gimplify_and_add (x
, dlist
);
6635 && ctx
->scan_exclusive
6636 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
6638 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
6639 gimple_add_tmp_var (nv2
);
6640 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
6641 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6643 gimplify_and_add (x
, dlist
);
6645 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6649 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6651 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6652 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
6653 && is_omp_target (ctx
->stmt
))
6655 tree d
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
6656 tree oldv
= NULL_TREE
;
6658 if (DECL_HAS_VALUE_EXPR_P (d
))
6659 oldv
= DECL_VALUE_EXPR (d
);
6660 SET_DECL_VALUE_EXPR (d
, new_vard
);
6661 DECL_HAS_VALUE_EXPR_P (d
) = 1;
6662 lower_omp (&tseq
, ctx
);
6664 SET_DECL_VALUE_EXPR (d
, oldv
);
6667 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
6668 DECL_HAS_VALUE_EXPR_P (d
) = 0;
6672 lower_omp (&tseq
, ctx
);
6673 gimple_seq_add_seq (ilist
, tseq
);
6675 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6678 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6679 lower_omp (&tseq
, ctx
);
6680 gimple_seq_add_seq (dlist
, tseq
);
6681 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6683 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6687 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6694 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
6695 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
6696 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6701 tree lab2
= NULL_TREE
;
6702 /* GOMP_taskgroup_reduction_register memsets the whole
6703 array to zero. If the initializer is zero, we don't
6704 need to initialize it again, just mark it as ever
6705 used unconditionally, i.e. cond = true. */
6706 if (initializer_zerop (x
))
6708 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6710 gimple_seq_add_stmt (ilist
, g
);
6715 if (!cond) { cond = true; new_var = x; } */
6716 if (!is_parallel_ctx (ctx
))
6718 tree condv
= create_tmp_var (boolean_type_node
);
6719 tree m
= build_simple_mem_ref (cond
);
6720 g
= gimple_build_assign (condv
, m
);
6721 gimple_seq_add_stmt (ilist
, g
);
6723 = create_artificial_label (UNKNOWN_LOCATION
);
6724 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6725 g
= gimple_build_cond (NE_EXPR
, condv
,
6728 gimple_seq_add_stmt (ilist
, g
);
6729 gimple_seq_add_stmt (ilist
,
6730 gimple_build_label (lab1
));
6732 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6734 gimple_seq_add_stmt (ilist
, g
);
6735 gimplify_assign (new_var
, x
, ilist
);
6737 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6741 /* reduction(-:var) sums up the partial results, so it
6742 acts identically to reduction(+:var). */
6743 if (code
== MINUS_EXPR
)
6747 = (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
);
6748 tree new_vard
= new_var
;
6749 if (is_simd
&& omp_privatize_by_reference (var
))
6751 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6752 new_vard
= TREE_OPERAND (new_var
, 0);
6753 gcc_assert (DECL_P (new_vard
));
6755 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6757 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6758 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6761 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6765 if (new_vard
!= new_var
)
6767 SET_DECL_VALUE_EXPR (new_vard
,
6768 build_fold_addr_expr (lvar
));
6769 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6772 tree ref
= build_outer_var_ref (var
, ctx
);
6776 if (ctx
->for_simd_scan_phase
)
6778 gimplify_assign (ivar
, ref
, &llist
[0]);
6779 ref
= build_outer_var_ref (var
, ctx
);
6780 gimplify_assign (ref
, rvar
, &llist
[3]);
6784 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
6789 simt_lane
= create_tmp_var (unsigned_type_node
);
6790 x
= build_call_expr_internal_loc
6791 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
6792 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
6793 /* Make sure x is evaluated unconditionally. */
6794 tree bfly_var
= create_tmp_var (TREE_TYPE (ivar
));
6795 gimplify_assign (bfly_var
, x
, &llist
[2]);
6796 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, bfly_var
);
6797 gimplify_assign (ivar
, x
, &llist
[2]);
6803 tree zero
= build_zero_cst (TREE_TYPE (ivar
));
6804 ivar2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6805 boolean_type_node
, ivar
,
6807 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6808 boolean_type_node
, ref
,
6811 x
= build2 (code
, TREE_TYPE (ref
), ref2
, ivar2
);
6813 x
= fold_convert (TREE_TYPE (ref
), x
);
6814 ref
= build_outer_var_ref (var
, ctx
);
6815 gimplify_assign (ref
, x
, &llist
[1]);
6820 lower_private_allocate (var
, new_var
, allocator
,
6821 allocate_ptr
, ilist
, ctx
,
6823 if (omp_privatize_by_reference (var
) && is_simd
)
6824 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6825 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6826 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6828 gimplify_assign (new_var
, x
, ilist
);
6831 tree ref
= build_outer_var_ref (var
, ctx
);
6832 tree new_var2
= new_var
;
6836 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
6838 = fold_build2_loc (clause_loc
, NE_EXPR
,
6839 boolean_type_node
, new_var
,
6841 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6842 boolean_type_node
, ref
,
6845 x
= build2 (code
, TREE_TYPE (ref2
), ref2
, new_var2
);
6847 x
= fold_convert (TREE_TYPE (new_var
), x
);
6848 ref
= build_outer_var_ref (var
, ctx
);
6849 gimplify_assign (ref
, x
, dlist
);
6864 tree clobber
= build_clobber (TREE_TYPE (tskred_avar
));
6865 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
6868 if (known_eq (sctx
.max_vf
, 1U))
6870 sctx
.is_simt
= false;
6871 if (ctx
->lastprivate_conditional_map
)
6873 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
6875 /* Signal to lower_omp_1 that it should use parent context. */
6876 ctx
->combined_into_simd_safelen1
= true;
6877 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6878 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6879 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6881 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6882 omp_context
*outer
= ctx
->outer
;
6883 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
6884 outer
= outer
->outer
;
6885 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
6886 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
6887 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
6893 /* When not vectorized, treat lastprivate(conditional:) like
6894 normal lastprivate, as there will be just one simd lane
6895 writing the privatized variable. */
6896 delete ctx
->lastprivate_conditional_map
;
6897 ctx
->lastprivate_conditional_map
= NULL
;
6902 if (nonconst_simd_if
)
6904 if (sctx
.lane
== NULL_TREE
)
6906 sctx
.idx
= create_tmp_var (unsigned_type_node
);
6907 sctx
.lane
= create_tmp_var (unsigned_type_node
);
6909 /* FIXME: For now. */
6910 sctx
.is_simt
= false;
6913 if (sctx
.lane
|| sctx
.is_simt
)
6915 uid
= create_tmp_var (ptr_type_node
, "simduid");
6916 /* Don't want uninit warnings on simduid, it is always uninitialized,
6917 but we use it not for the value, but for the DECL_UID only. */
6918 suppress_warning (uid
, OPT_Wuninitialized
);
6919 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
6920 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
6921 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6922 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6924 /* Emit calls denoting privatized variables and initializing a pointer to
6925 structure that holds private variables as fields after ompdevlow pass. */
6928 sctx
.simt_eargs
[0] = uid
;
6930 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
6931 gimple_call_set_lhs (g
, uid
);
6932 gimple_seq_add_stmt (ilist
, g
);
6933 sctx
.simt_eargs
.release ();
6935 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
6936 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
6937 gimple_call_set_lhs (g
, simtrec
);
6938 gimple_seq_add_stmt (ilist
, g
);
6942 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
6943 2 + (nonconst_simd_if
!= NULL
),
6944 uid
, integer_zero_node
,
6946 gimple_call_set_lhs (g
, sctx
.lane
);
6947 gimple_stmt_iterator gsi
= gsi_start (*gimple_omp_body_ptr (ctx
->stmt
));
6948 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
6949 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
6950 build_int_cst (unsigned_type_node
, 0));
6951 gimple_seq_add_stmt (ilist
, g
);
6954 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6956 gimple_call_set_lhs (g
, sctx
.lastlane
);
6957 gimple_seq_add_stmt (dlist
, g
);
6958 gimple_seq_add_seq (dlist
, llist
[3]);
6960 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6963 tree simt_vf
= create_tmp_var (unsigned_type_node
);
6964 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
6965 gimple_call_set_lhs (g
, simt_vf
);
6966 gimple_seq_add_stmt (dlist
, g
);
6968 tree t
= build_int_cst (unsigned_type_node
, 1);
6969 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
6970 gimple_seq_add_stmt (dlist
, g
);
6972 t
= build_int_cst (unsigned_type_node
, 0);
6973 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6974 gimple_seq_add_stmt (dlist
, g
);
6976 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6977 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6978 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6979 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
6980 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
6982 gimple_seq_add_seq (dlist
, llist
[2]);
6984 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
6985 gimple_seq_add_stmt (dlist
, g
);
6987 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
6988 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
6989 gimple_seq_add_stmt (dlist
, g
);
6991 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
6993 for (int i
= 0; i
< 2; i
++)
6996 tree vf
= create_tmp_var (unsigned_type_node
);
6997 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
6998 gimple_call_set_lhs (g
, vf
);
6999 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
7000 gimple_seq_add_stmt (seq
, g
);
7001 tree t
= build_int_cst (unsigned_type_node
, 0);
7002 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
7003 gimple_seq_add_stmt (seq
, g
);
7004 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7005 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
7006 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7007 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
7008 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
7009 gimple_seq_add_seq (seq
, llist
[i
]);
7010 t
= build_int_cst (unsigned_type_node
, 1);
7011 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
7012 gimple_seq_add_stmt (seq
, g
);
7013 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
7014 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
7015 gimple_seq_add_stmt (seq
, g
);
7016 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
7021 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
7023 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
7024 gimple_seq_add_stmt (dlist
, g
);
7027 /* The copyin sequence is not to be executed by the main thread, since
7028 that would result in self-copies. Perhaps not visible to scalars,
7029 but it certainly is to C++ operator=. */
7032 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
7034 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
7035 build_int_cst (TREE_TYPE (x
), 0));
7036 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
7037 gimplify_and_add (x
, ilist
);
7040 /* If any copyin variable is passed by reference, we must ensure the
7041 master thread doesn't modify it before it is copied over in all
7042 threads. Similarly for variables in both firstprivate and
7043 lastprivate clauses we need to ensure the lastprivate copying
7044 happens after firstprivate copying in all threads. And similarly
7045 for UDRs if initializer expression refers to omp_orig. */
7046 if (copyin_by_ref
|| lastprivate_firstprivate
7047 || (reduction_omp_orig_ref
7048 && !ctx
->scan_inclusive
7049 && !ctx
->scan_exclusive
))
7051 /* Don't add any barrier for #pragma omp simd or
7052 #pragma omp distribute. */
7053 if (!is_task_ctx (ctx
)
7054 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
7055 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
7056 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
7059 /* If max_vf is non-zero, then we can use only a vectorization factor
7060 up to the max_vf we chose. So stick it into the safelen clause. */
7061 if (maybe_ne (sctx
.max_vf
, 0U))
7063 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
7064 OMP_CLAUSE_SAFELEN
);
7065 poly_uint64 safe_len
;
7067 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
7068 && maybe_gt (safe_len
, sctx
.max_vf
)))
7070 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
7071 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
7073 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
7074 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
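
/* To illustrate the safelen clamping just above (a sketch, not the exact
   clause list the compiler builds): if privatization capped max_vf at 16
   but the user wrote

     #pragma omp simd safelen(64)

   then a new safelen(16) clause is prepended to the GIMPLE_OMP_FOR's
   clause chain, so later passes behave as if the loop had been written
   with the smaller safelen.  */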
/* Create temporary variables for lastprivate(conditional:) implementation
   in context CTX with CLAUSES.  */

lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
  tree iter_type = NULL_TREE;
  tree cond_ptr = NULL_TREE;
  tree iter_var = NULL_TREE;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
  tree next = *clauses;
  for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	&& OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	  tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
	  if (iter_type == NULL_TREE)
	      iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
	      iter_var = create_tmp_var_raw (iter_type);
	      DECL_CONTEXT (iter_var) = current_function_decl;
	      DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
	      DECL_CHAIN (iter_var) = ctx->block_vars;
	      ctx->block_vars = iter_var;
		= build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
	      OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
	      OMP_CLAUSE_DECL (c3) = iter_var;
	      OMP_CLAUSE_CHAIN (c3) = *clauses;
	      ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	  next = OMP_CLAUSE_CHAIN (cc);
	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
	  ctx->lastprivate_conditional_map->put (o, v);
  if (iter_type == NULL)
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
	  struct omp_for_data fd;
	  omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
	  iter_type = unsigned_type_for (fd.iter_type);
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
	iter_type = unsigned_type_node;
      tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
	    = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
	  OMP_CLAUSE_DECL (c2) = cond_ptr;
	  cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
	  DECL_CONTEXT (cond_ptr) = current_function_decl;
	  DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
	  DECL_CHAIN (cond_ptr) = ctx->block_vars;
	  ctx->block_vars = cond_ptr;
	  c2 = build_omp_clause (UNKNOWN_LOCATION,
				 OMP_CLAUSE__CONDTEMP_);
	  OMP_CLAUSE_DECL (c2) = cond_ptr;
	  OMP_CLAUSE_CHAIN (c2) = *clauses;
      iter_var = create_tmp_var_raw (iter_type);
      DECL_CONTEXT (iter_var) = current_function_decl;
      DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
      DECL_CHAIN (iter_var) = ctx->block_vars;
      ctx->block_vars = iter_var;
	= build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
      OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
      OMP_CLAUSE_DECL (c3) = iter_var;
      OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
      OMP_CLAUSE_CHAIN (c2) = c3;
      ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	  tree v = create_tmp_var_raw (iter_type);
	  DECL_CONTEXT (v) = current_function_decl;
	  DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
	  DECL_CHAIN (v) = ctx->block_vars;
	  ctx->block_vars = v;
	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  ctx->lastprivate_conditional_map->put (o, v);
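
/* Conceptually, the temporaries created above implement

     #pragma omp for lastprivate(conditional: x)

   roughly as follows (a sketch; the compare-and-copy itself is emitted
   later by lower_lastprivate_clauses):

     iter_var = 0;                  // per-thread _condtemp_ counter
     ...
     x_priv = ...; iter_var = N;    // each guarded store records its
				    // iteration number N
     ...
     if (iter_var > *cond_ptr)      // at the end, the highest recorded
       { *cond_ptr = iter_var;      // iteration wins and its private
	 x_shared = x_priv; }       // value is copied out

   Here x_priv, x_shared and N are illustrative names only.  */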
/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  BODY_P is the sequence to insert early initialization
   if needed, STMT_LIST is where the non-conditional lastprivate handling
   goes into and CSTMT_LIST is a sequence that needs to be run in a critical
   section.  */

lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
			   gimple_seq *stmt_list, gimple_seq *cstmt_list,
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
  unsigned HOST_WIDE_INT conditional_off = 0;
  gimple_seq post_stmt_list = NULL;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
  if (clauses == NULL)
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
      if (ctx == NULL || !is_parallel_ctx (ctx))
      clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)

  bool maybe_simt = false;
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
      maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
      simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);

      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);
      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (g, c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));

  tree cond_ptr = NULL_TREE;
  for (c = clauses; c;)
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);
      gimple_seq *this_stmt_list = stmt_list;
      tree lab2 = NULL_TREE;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	  && ctx->lastprivate_conditional_map
	  && !ctx->combined_into_simd_safelen1)
	  gcc_assert (body_p);
	  if (cond_ptr == NULL_TREE)
	      cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
	      cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
	  tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  tree v = *ctx->lastprivate_conditional_map->get (o);
	  gimplify_assign (v, build_zero_cst (type), body_p);
	  this_stmt_list = cstmt_list;
	  if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
	      mem = build2 (MEM_REF, type, cond_ptr,
			    build_int_cst (TREE_TYPE (cond_ptr),
	      conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
	    mem = build4 (ARRAY_REF, type, cond_ptr,
			  size_int (conditional_off++), NULL_TREE, NULL_TREE);
	  tree mem2 = copy_node (mem);
	  gimple_seq seq = NULL;
	  mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (this_stmt_list, seq);
	  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
	  lab2 = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
	  gimple_seq_add_stmt (this_stmt_list, g);
	  gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
	  gimplify_assign (mem2, v, this_stmt_list);
	       && ctx->combined_into_simd_safelen1
	       && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	       && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	       && ctx->lastprivate_conditional_map)
	this_stmt_list = &post_stmt_list;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx->outer);
	      new_var = lookup_decl (var, ctx);
	      /* Avoid uninitialized warnings for lastprivate and
		 for linear iterators.  */
		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
		suppress_warning (new_var, OPT_Wuninitialized);

	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
		  if (lastlane == NULL)
		      lastlane = create_tmp_var (unsigned_type_node);
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (this_stmt_list, g);
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		  TREE_THIS_NOTRAP (new_var) = 1;
	  else if (maybe_simt)
	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
			  ? DECL_VALUE_EXPR (new_var)
	      if (simtlast == NULL)
		  simtlast = create_tmp_var (unsigned_type_node);
		  gcall *g = gimple_build_call_internal
		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		  gimple_call_set_lhs (g, simtlast);
		  gimple_seq_add_stmt (this_stmt_list, g);
	      x = build_call_expr_internal_loc
		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		 TREE_TYPE (val), 2, val, simtlast);
	      new_var = unshare_expr (new_var);
	      gimplify_assign (new_var, x, this_stmt_list);
	      new_var = unshare_expr (new_var);

	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;

	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
	      && is_taskloop_ctx (ctx))
	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
	      if (is_global_var (ovar))
	  x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
	  if (omp_privatize_by_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, this_stmt_list);

	gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));

      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);

    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
  gimple_seq_add_seq (stmt_list, post_stmt_list);
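
/* E.g. for a simple

     #pragma omp parallel for lastprivate(x)

   the sequence built above is roughly (a sketch; x_outer and x_priv are
   illustrative names):

     if (<this thread executed the last iteration>)
       {
       label_true:
	 x_outer = x_priv;
       }
     label:

   with the predicate supplied by the caller and the copy-out built via
   lang_hooks.decls.omp_clause_assign_op.  */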
/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
   (which might be a placeholder).  INNER is true if this is an inner
   axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
   join markers.  Generate the before-loop forking sequence in
   FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
   general form of these sequences is

     GOACC_REDUCTION_SETUP
     GOACC_FORK
     GOACC_REDUCTION_INIT
     ...
     GOACC_REDUCTION_FINI
     GOACC_JOIN
     GOACC_REDUCTION_TEARDOWN.  */

lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *private_marker, gcall *join,
		       gimple_seq *fork_seq, gimple_seq *join_seq,
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	/* No 'reduction' clauses on OpenACC 'kernels'.  */
	gcc_checking_assert (!is_oacc_kernels (ctx));
	/* Likewise, on OpenACC 'kernels' decomposed parts.  */
	gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	  incoming = outgoing = var;

	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
		enum gimple_code type = gimple_code (probe->stmt);

		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);

		  case GIMPLE_OMP_TARGET:
		    /* No 'reduction' clauses inside OpenACC 'kernels'
		       regions.  */
		    gcc_checking_assert (!is_oacc_kernels (probe));

		    if (!is_gimple_omp_offloaded (probe->stmt))

		    cls = gimple_omp_target_clauses (probe->stmt);

		for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))

	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_privatize_by_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		incoming = omp_reduction_init_op (loc, rcode, type);

		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		omp_context *c = ctx->outer;
		    t = maybe_lookup_decl (orig, c);
		incoming = outgoing = (t ? t : orig);

	  has_outer_reduction:;

	  ref_to_res = integer_zero_node;

	if (omp_privatize_by_reference (orig))
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);

	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);

	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);

	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  incoming, level, op, off);
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  v1, level, op, off);
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  v2, level, op, off);
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, v3, level, op, off);

	gimplify_assign (v1, setup_call, &before_fork);
	gimplify_assign (v2, init_call, &after_fork);
	gimplify_assign (v3, fini_call, &before_join);
	gimplify_assign (outgoing, teardown_call, &after_join);

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
    gimple_seq_add_stmt (fork_seq, private_marker);
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
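
/* For a single 'reduction (+:sum)' clause, the stitched sequences amount
   to roughly (a sketch of the internal-function calls built above, with
   the six arguments being code, ref_to_res, the value, level, op and the
   buffer offset):

     v1 = GOACC_REDUCTION (SETUP, ref_to_res, sum, level, +, offset);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, +, offset);
     ... loop body ...
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, +, offset);
     GOACC_JOIN
     sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, +, offset);  */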
7693 /* Generate code to implement the REDUCTION clauses, append it
7694 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7695 that should be emitted also inside of the critical section,
7696 in that case clear *CLIST afterwards, otherwise leave it as is
7697 and let the caller emit it itself. */
7700 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
7701 gimple_seq
*clist
, omp_context
*ctx
)
7703 gimple_seq sub_seq
= NULL
;
7708 /* OpenACC loop reductions are handled elsewhere. */
7709 if (is_gimple_omp_oacc (ctx
->stmt
))
7712 /* SIMD reductions are handled in lower_rec_input_clauses. */
7713 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7714 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7717 /* inscan reductions are handled elsewhere. */
7718 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
7721 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7722 update in that case, otherwise use a lock. */
7723 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
7724 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7725 && !OMP_CLAUSE_REDUCTION_TASK (c
))
7727 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
7728 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7730 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7740 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7742 tree var
, ref
, new_var
, orig_var
;
7743 enum tree_code code
;
7744 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7746 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7747 || OMP_CLAUSE_REDUCTION_TASK (c
))
7750 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
7751 orig_var
= var
= OMP_CLAUSE_DECL (c
);
7752 if (TREE_CODE (var
) == MEM_REF
)
7754 var
= TREE_OPERAND (var
, 0);
7755 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
7756 var
= TREE_OPERAND (var
, 0);
7757 if (TREE_CODE (var
) == ADDR_EXPR
)
7758 var
= TREE_OPERAND (var
, 0);
7761 /* If this is a pointer or referenced based array
7762 section, the var could be private in the outer
7763 context e.g. on orphaned loop construct. Pretend this
7764 is private variable's outer reference. */
7765 ccode
= OMP_CLAUSE_PRIVATE
;
7766 if (TREE_CODE (var
) == INDIRECT_REF
)
7767 var
= TREE_OPERAND (var
, 0);
7770 if (is_variable_sized (var
))
7772 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
7773 var
= DECL_VALUE_EXPR (var
);
7774 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
7775 var
= TREE_OPERAND (var
, 0);
7776 gcc_assert (DECL_P (var
));
7779 new_var
= lookup_decl (var
, ctx
);
7780 if (var
== OMP_CLAUSE_DECL (c
)
7781 && omp_privatize_by_reference (var
))
7782 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7783 ref
= build_outer_var_ref (var
, ctx
, ccode
);
7784 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
7786 /* reduction(-:var) sums up the partial results, so it acts
7787 identically to reduction(+:var). */
7788 if (code
== MINUS_EXPR
)
7791 bool is_truth_op
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
);
7794 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
7796 addr
= save_expr (addr
);
7797 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
7798 tree new_var2
= new_var
;
7802 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7803 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7804 boolean_type_node
, new_var
, zero
);
7805 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, boolean_type_node
,
7808 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (new_var2
), ref2
,
7811 x
= fold_convert (TREE_TYPE (new_var
), x
);
7812 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
7813 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
7814 gimplify_and_add (x
, stmt_seqp
);
7817 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7819 tree d
= OMP_CLAUSE_DECL (c
);
7820 tree type
= TREE_TYPE (d
);
7821 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_privatize_by_reference (var)
			  && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_privatize_by_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      tree out2 = out;
	      tree priv2 = priv;
	      if (is_truth_op)
		{
		  tree zero = build_zero_cst (TREE_TYPE (out));
		  out2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, out, zero);
		  priv2 = fold_build2_loc (clause_loc, NE_EXPR,
					   boolean_type_node, priv, zero);
		}
	      x = build2 (code, TREE_TYPE (out2), out2, priv2);
	      if (is_truth_op)
		x = fold_convert (TREE_TYPE (out), x);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_privatize_by_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  tree new_var2 = new_var;
	  tree ref2 = ref;
	  if (is_truth_op)
	    {
	      tree zero = build_zero_cst (TREE_TYPE (new_var));
	      new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, new_var, zero);
	      ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
				      ref, zero);
	    }
	  x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
	  if (is_truth_op)
	    x = fold_convert (TREE_TYPE (new_var), x);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  if (clist)
    {
      gimple_seq_add_seq (stmt_seqp, *clist);
      *clist = NULL;
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
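
/* For illustration only: the merge sequence assembled above is bracketed
   by GOMP_atomic_start/GOMP_atomic_end, so for a scalar reduction(+:s)
   the emitted shape is approximately (a sketch, not exact GIMPLE;
   s$priv stands for the thread's private copy):

	GOMP_atomic_start ();
	s = s + s$priv;
	GOMP_atomic_end ();  */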
/* Generate code to implement the COPYPRIVATE clauses.  */

static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
			   omp_context *ctx)
{
  tree c;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
	{
	  x = build_fold_addr_expr_loc (clause_loc, new_var);
	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
	}
      gimplify_assign (ref, x, slist);

      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
	{
	  ref = fold_convert_loc (clause_loc,
				  build_pointer_type (TREE_TYPE (new_var)),
				  ref);
	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
	}
      if (omp_privatize_by_reference (var))
	{
	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
	  ref = build_simple_mem_ref_loc (clause_loc, ref);
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	}
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}
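
/* For illustration only, what the two sequences built above amount to
   for '#pragma omp single copyprivate (a)' (a sketch; field and
   variable names are invented):

	SLIST, executed by the broadcasting thread:
	    .omp_copy_o.a = a;		(or = &a when by_ref)
	RLIST, executed by the receiving threads:
	    a = .omp_copy_i->a;  */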
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    suppress_warning (var);
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_privatize_by_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_privatize_by_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
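
/* For illustration only: for '#pragma omp task firstprivate (x)' the
   do_in path above emits approximately (a sketch; the field name is
   invented):

	.omp_data_o.x = x;	<- ilist, before the task spawn

   while a lastprivate scalar additionally takes the do_out path:

	x = .omp_data_o.x;	<- olist, after the region.  */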
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar
	  || !DECL_HAS_VALUE_EXPR_P (nvar)
	  || (ctx->allocate_map
	      && ctx->allocate_map->get (ovar)))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  x = build_sender_ref (ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
		 or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
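
/* For illustration only: an automatically shared 'int n' is marshalled
   by the code above roughly as (a sketch; field names are invented):

	.omp_data_o.n = &n;	<- ilist, when use_pointer_for_field
	.omp_data_o.n = n;	<- ilist, by value otherwise, plus
	n = .omp_data_o.n;	<- olist, unless N is read-only or a
				   DECL_BY_REFERENCE result/parm.  */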
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */

static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels = MAX (levels, 1);
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels = MAX (levels, 2);
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels = MAX (levels, 3);
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  tag |= OLF_REDUCTION;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    ;
  else if (is_oacc_kernels (tgt))
    /* Not using this loops handling inside OpenACC 'kernels' regions.  */
    gcc_unreachable ();
  else if (is_oacc_kernels_decomposed_part (tgt))
    ;
  else
    gcc_unreachable ();

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    tag |= OLF_INDEPENDENT;

  /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
     have an explicit 'seq' or 'independent' clause, and no 'auto' clause.  */
  if (tgt && is_oacc_kernels_decomposed_part (tgt))
    {
      gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
      gcc_assert (!(tag & OLF_AUTO));
    }

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning.  */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
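
/* For illustration only: for '#pragma acc loop gang vector' in a
   parallel region the call built above looks approximately like
   (a sketch of the internal-function GIMPLE, not its exact dump form):

	.data_dep = IFN_UNIQUE (OACC_HEAD_MARK, .data_dep, 3,
				OLF_DIM_GANG | OLF_DIM_VECTOR
				| OLF_INDEPENDENT);  */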
/* Emit an OpenACC loop head or tail marker to SEQ.  LEVEL is the
   partitioning level of the enclosed region.  */

static void
lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
			tree tofollow, gimple_seq *seq)
{
  int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
		     : IFN_UNIQUE_OACC_TAIL_MARK);
  tree marker = build_int_cst (integer_type_node, marker_kind);
  int nargs = 2 + (tofollow != NULL_TREE);
  gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
					    marker, ddvar, tofollow);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);
}
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  */

static void
lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);

  if (private_marker)
    {
      gimple_set_location (private_marker, loc);
      gimple_call_set_lhs (private_marker, ddvar);
      gimple_call_set_arg (private_marker, 1, ddvar);
    }

  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, (count == 1) ? private_marker : NULL,
			     join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
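
/* For illustration only: with two partitioning levels the sequences
   built above nest symmetrically, approximately (a sketch):

	HEAD:  HEAD_MARK ... FORK(level 1) ... FORK(level 2) HEAD_MARK(end)
	       loop body
	TAIL:  TAIL_MARK ... JOIN(level 2) ... JOIN(level 1) TAIL_MARK(end)

   Because each level's join sequence is prepended to TAIL, the
   innermost level is closed first.  */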
/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
   catch handler and return it.  This prevents programs from violating the
   structured block semantics with throws.  */

static gimple_seq
maybe_catch_exception (gimple_seq body)
{
  gimple *g;
  tree decl;

  if (!flag_exceptions)
    return body;

  if (lang_hooks.eh_protect_cleanup_actions != NULL)
    decl = lang_hooks.eh_protect_cleanup_actions ();
  else
    decl = builtin_decl_explicit (BUILT_IN_TRAP);

  g = gimple_build_eh_must_not_throw (decl);
  g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
			GIMPLE_TRY_CATCH);

  return gimple_seq_alloc_with_stmt (g);
}
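
/* For illustration only, the tree built above corresponds to the C++
   shape (a sketch):

	try { BODY; }
	catch (...) { <eh_protect_cleanup_actions or __builtin_trap> (); }

   with the handler marked MUST_NOT_THROW, so an exception escaping the
   structured block terminates the program instead of unwinding.  */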
/* Routines to lower OMP directives into OMP-GIMPLE.  */

/* If ctx is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
   and conditional branch to parallel's cancel_label to handle
   cancellation in the implicit barrier.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
	return;
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
	     && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
      return;
}
/* Find the first task_reduction or reduction clause or return NULL
   if there are none.  */

static tree
omp_task_reductions_find_first (tree clauses, enum tree_code code,
				enum omp_clause_code ccode)
{
  while (1)
    {
      clauses = omp_find_clause (clauses, ccode);
      if (clauses == NULL_TREE)
	return NULL_TREE;
      if (ccode != OMP_CLAUSE_REDUCTION
	  || code == OMP_TASKLOOP
	  || OMP_CLAUSE_REDUCTION_TASK (clauses))
	return clauses;
      clauses = OMP_CLAUSE_CHAIN (clauses);
    }
}

static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
				       gimple_seq *, gimple_seq *);
/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
  lower_lastprivate_conditional_clauses (clauses_ptr, ctx);

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  control = create_tmp_var (unsigned_type_node, ".section");
  gimple_omp_sections_set_control (stmt, control);

  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &ilist, &l, &clist, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
			   &clist, ctx);
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
      gimple_seq_add_seq (&olist, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
    }

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  t = gimple_build_omp_continue (control, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
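
/* For illustration only: after this function a sections construct has
   the approximate shape (a sketch; pass_expand_omp later maps it onto
   GOMP_sections_start/GOMP_sections_next calls):

	ILIST (copy-in, reduction registration)
	GIMPLE_OMP_SECTIONS <control = .section>
	GIMPLE_OMP_SECTIONS_SWITCH
	  GIMPLE_OMP_SECTION bodies, the last one marked last
	GIMPLE_OMP_CONTINUE (.section, .section)
	OLIST (reductions), DLIST (destructors)
	GIMPLE_OMP_RETURN [nowait]  */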
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, without a copyprivate clause:

	if (GOMP_single_start ())
	  BODY;
	[ GOMP_barrier (); ]	-> unless 'nowait' is present.

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
{
  location_t loc = gimple_location (single_stmt);
  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *call, *cond;
  tree lhs, decl;

  decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
  lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
  call = gimple_build_call (decl, 0);
  gimple_call_set_lhs (call, lhs);
  gimple_seq_add_stmt (pre_p, call);

  cond = gimple_build_cond (EQ_EXPR, lhs,
			    fold_convert_loc (loc, TREE_TYPE (lhs),
					      boolean_true_node),
			    tlabel, flabel);
  gimple_seq_add_stmt (pre_p, cond);
  gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
  gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
}
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

	{
	  if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	    {
	      BODY;
	      copyout.a = a;
	      copyout.b = b;
	      copyout.c = c;
	      GOMP_single_copy_end (&copyout);
	    }
	  else
	    {
	      a = copyout_p->a;
	      b = copyout_p->b;
	      c = copyout_p->c;
	    }
	  GOMP_barrier ();
	}

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			     &copyin_seq, ctx);

  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
/* Expand code for an OpenMP single directive.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
/* Lower code for an OMP scope directive.  */

static void
lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *scope_stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;
  gimple_seq tred_dlist = NULL;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;

  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
				      OMP_SCOPE, OMP_CLAUSE_REDUCTION);
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
      gimple_omp_scope_set_clauses (scope_stmt, c);
      lower_omp_task_reductions (ctx, OMP_SCOPE,
				 gimple_omp_scope_clauses (scope_stmt),
				 &bind_body, &tred_dlist);

      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
      gimple *stmt = gimple_build_call (fndecl, 1, temp);
      gimple_seq_add_stmt (&bind_body, stmt);
    }

  lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, scope_stmt);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));

  gimple_omp_set_body (scope_stmt, NULL);

  gimple_seq clist = NULL;
  lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
			   &bind_body, &clist, ctx);
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&bind_body, g);
      gimple_seq_add_seq (&bind_body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&bind_body, g);
    }

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  gimple_seq_add_seq (&bind_body_tail, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);

  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
/* Expand code for an OpenMP master or masked directive.  */

static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, lab = NULL, x, bfn_decl;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tseq;
  tree filter = integer_zero_node;

  push_gimplify_context ();

  if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
    {
      filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
				OMP_CLAUSE_FILTER);
      if (filter)
	filter = fold_convert (integer_type_node,
			       OMP_CLAUSE_FILTER_EXPR (filter));
      else
	filter = integer_zero_node;
    }
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  x = build_call_expr_loc (loc, bfn_decl, 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, filter);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  tseq = NULL;
  gimplify_and_add (x, &tseq);
  gimple_bind_add_seq (bind, tseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_label (lab));

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
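
/* For illustration only: master and masked share this shape; for
   '#pragma omp masked filter (f)' the bind built above contains
   approximately (a sketch):

	if (omp_get_thread_num () != (int) f) goto lab;
	BODY;
	lab:

   with FILTER fixed at 0 for plain master.  */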
/* Helper function for lower_omp_task_reductions.  For a specific PASS
   find the current clause that should be processed, or return false
   if all have been processed already.  */

static inline bool
omp_task_reduction_iterate (int pass, enum tree_code code,
			    enum omp_clause_code ccode, tree *c, tree *decl,
			    tree *type, tree *next)
{
  for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
    {
      if (ccode == OMP_CLAUSE_REDUCTION
	  && code != OMP_TASKLOOP
	  && !OMP_CLAUSE_REDUCTION_TASK (*c))
	continue;
      *decl = OMP_CLAUSE_DECL (*c);
      *type = TREE_TYPE (*decl);
      if (TREE_CODE (*decl) == MEM_REF)
	{
	  if (pass != 1)
	    continue;
	}
      else
	{
	  if (omp_privatize_by_reference (*decl))
	    *type = TREE_TYPE (*type);
	  if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
	    continue;
	}
      *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
      return true;
    }
  return false;
}
/* Lower task_reduction and reduction clauses (the latter unless CODE is
   OMP_TASKGROUP only with task modifier).  Register mapping of those in
   START sequence and reducing them and unregister them in the END sequence.  */

static void
lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
			   gimple_seq *start, gimple_seq *end)
{
  enum omp_clause_code ccode
    = (code == OMP_TASKGROUP
       ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
  tree cancellable = NULL_TREE;
  clauses = omp_task_reductions_find_first (clauses, code, ccode);
  if (clauses == NULL_TREE)
    return;
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
	if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	    && outer->cancellable)
	  {
	    cancellable = error_mark_node;
	    break;
	  }
	else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
		 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
	  break;
    }
  tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree *last = &TYPE_FIELDS (record_type);
  unsigned cnt = 0;
  if (cancellable)
    {
      tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			       ptr_type_node);
      tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
				ptr_type_node);
      *last = field;
      DECL_CHAIN (field) = ifield;
      last = &DECL_CHAIN (ifield);
      DECL_CONTEXT (field) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
      DECL_CONTEXT (ifield) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
    }
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  ++cnt;
	  tree new_type = type;
	  if (ctx->outer)
	    new_type = remap_type (type, &ctx->outer->cb);
	  tree field
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
			  new_type);
	  if (DECL_P (decl) && type == TREE_TYPE (decl))
	    {
	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	    }
	  else
	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
	  DECL_CONTEXT (field) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
	  *last = field;
	  last = &DECL_CHAIN (field);
	  tree bfield
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
			  boolean_type_node);
	  DECL_CONTEXT (bfield) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
	  *last = bfield;
	  last = &DECL_CHAIN (bfield);
	}
    }
  *last = NULL_TREE;
  layout_type (record_type);

  /* Build up an array which registers with the runtime all the reductions
     and deregisters them at the end.  Format documented in libgomp/task.c.  */
  tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
  tree avar = create_tmp_var_raw (atype);
  gimple_add_tmp_var (avar);
  TREE_ADDRESSABLE (avar) = 1;
  tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
		   NULL_TREE, NULL_TREE);
  tree t = build_int_cst (pointer_sized_int_node, cnt);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  gimple_seq seq = NULL;
  tree sz = fold_convert (pointer_sized_int_node,
			  TYPE_SIZE_UNIT (record_type));
  int cachesz = 64;
  sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, cachesz - 1));
  sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
  ctx->task_reductions.create (1 + cnt);
  ctx->task_reduction_map = new hash_map<tree, unsigned>;
  ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
				   ? sz : NULL_TREE);
  sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
  gimple_seq_add_seq (start, seq);
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
	      NULL_TREE, NULL_TREE);
  gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node,
		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, -1);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, 0);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

  /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
     and for each task reduction checks a bool right after the private variable
     within that thread's chunk; if the bool is clear, it hasn't been
     initialized and thus isn't going to be reduced nor destructed, otherwise
     reduce and destruct it.  */
  tree idx = create_tmp_var (size_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
  tree num_thr_sz = create_tmp_var (size_type_node);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = NULL_TREE, lab7 = NULL_TREE;
  gimple *g;
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      /* For worksharing constructs or scope, only perform it in the master
	 thread, with the exception of cancelled implicit barriers - then only
	 handle the current thread.  */
      tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
      tree thr_num = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, thr_num);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	{
	  tree c;
	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  if (code == OMP_FOR)
	    c = gimple_omp_for_clauses (ctx->stmt);
	  else if (code == OMP_SECTIONS)
	    c = gimple_omp_sections_clauses (ctx->stmt);
	  else /* if (code == OMP_SCOPE) */
	    c = gimple_omp_scope_clauses (ctx->stmt);
	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
	  cancellable = c;
	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
				 lab5, lab6);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab5));
	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
	  gimple_seq_add_stmt (end, g);
	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
				   build_one_cst (TREE_TYPE (idx)));
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_goto (lab3));
	  gimple_seq_add_stmt (end, gimple_build_label (lab6));
	}
      g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
      gimple_seq_add_stmt (end, g);
      gimple_seq_add_stmt (end, gimple_build_label (lab4));
    }
  if (code != OMP_PARALLEL)
    {
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
      tree num_thr = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, num_thr);
      gimple_seq_add_stmt (end, g);
      g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	gimple_seq_add_stmt (end, gimple_build_label (lab3));
    }
  else
    {
      tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
      t = fold_convert (size_type_node, t);
      gimplify_assign (num_thr_sz, t, end);
    }
  t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  tree data = create_tmp_var (pointer_sized_int_node);
  gimple_seq_add_stmt (end, gimple_build_assign (data, t));
  if (code == OMP_TASKLOOP)
    {
      lab7 = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, data,
			     build_zero_cst (pointer_sized_int_node),
			     lab1, lab7);
      gimple_seq_add_stmt (end, g);
    }
  gimple_seq_add_stmt (end, gimple_build_label (lab1));
  tree ptr;
  if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
    ptr = create_tmp_var (build_pointer_type (record_type));
  else
    ptr = create_tmp_var (ptr_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));

  tree field = TYPE_FIELDS (record_type);
  cnt = 0;
  if (cancellable)
    field = DECL_CHAIN (DECL_CHAIN (field));
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  tree var = decl, ref;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      tree v = var;
	      if (TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      else if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	      tree orig_var = var;
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (orig_var != var)
		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
	      else if (TREE_CODE (v) == ADDR_EXPR)
		t = build_fold_addr_expr (t);
	      else if (TREE_CODE (v) == INDIRECT_REF)
		t = build_fold_indirect_ref (t);
	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
		}
	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
				 fold_convert (size_type_node,
					       TREE_OPERAND (decl, 1)));
	    }
	  else
	    {
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (!omp_privatize_by_reference (decl))
		t = build_fold_addr_expr (t);
	    }
	  t = fold_convert (pointer_sized_int_node, t);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
	  t = unshare_expr (byte_position (field));
	  t = fold_convert (pointer_sized_int_node, t);
	  ctx->task_reduction_map->put (c, cnt);
	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
					   ? t : NULL_TREE);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

	  tree bfield = DECL_CHAIN (field);
	  tree cond;
	  if (code == OMP_PARALLEL
	      || code == OMP_FOR
	      || code == OMP_SECTIONS
	      || code == OMP_SCOPE)
	    /* In parallel, worksharing or scope all threads unconditionally
	       initialize all their task reduction private variables.  */
	    cond = boolean_true_node;
	  else if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
			     unshare_expr (byte_position (bfield)));
	      seq = NULL;
	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
			     build_int_cst (pbool, 0));
	    }
	  else
	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
	  tree condv = create_tmp_var (boolean_type_node);
	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
				 lab3, lab4);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab3));
	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
	    {
	      /* If this reduction doesn't need destruction and parallel
		 has been cancelled, there is nothing to do for this
		 reduction, so jump around the merge operation.  */
	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	      g = gimple_build_cond (NE_EXPR, cancellable,
				     build_zero_cst (TREE_TYPE (cancellable)),
				     lab4, lab5);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (lab5));
	    }

	  tree new_var;
	  if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
				unshare_expr (byte_position (field)));
	      seq = NULL;
	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (field));
	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
				build_int_cst (pbool, 0));
	    }
	  else
	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
			      build_simple_mem_ref (ptr), field, NULL_TREE);

	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	  if (TREE_CODE (decl) != MEM_REF
	      && omp_privatize_by_reference (decl))
	    ref = build_simple_mem_ref (ref);
	  /* reduction(-:var) sums up the partial results, so it acts
	     identically to reduction(+:var).  */
	  if (rcode == MINUS_EXPR)
	    rcode = PLUS_EXPR;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree type = TREE_TYPE (new_var);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      tree i = create_tmp_var (TREE_TYPE (v));
	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      if (DECL_P (v))
		{
		  v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  tree vv = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (vv, v, start);
		  v = vv;
		}
	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	      new_var = build_fold_addr_expr (new_var);
	      new_var = fold_convert (ptype, new_var);
	      ref = fold_convert (ptype, ref);
	      tree m = create_tmp_var (ptype);
	      gimplify_assign (m, new_var, end);
	      new_var = m;
	      m = create_tmp_var (ptype);
	      gimplify_assign (m, ref, end);
	      ref = m;
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (end, gimple_build_label (body));
	      tree priv = build_simple_mem_ref (new_var);
	      tree out = build_simple_mem_ref (ref);
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  tree lab6 = NULL_TREE;
		  if (cancellable)
		    {
		      /* If this reduction needs destruction and parallel
			 has been cancelled, jump around the merge operation
			 to the destruction.  */
		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
					     lab6, lab5);
		      gimple_seq_add_stmt (end, g);
		      gimple_seq_add_stmt (end, gimple_build_label (lab5));
		    }
		  SET_DECL_VALUE_EXPR (placeholder, out);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
		  gimple_seq_add_seq (end,
				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    {
		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
		    }
		  if (cancellable)
		    gimple_seq_add_stmt (end, gimple_build_label (lab6));
		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
		  if (x)
		    {
		      gimple_seq tseq = NULL;
		      gimplify_stmt (&x, &tseq);
		      gimple_seq_add_seq (end, tseq);
		    }
		}
	      else
		{
		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
		  out = unshare_expr (out);
		  gimplify_assign (out, x, end);
		}
	      gimple *g
		= gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (endl));
	    }
	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree oldv = NULL_TREE;
	      tree lab6 = NULL_TREE;
	      if (cancellable)
		{
		  /* If this reduction needs destruction and parallel
		     has been cancelled, jump around the merge operation
		     to the destruction.  */
		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
					 lab6, lab5);
		  gimple_seq_add_stmt (end, g);
		  gimple_seq_add_stmt (end, gimple_build_label (lab5));
		}
	      if (omp_privatize_by_reference (decl)
		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
						 TREE_TYPE (ref)))
		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      tree refv = create_tmp_var (TREE_TYPE (ref));
	      gimplify_assign (refv, ref, end);
	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
	      SET_DECL_VALUE_EXPR (placeholder, ref);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      tree d = maybe_lookup_decl (decl, ctx);
	      gcc_assert (d);
	      if (DECL_HAS_VALUE_EXPR_P (d))
		oldv = DECL_VALUE_EXPR (d);
	      if (omp_privatize_by_reference (var))
		{
		  tree v = fold_convert (TREE_TYPE (d),
					 build_fold_addr_expr (new_var));
		  SET_DECL_VALUE_EXPR (d, v);
		}
	      else
		SET_DECL_VALUE_EXPR (d, new_var);
	      DECL_HAS_VALUE_EXPR_P (d) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      if (oldv)
		SET_DECL_VALUE_EXPR (d, oldv);
	      else
		{
		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
		  DECL_HAS_VALUE_EXPR_P (d) = 0;
		}
	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      if (cancellable)
		gimple_seq_add_stmt (end, gimple_build_label (lab6));
	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;
		  gimplify_stmt (&x, &tseq);
		  gimple_seq_add_seq (end, tseq);
		}
	    }
	  else
	    {
	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
	      ref = unshare_expr (ref);
	      gimplify_assign (ref, x, end);
	    }
	  gimple_seq_add_stmt (end, gimple_build_label (lab4));
	  ++cnt;
	  field = DECL_CHAIN (bfield);
	}
    }

  if (code == OMP_TASKGROUP)
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
      gimple_seq_add_stmt (start, g);
    }
  else
    {
      tree c;
      if (code == OMP_FOR)
	c = gimple_omp_for_clauses (ctx->stmt);
      else if (code == OMP_SECTIONS)
	c = gimple_omp_sections_clauses (ctx->stmt);
      else if (code == OMP_SCOPE)
	c = gimple_omp_scope_clauses (ctx->stmt);
      else
	c = gimple_omp_taskreg_clauses (ctx->stmt);
      c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
			build_fold_addr_expr (avar));
      gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
    }

  gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
  gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
						 size_one_node));
  g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
  gimple_seq_add_stmt (end, g);
  gimple_seq_add_stmt (end, gimple_build_label (lab2));
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      enum built_in_function bfn
	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
      t = builtin_decl_explicit (bfn);
      tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
      tree arg;
      if (cancellable)
	{
	  arg = create_tmp_var (c_bool_type);
	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
							 cancellable));
	}
      else
	arg = build_int_cst (c_bool_type, 0);
      g = gimple_build_call (t, 1, arg);
    }
  else
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
    }
  gimple_seq_add_stmt (end, g);
  if (lab7)
    gimple_seq_add_stmt (end, gimple_build_label (lab7));
  t = build_constructor (atype, NULL);
  TREE_THIS_VOLATILE (t) = 1;
  gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
}
/* Expand code for an OpenMP taskgroup directive.  */

static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  gcall *x;
  gbind *bind;
  gimple_seq dseq = NULL;
  tree block = make_node (BLOCK);

  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  push_gimplify_context ();

  x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
			 0);
  gimple_bind_add_stmt (bind, x);

  lower_omp_task_reductions (ctx, OMP_TASKGROUP,
			     gimple_omp_taskgroup_clauses (stmt),
			     gimple_bind_body_ptr (bind), &dseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
  gimple_bind_add_seq (bind, dseq);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
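
/* For illustration only: the bind built above has the approximate
   shape (a sketch):

	GOMP_taskgroup_start ();
	[ task reduction registration from lower_omp_task_reductions ]
	BODY;
	GIMPLE_OMP_RETURN	<- expanded later to GOMP_taskgroup_end ()
	[ task reduction teardown sequence DSEQ ]  */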
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
      && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS
	      || OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
	    break;
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
  */
9820 /* FIXME: Computing GCD's where the first element is zero is
9821 non-trivial in the presence of collapsed loops. Do this later. */
9822 if (fd
.collapse
> 1)
9825 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
9827 /* wide_int is not a POD so it must be default-constructed. */
9828 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
9829 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
9831 tree folded_dep
= NULL_TREE
;
9832 /* TRUE if the first dimension's offset is negative. */
9833 bool neg_offset_p
= false;
9835 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9837 while ((c
= *list_p
) != NULL
)
9839 bool remove
= false;
9841 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
);
9842 if (OMP_CLAUSE_DOACROSS_KIND (c
) != OMP_CLAUSE_DOACROSS_SINK
)
9843 goto next_ordered_clause
;
9846 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
9847 vec
&& TREE_CODE (vec
) == TREE_LIST
;
9848 vec
= TREE_CHAIN (vec
), ++i
)
9850 gcc_assert (i
< len
);
9852 /* omp_extract_for_data has canonicalized the condition. */
9853 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
9854 || fd
.loops
[i
].cond_code
== GT_EXPR
);
9855 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
9856 bool maybe_lexically_later
= true;
9858 /* While the committee makes up its mind, bail if we have any
9859 non-constant steps. */
9860 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
9861 goto lower_omp_ordered_ret
;
9863 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
9864 if (POINTER_TYPE_P (itype
))
9866 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
9867 TYPE_PRECISION (itype
),
9870 /* Ignore invalid offsets that are not multiples of the step. */
9871 if (!wi::multiple_of_p (wi::abs (offset
),
9872 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
9875 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
9876 "ignoring sink clause with offset that is not "
9877 "a multiple of the loop step");
9879 goto next_ordered_clause
;
9882 /* Calculate the first dimension. The first dimension of
9883 the folded dependency vector is the GCD of the first
9884 elements, while ignoring any first elements whose offset
9888 /* Ignore dependence vectors whose first dimension is 0. */
9892 goto next_ordered_clause
;
9896 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
9898 error_at (OMP_CLAUSE_LOCATION (c
),
9899 "first offset must be in opposite direction "
9900 "of loop iterations");
9901 goto lower_omp_ordered_ret
;
9905 neg_offset_p
= forward
;
9906 /* Initialize the first time around. */
9907 if (folded_dep
== NULL_TREE
)
9910 folded_deps
[0] = offset
;
9913 folded_deps
[0] = wi::gcd (folded_deps
[0],
9917 /* Calculate minimum for the remaining dimensions. */
9920 folded_deps
[len
+ i
- 1] = offset
;
9921 if (folded_dep
== c
)
9922 folded_deps
[i
] = offset
;
9923 else if (maybe_lexically_later
9924 && !wi::eq_p (folded_deps
[i
], offset
))
9926 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
9930 for (j
= 1; j
<= i
; j
++)
9931 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
9934 maybe_lexically_later
= false;
9938 gcc_assert (i
== len
);
9942 next_ordered_clause
:
9944 *list_p
= OMP_CLAUSE_CHAIN (c
);
9946 list_p
= &OMP_CLAUSE_CHAIN (c
);
9952 folded_deps
[0] = -folded_deps
[0];
9954 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
9955 if (POINTER_TYPE_P (itype
))
9958 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
9959 = wide_int_to_tree (itype
, folded_deps
[0]);
9960 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
9961 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
9964 lower_omp_ordered_ret
:
9966 /* Ordered without clauses is #pragma omp threads, while we want
9967 a nop instead if we remove all clauses. */
9968 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
9969 gsi_replace (gsi_p
, gimple_build_nop (), true);
/* Expand code for an OpenMP ordered directive.  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (gimple_omp_ordered_standalone_p (ord_stmt))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
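
/* Illustration (not part of the pass): without SIMD/SIMT the bind built
   above wraps the body as

     GOMP_ordered_start ();
     <lowered body>
     GOMP_ordered_end ();

   For SIMD loops the bracketing calls are instead the internal functions
   .GOMP_SIMD_ORDERED_START/.GOMP_SIMD_ORDERED_END, and under SIMT an extra
   loop driven by .GOMP_SIMT_LANE, .GOMP_SIMT_ORDERED_PRED and
   .GOMP_SIMT_VOTE_ANY runs the body once per active lane, in lane order
   (a sketch; the exact temporaries are as constructed above).  */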
/* Expand code for an OpenMP scan directive and the structured block
   before the scan directive.  */

static void
lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  bool has_clauses
    = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
  tree lane = NULL_TREE;
  gimple_seq before = NULL;
  omp_context *octx = ctx->outer;
  gcc_assert (octx);
  if (octx->scan_exclusive && !has_clauses)
    {
      gimple_stmt_iterator gsi2 = *gsi_p;
      gsi_next (&gsi2);
      gimple *stmt2 = gsi_stmt (gsi2);
      /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
	 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
	 the one with exclusive clause(s), comes first.  */
      if (stmt2
	  && gimple_code (stmt2) == GIMPLE_OMP_SCAN
	  && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
	{
	  gsi_remove (gsi_p, false);
	  gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
	  ctx = maybe_lookup_ctx (stmt2);
	  gcc_assert (ctx);
	  lower_omp_scan (gsi_p, ctx);
	  return;
	}
    }

  bool input_phase = has_clauses ^ octx->scan_inclusive;
  bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
  bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
		 && !gimple_omp_for_combined_p (octx->stmt));
  bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
  if (is_for_simd && octx->for_simd_scan_phase)
    is_simd = false;
  if (is_simd)
    if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
				  OMP_CLAUSE__SIMDUID_))
      {
	tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
	lane = create_tmp_var (unsigned_type_node);
	tree t = build_int_cst (integer_type_node,
				input_phase ? 1
				: octx->scan_inclusive ? 2 : 3);
	gimple *g
	  = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
	gimple_call_set_lhs (g, lane);
	gimple_seq_add_stmt (&before, g);
      }

  if (is_simd || is_for)
    for (tree c = gimple_omp_for_clauses (octx->stmt);
	 c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	  && OMP_CLAUSE_REDUCTION_INSCAN (c))
	{
	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	  tree var = OMP_CLAUSE_DECL (c);
	  tree new_var = lookup_decl (var, octx);
	  tree val = new_var;
	  tree var2 = NULL_TREE;
	  tree var3 = NULL_TREE;
	  tree var4 = NULL_TREE;
	  tree lane0 = NULL_TREE;
	  tree new_vard = new_var;
	  if (omp_privatize_by_reference (var))
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      val = new_var;
	    }
	  if (DECL_HAS_VALUE_EXPR_P (new_vard))
	    {
	      val = DECL_VALUE_EXPR (new_vard);
	      if (new_vard != new_var)
		{
		  gcc_assert (TREE_CODE (val) == ADDR_EXPR);
		  val = TREE_OPERAND (val, 0);
		}
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0)))
		{
		  tree v = TREE_OPERAND (val, 0);
		  if (lookup_attribute ("omp simd array",
					DECL_ATTRIBUTES (v)))
		    {
		      val = unshare_expr (val);
		      lane0 = TREE_OPERAND (val, 1);
		      TREE_OPERAND (val, 1) = lane;
		      var2 = lookup_decl (v, octx);
		      if (octx->scan_exclusive)
			var4 = lookup_decl (var2, octx);
		      if (input_phase
			  && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
			var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
		      if (!input_phase)
			{
			  var2 = build4 (ARRAY_REF, TREE_TYPE (val),
					 var2, lane, NULL_TREE, NULL_TREE);
			  TREE_THIS_NOTRAP (var2) = 1;
			  if (octx->scan_exclusive)
			    {
			      var4 = build4 (ARRAY_REF, TREE_TYPE (val),
					     var4, lane, NULL_TREE,
					     NULL_TREE);
			      TREE_THIS_NOTRAP (var4) = 1;
			    }
			}
		      else
			var2 = val;
		    }
		}
	      gcc_assert (var2);
	    }
	  else
	    {
	      var2 = build_outer_var_ref (var, octx);
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  var3 = maybe_lookup_decl (new_vard, octx);
		  if (var3 == new_vard || var3 == NULL_TREE)
		    var3 = NULL_TREE;
		  else if (is_simd && octx->scan_exclusive && !input_phase)
		    {
		      var4 = maybe_lookup_decl (var3, octx);
		      if (var4 == var3 || var4 == NULL_TREE)
			{
			  if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
			    {
			      var4 = var3;
			      var3 = NULL_TREE;
			    }
			  else
			    var4 = NULL_TREE;
			}
		    }
		}
	      if (is_simd
		  && octx->scan_exclusive
		  && !input_phase
		  && var4 == NULL_TREE)
		var4 = create_tmp_var (TREE_TYPE (val));
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      if (input_phase)
		{
		  if (var3)
		    {
		      /* If we've added a separate identity element
			 variable, copy it over into val.  */
		      tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
								      var3);
		      gimplify_and_add (x, &before);
		    }
		  else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		    {
		      /* Otherwise, assign to it the identity element.  */
		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		      if (is_for)
			tseq = copy_gimple_seq_and_replace_locals (tseq);
		      tree ref = build_outer_var_ref (var, octx);
		      tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
				? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
		      if (x)
			{
			  if (new_vard != new_var)
			    val = build_fold_addr_expr_loc (clause_loc, val);
			  SET_DECL_VALUE_EXPR (new_vard, val);
			}
		      SET_DECL_VALUE_EXPR (placeholder, ref);
		      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		      lower_omp (&tseq, octx);
		      if (x)
			SET_DECL_VALUE_EXPR (new_vard, x);
		      SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		      gimple_seq_add_seq (&before, tseq);
		      if (is_simd)
			OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		    }
		}
	      else if (is_simd)
		{
		  tree x;
		  if (octx->scan_exclusive)
		    {
		      tree v4 = unshare_expr (var4);
		      tree v2 = unshare_expr (var2);
		      x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
		      gimplify_and_add (x, &before);
		    }
		  gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		  x = (DECL_HAS_VALUE_EXPR_P (new_vard)
		       ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
		  tree vexpr = val;
		  if (x && new_vard != new_var)
		    vexpr = build_fold_addr_expr_loc (clause_loc, val);
		  if (x)
		    SET_DECL_VALUE_EXPR (new_vard, vexpr);
		  SET_DECL_VALUE_EXPR (placeholder, var2);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  lower_omp (&tseq, octx);
		  gimple_seq_add_seq (&before, tseq);
		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		  if (x)
		    SET_DECL_VALUE_EXPR (new_vard, x);
		  SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		  if (octx->scan_inclusive)
		    {
		      x = lang_hooks.decls.omp_clause_assign_op (c, val,
								 var2);
		      gimplify_and_add (x, &before);
		    }
		  else if (lane0 == NULL_TREE)
		    {
		      x = lang_hooks.decls.omp_clause_assign_op (c, val,
								 var4);
		      gimplify_and_add (x, &before);
		    }
		}
	    }
	  else
	    {
	      if (input_phase)
		{
		  /* input phase.  Set val to initializer before
		     the body.  */
		  tree x = omp_reduction_init (c, TREE_TYPE (new_var));
		  gimplify_assign (val, x, &before);
		}
	      else if (is_simd)
		{
		  /* scan phase.  */
		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
		  if (code == MINUS_EXPR)
		    code = PLUS_EXPR;

		  tree x = build2 (code, TREE_TYPE (var2),
				   unshare_expr (var2), unshare_expr (val));
		  if (octx->scan_inclusive)
		    {
		      gimplify_assign (unshare_expr (var2), x, &before);
		      gimplify_assign (val, var2, &before);
		    }
		  else
		    {
		      gimplify_assign (unshare_expr (var4),
				       unshare_expr (var2), &before);
		      gimplify_assign (var2, x, &before);
		      if (lane0 == NULL_TREE)
			gimplify_assign (val, var4, &before);
		    }
		}
	    }
	  if (octx->scan_exclusive && !input_phase && lane0)
	    {
	      tree vexpr = unshare_expr (var4);
	      TREE_OPERAND (vexpr, 1) = lane0;
	      if (new_vard != new_var)
		vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
	      SET_DECL_VALUE_EXPR (new_vard, vexpr);
	    }
	}

  if (is_simd && !is_for_simd)
    {
      gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
      gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }
  lower_omp (gimple_omp_body_ptr (stmt), octx);
  if (before)
    {
      gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (stmt));
      gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
    }
}
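
/* Illustration (not part of the pass) of the construct this handles:

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
	 r += a[i];			// input phase
	 #pragma omp scan inclusive (r)
	 b[i] = r;			// scan phase
       }

   For the SIMD case the input phase accumulates into the per-lane element
   of the "omp simd array" and the scan phase reads the running value back;
   for exclusive scan an extra lane-indexed copy (VAR4 above) preserves the
   value from before the update.  Which phase a GIMPLE_OMP_SCAN starts is
   derived above from the presence of clauses combined with
   octx->scan_inclusive/scan_exclusive.  */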
/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case, it
   requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

static GTY(()) hash_map<tree, tree> *critical_name_mutexes;

static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
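
/* Illustration (not part of the pass):

     #pragma omp critical (lock1)
     body;

   becomes, in essence,

     GOMP_critical_name_start (&.gomp_critical_user_lock1);
     <lowered body>
     GOMP_critical_name_end (&.gomp_critical_user_lock1);

   where .gomp_critical_user_lock1 is the common symbol created above, so
   every translation unit (and every language front end) using the same
   name shares one mutex.  The unnamed form calls
   GOMP_critical_start/GOMP_critical_end instead.  */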
/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  *CLIST is for lastprivate(conditional:) code that needs
   to be emitted in a critical section.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, gimple_seq *clist,
			   struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  tree n2 = fd->loop.n2;
  if (fd->collapse > 1
      && TREE_CODE (n2) != INTEGER_CST
      && gimple_omp_for_combined_into_p (fd->for_stmt))
    {
      struct omp_context *taskreg_ctx = NULL;
      if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	{
	  gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	  if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
	      || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
	    {
	      if (gimple_omp_for_combined_into_p (gfor))
		{
		  gcc_assert (ctx->outer->outer
			      && is_parallel_ctx (ctx->outer->outer));
		  taskreg_ctx = ctx->outer->outer;
		}
	      else
		{
		  struct omp_for_data outer_fd;
		  omp_extract_for_data (gfor, &outer_fd, NULL);
		  n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		}
	    }
	  else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
	    taskreg_ctx = ctx->outer->outer;
	}
      else if (is_taskreg_ctx (ctx->outer))
	taskreg_ctx = ctx->outer;
      if (taskreg_ctx)
	{
	  int i;
	  tree taskreg_clauses
	    = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	  tree innerc = omp_find_clause (taskreg_clauses,
					 OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  int count = fd->collapse;
	  if (fd->non_rect
	      && fd->last_nonrect == fd->first_nonrect + 1)
	    if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
		count += 4;
	  for (i = 0; i < count; i++)
	    {
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	    }
	  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
				    OMP_CLAUSE__LOOPTEMP_);
	  if (innerc)
	    n2 = fold_convert (TREE_TYPE (n2),
			       lookup_decl (OMP_CLAUSE_DECL (innerc),
					    taskreg_ctx));
	}
    }
  cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
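
/* Illustration (not part of the pass): for

     #pragma omp for lastprivate (x)
     for (i = 0; i < n; i++) ...

   with step 1 the code produced here amounts to

     i = 0;			// appended to *BODY_P, so threads that get
				// no iterations still see a defined i
     ...loop...
     if (i == n)		// EQ_EXPR fast path for step +-1;
       x = x_private;		// otherwise i >= n resp. i <= n

   with the guarded write-back itself produced by
   lower_lastprivate_clauses, i.e. only the thread that ran the final
   iteration updates the original variable.  */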
/* OpenACC privatization.

   Or, in other words, *sharing* at the respective OpenACC level of
   parallelism.

   From a correctness perspective, a non-addressable variable can't be accessed
   outside the current thread, so it can go in a (faster than shared memory)
   register -- though that register may need to be broadcast in some
   circumstances.  A variable can only meaningfully be "shared" across workers
   or vector lanes if its address is taken, e.g. by a call to an atomic
   builtin.

   From an optimisation perspective, the answer might be fuzzier: maybe
   sometimes, using shared memory directly would be faster than
   broadcasting?  */

static void
oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
				       const location_t loc, const tree c,
				       const tree decl)
{
  const dump_user_location_t d_u_loc
    = dump_user_location_t::from_location_t (loc);
/* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat"
#endif
  dump_printf_loc (l_dump_flags, d_u_loc,
		   "variable %<%T%> ", decl);
#if __GNUC__ >= 10
# pragma GCC diagnostic pop
#endif
  if (c)
    dump_printf (l_dump_flags,
		 "in %qs clause ",
		 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
  else
    dump_printf (l_dump_flags,
		 "declared in block ");
}

static bool
oacc_privatization_candidate_p (const location_t loc, const tree c,
				const tree decl)
{
  dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();

  /* There is some differentiation depending on block vs. clause.  */
  const bool block = !c;

  bool res = true;

  if (res && !VAR_P (decl))
    {
      /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
	 privatized into a new VAR_DECL.  */
      gcc_checking_assert (TREE_CODE (decl) != PARM_DECL);

      res = false;

      if (dump_enabled_p ())
	{
	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
	  dump_printf (l_dump_flags,
		       "potentially has improper OpenACC privatization level: %qs\n",
		       get_tree_code_name (TREE_CODE (decl)));
	}
    }

  if (res && block && TREE_STATIC (decl))
    {
      res = false;

      if (dump_enabled_p ())
	{
	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
	  dump_printf (l_dump_flags,
		       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
		       "static");
	}
    }

  if (res && block && DECL_EXTERNAL (decl))
    {
      res = false;

      if (dump_enabled_p ())
	{
	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
	  dump_printf (l_dump_flags,
		       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
		       "external");
	}
    }

  if (res && !TREE_ADDRESSABLE (decl))
    {
      res = false;

      if (dump_enabled_p ())
	{
	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
	  dump_printf (l_dump_flags,
		       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
		       "not addressable");
	}
    }

  if (res)
    {
      if (dump_enabled_p ())
	{
	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
	  dump_printf (l_dump_flags,
		       "is candidate for adjusting OpenACC privatization level\n");
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      print_generic_decl (dump_file, decl, dump_flags);
      fprintf (dump_file, "\n");
    }

  return res;
}
/* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
   CTX.  */

static void
oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
{
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
      {
	tree decl = OMP_CLAUSE_DECL (c);

	tree new_decl = lookup_decl (decl, ctx);

	if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c,
					     new_decl))
	  continue;

	gcc_checking_assert
	  (!ctx->oacc_privatization_candidates.contains (new_decl));
	ctx->oacc_privatization_candidates.safe_push (new_decl);
      }
}

/* Scan DECLS for candidates for adjusting OpenACC privatization level in
   CTX.  */

static void
oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
{
  for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
    {
      tree new_decl = lookup_decl (decl, ctx);
      gcc_checking_assert (new_decl == decl);

      if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL,
					   new_decl))
	continue;

      gcc_checking_assert
	(!ctx->oacc_privatization_candidates.contains (new_decl));
      ctx->oacc_privatization_candidates.safe_push (new_decl);
    }
}
/* Callback for walk_gimple_seq.  Find #pragma omp scan statement.  */

static tree
omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
	  && gimple_omp_for_combined_into_p (stmt))
	*handled_ops_p = false;
      break;

    case GIMPLE_OMP_SCAN:
      *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
      return integer_zero_node;
    default:
      break;
    }
  return NULL;
}
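
/* Typical driving code for this callback (a sketch; it is the same pattern
   lower_omp_for_scan uses below):

     gimple_stmt_iterator scan_gsi = gsi_none ();
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.val_only = true;
     wi.info = (void *) &scan_gsi;
     walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
     if (!gsi_end_p (scan_gsi))
       ...		// scan_gsi now points at the GIMPLE_OMP_SCAN.

   Returning integer_zero_node above is what terminates the walk early.  */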
/* Helper function for lower_omp_for, add transformations for a worksharing
   loop with scan directives inside of it.
   For worksharing loop not combined with simd, transform:
   #pragma omp for reduction(inscan,+:r) private(i)
   for (i = 0; i < n; i = i + 1)
     {
       {
	 update (r);
       }
       #pragma omp scan inclusive(r)
       {
	 use (r);
       }
     }

   into two worksharing loops + code to merge results:

   num_threads = omp_get_num_threads ();
   thread_num = omp_get_thread_num ();
   if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
   <D.2099>:
   var2 = r;
   goto <D.2101>;
   <D.2100>:
   // For UDRs this is UDR init, or if ctors are needed, copy from
   // var3 that has been constructed to contain the neutral element.
   var2 = 0;
   <D.2101>:
   ivar = 0;
   // The _scantemp_ clauses will arrange for rpriva to be initialized to
   // a shared array with num_threads elements and rprivb to a local array
   // number of elements equal to the number of (contiguous) iterations the
   // current thread will perform.  controlb and controlp variables are
   // temporaries to handle deallocation of rprivb at the end of second
   // GOMP_FOR.
   #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
     _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
   for (i = 0; i < n; i = i + 1)
     {
       {
	 // For UDRs this is UDR init or copy from var3.
	 r = 0;
	 // This is the input phase from user code.
	 update (r);
       }
       {
	 // For UDRs this is UDR merge.
	 var2 = var2 + r;
	 // Rather than handing it over to the user, save to local thread's
	 // private array.
	 rprivb[ivar] = var2;
	 // For exclusive scan, the above two statements are swapped.
	 ivar = ivar + 1;
       }
     }
   // And remember the final value from this thread's into the shared
   // rpriva array.
   rpriva[(sizetype) thread_num] = var2;
   // If more than one thread, compute using Work-Efficient prefix sum
   // the inclusive parallel scan of the rpriva array.
   if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
   <D.2102>:
   GOMP_barrier ();
   down = 0;
   k = 1;
   num_threadsu = (unsigned int) num_threads;
   thread_numup1 = (unsigned int) thread_num + 1;
   <D.2108>:
   twok = k << 1;
   if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
   <D.2110>:
   down = 4294967295;
   k = k >> 1;
   if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
   <D.2112>:
   k = k >> 1;
   <D.2111>:
   twok = k << 1;
   cplx = .MUL_OVERFLOW (thread_nump1, twok);
   mul = REALPART_EXPR <cplx>;
   ovf = IMAGPART_EXPR <cplx>;
   if (ovf == 0) goto <D.2116>; else goto <D.2117>;
   <D.2116>:
   andv = k & down;
   andvm1 = andv + 4294967295;
   l = mul + andvm1;
   if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
   <D.2120>:
   // For UDRs this is UDR merge, performed using var2 variable as temporary,
   // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
   rpriva[l] = rpriva[l - k] + rpriva[l];
   <D.2117>:
   if (down == 0) goto <D.2121>; else goto <D.2122>;
   <D.2121>:
   k = k << 1;
   goto <D.2123>;
   <D.2122>:
   k = k >> 1;
   <D.2123>:
   GOMP_barrier ();
   if (k != 0) goto <D.2108>; else goto <D.2103>;
   <D.2103>:
   if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
   <D.2124>:
   // For UDRs this is UDR init or copy from var3.
   var2 = 0;
   goto <D.2126>;
   <D.2125>:
   var2 = rpriva[thread_num - 1];
   <D.2126>:
   ivar = 0;
   #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
     reduction(inscan,+:r) private(i)
   for (i = 0; i < n; i = i + 1)
     {
       {
	 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
	 r = var2 + rprivb[ivar];
       }
       {
	 // This is the scan phase from user code.
	 use (r);
	 // Plus a bump of the iterator.
	 ivar = ivar + 1;
       }
     }  */
10924 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
10925 struct omp_for_data
*fd
, omp_context
*ctx
)
10927 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
10928 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
10930 gimple_seq body
= gimple_omp_body (stmt
);
10931 gimple_stmt_iterator input1_gsi
= gsi_none ();
10932 struct walk_stmt_info wi
;
10933 memset (&wi
, 0, sizeof (wi
));
10934 wi
.val_only
= true;
10935 wi
.info
= (void *) &input1_gsi
;
10936 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
10937 gcc_assert (!gsi_end_p (input1_gsi
));
10939 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
10940 gimple_stmt_iterator gsi
= input1_gsi
;
10942 gimple_stmt_iterator scan1_gsi
= gsi
;
10943 gimple
*scan_stmt1
= gsi_stmt (gsi
);
10944 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
10946 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
10947 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
10948 gimple_omp_set_body (input_stmt1
, NULL
);
10949 gimple_omp_set_body (scan_stmt1
, NULL
);
10950 gimple_omp_set_body (stmt
, NULL
);
10952 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
10953 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
10954 gimple_omp_set_body (stmt
, body
);
10955 gimple_omp_set_body (input_stmt1
, input_body
);
10957 gimple_stmt_iterator input2_gsi
= gsi_none ();
10958 memset (&wi
, 0, sizeof (wi
));
10959 wi
.val_only
= true;
10960 wi
.info
= (void *) &input2_gsi
;
10961 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
10962 gcc_assert (!gsi_end_p (input2_gsi
));
10964 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
10967 gimple_stmt_iterator scan2_gsi
= gsi
;
10968 gimple
*scan_stmt2
= gsi_stmt (gsi
);
10969 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
10970 gimple_omp_set_body (scan_stmt2
, scan_body
);
10972 gimple_stmt_iterator input3_gsi
= gsi_none ();
10973 gimple_stmt_iterator scan3_gsi
= gsi_none ();
10974 gimple_stmt_iterator input4_gsi
= gsi_none ();
10975 gimple_stmt_iterator scan4_gsi
= gsi_none ();
10976 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
10977 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
10978 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
10981 memset (&wi
, 0, sizeof (wi
));
10982 wi
.val_only
= true;
10983 wi
.info
= (void *) &input3_gsi
;
10984 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
10985 gcc_assert (!gsi_end_p (input3_gsi
));
10987 input_stmt3
= gsi_stmt (input3_gsi
);
10991 scan_stmt3
= gsi_stmt (gsi
);
10992 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
10994 memset (&wi
, 0, sizeof (wi
));
10995 wi
.val_only
= true;
10996 wi
.info
= (void *) &input4_gsi
;
10997 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
10998 gcc_assert (!gsi_end_p (input4_gsi
));
11000 input_stmt4
= gsi_stmt (input4_gsi
);
11004 scan_stmt4
= gsi_stmt (gsi
);
11005 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
11007 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
11008 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
11011 tree num_threads
= create_tmp_var (integer_type_node
);
11012 tree thread_num
= create_tmp_var (integer_type_node
);
11013 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
11014 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
11015 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
11016 gimple_call_set_lhs (g
, num_threads
);
11017 gimple_seq_add_stmt (body_p
, g
);
11018 g
= gimple_build_call (threadnum_decl
, 0);
11019 gimple_call_set_lhs (g
, thread_num
);
11020 gimple_seq_add_stmt (body_p
, g
);
11022 tree ivar
= create_tmp_var (sizetype
);
11023 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
11024 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
11025 tree k
= create_tmp_var (unsigned_type_node
);
11026 tree l
= create_tmp_var (unsigned_type_node
);
11028 gimple_seq clist
= NULL
, mdlist
= NULL
;
11029 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
11030 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
11031 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
11032 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
11033 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11034 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11035 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
11037 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
11038 tree var
= OMP_CLAUSE_DECL (c
);
11039 tree new_var
= lookup_decl (var
, ctx
);
11040 tree var3
= NULL_TREE
;
11041 tree new_vard
= new_var
;
11042 if (omp_privatize_by_reference (var
))
11043 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
11044 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
11046 var3
= maybe_lookup_decl (new_vard
, ctx
);
11047 if (var3
== new_vard
)
11051 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
11052 tree rpriva
= create_tmp_var (ptype
);
11053 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
11054 OMP_CLAUSE_DECL (nc
) = rpriva
;
11056 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11058 tree rprivb
= create_tmp_var (ptype
);
11059 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
11060 OMP_CLAUSE_DECL (nc
) = rprivb
;
11061 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
11063 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11065 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
11066 if (new_vard
!= new_var
)
11067 TREE_ADDRESSABLE (var2
) = 1;
11068 gimple_add_tmp_var (var2
);
11070 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
11071 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11072 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11073 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11074 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11076 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
11077 thread_num
, integer_minus_one_node
);
11078 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
11079 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11080 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11081 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11082 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11084 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
11085 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11086 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11087 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11088 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11090 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
11091 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
11092 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11093 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11094 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11095 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11097 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
11098 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11099 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
11100 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11102 tree var4
= is_for_simd
? new_var
: var2
;
11103 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
11106 var5
= lookup_decl (var
, input_simd_ctx
);
11107 var6
= lookup_decl (var
, scan_simd_ctx
);
11108 if (new_vard
!= new_var
)
11110 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
11111 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
11114 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
11116 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
11119 x
= lang_hooks
.decls
.omp_clause_default_ctor
11120 (c
, var2
, build_outer_var_ref (var
, ctx
));
11122 gimplify_and_add (x
, &clist
);
11124 x
= build_outer_var_ref (var
, ctx
);
11125 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
11127 gimplify_and_add (x
, &thr01_list
);
11129 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
11130 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
11133 x
= unshare_expr (var4
);
11134 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11135 gimplify_and_add (x
, &thrn1_list
);
11136 x
= unshare_expr (var4
);
11137 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11138 gimplify_and_add (x
, &thr02_list
);
11140 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
11142 /* Otherwise, assign to it the identity element. */
11143 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11144 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11147 if (new_vard
!= new_var
)
11148 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11149 SET_DECL_VALUE_EXPR (new_vard
, val
);
11150 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11152 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
11153 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11154 lower_omp (&tseq
, ctx
);
11155 gimple_seq_add_seq (&thrn1_list
, tseq
);
11156 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11157 lower_omp (&tseq
, ctx
);
11158 gimple_seq_add_seq (&thr02_list
, tseq
);
11159 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11160 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11161 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
11163 SET_DECL_VALUE_EXPR (new_vard
, y
);
11166 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11167 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11171 x
= unshare_expr (var4
);
11172 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
11173 gimplify_and_add (x
, &thrn2_list
);
11177 x
= unshare_expr (rprivb_ref
);
11178 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
11179 gimplify_and_add (x
, &scan1_list
);
11183 if (ctx
->scan_exclusive
)
11185 x
= unshare_expr (rprivb_ref
);
11186 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11187 gimplify_and_add (x
, &scan1_list
);
11190 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11191 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11192 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11193 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11194 lower_omp (&tseq
, ctx
);
11195 gimple_seq_add_seq (&scan1_list
, tseq
);
11197 if (ctx
->scan_inclusive
)
11199 x
= unshare_expr (rprivb_ref
);
11200 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11201 gimplify_and_add (x
, &scan1_list
);
11205 x
= unshare_expr (rpriva_ref
);
11206 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
11207 unshare_expr (var4
));
11208 gimplify_and_add (x
, &mdlist
);
11210 x
= unshare_expr (is_for_simd
? var6
: new_var
);
11211 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
11212 gimplify_and_add (x
, &input2_list
);
11215 if (new_vard
!= new_var
)
11216 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11218 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11219 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11220 SET_DECL_VALUE_EXPR (new_vard
, val
);
11221 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11224 SET_DECL_VALUE_EXPR (placeholder
, var6
);
11225 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11228 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11229 lower_omp (&tseq
, ctx
);
11231 SET_DECL_VALUE_EXPR (new_vard
, y
);
11234 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11235 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11239 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
11240 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11241 lower_omp (&tseq
, ctx
);
11243 gimple_seq_add_seq (&input2_list
, tseq
);
11245 x
= build_outer_var_ref (var
, ctx
);
11246 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
11247 gimplify_and_add (x
, &last_list
);
11249 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
11250 gimplify_and_add (x
, &reduc_list
);
11251 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11252 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11254 if (new_vard
!= new_var
)
11255 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11256 SET_DECL_VALUE_EXPR (new_vard
, val
);
11257 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11258 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11259 lower_omp (&tseq
, ctx
);
11260 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
11261 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11262 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11264 SET_DECL_VALUE_EXPR (new_vard
, y
);
11267 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11268 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11270 gimple_seq_add_seq (&reduc_list
, tseq
);
11271 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
11272 gimplify_and_add (x
, &reduc_list
);
11274 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
11276 gimplify_and_add (x
, dlist
);
11280 x
= build_outer_var_ref (var
, ctx
);
11281 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
11283 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
11284 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
11286 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
11288 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
11290 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
11291 if (code
== MINUS_EXPR
)
11295 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
11298 if (ctx
->scan_exclusive
)
11299 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11301 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
11302 gimplify_assign (var2
, x
, &scan1_list
);
11303 if (ctx
->scan_inclusive
)
11304 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11308 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
11311 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
11312 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
11314 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
11317 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
11318 unshare_expr (rprival_ref
));
11319 gimplify_assign (rprival_ref
, x
, &reduc_list
);
11323 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11324 gimple_seq_add_stmt (&scan1_list
, g
);
11325 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11326 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11327 ? scan_stmt4
: scan_stmt2
), g
);
11329 tree controlb
= create_tmp_var (boolean_type_node
);
11330 tree controlp
= create_tmp_var (ptr_type_node
);
11331 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11332 OMP_CLAUSE_DECL (nc
) = controlb
;
11333 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11335 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11336 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11337 OMP_CLAUSE_DECL (nc
) = controlp
;
11338 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11340 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11341 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11342 OMP_CLAUSE_DECL (nc
) = controlb
;
11343 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11345 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11346 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11347 OMP_CLAUSE_DECL (nc
) = controlp
;
11348 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11350 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11352 *cp1
= gimple_omp_for_clauses (stmt
);
11353 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
11354 *cp2
= gimple_omp_for_clauses (new_stmt
);
11355 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
11359 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
11360 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
11362 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
11364 gsi_remove (&input3_gsi
, true);
11365 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
11367 gsi_remove (&scan3_gsi
, true);
11368 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
11370 gsi_remove (&input4_gsi
, true);
11371 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
11373 gsi_remove (&scan4_gsi
, true);
11377 gimple_omp_set_body (scan_stmt1
, scan1_list
);
11378 gimple_omp_set_body (input_stmt2
, input2_list
);
11381 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
11383 gsi_remove (&input1_gsi
, true);
11384 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
11386 gsi_remove (&scan1_gsi
, true);
11387 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
11389 gsi_remove (&input2_gsi
, true);
11390 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
11392 gsi_remove (&scan2_gsi
, true);
11394 gimple_seq_add_seq (body_p
, clist
);
11396 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11397 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11398 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11399 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11400 gimple_seq_add_stmt (body_p
, g
);
11401 g
= gimple_build_label (lab1
);
11402 gimple_seq_add_stmt (body_p
, g
);
11403 gimple_seq_add_seq (body_p
, thr01_list
);
11404 g
= gimple_build_goto (lab3
);
11405 gimple_seq_add_stmt (body_p
, g
);
11406 g
= gimple_build_label (lab2
);
11407 gimple_seq_add_stmt (body_p
, g
);
11408 gimple_seq_add_seq (body_p
, thrn1_list
);
11409 g
= gimple_build_label (lab3
);
11410 gimple_seq_add_stmt (body_p
, g
);
11412 g
= gimple_build_assign (ivar
, size_zero_node
);
11413 gimple_seq_add_stmt (body_p
, g
);
11415 gimple_seq_add_stmt (body_p
, stmt
);
11416 gimple_seq_add_seq (body_p
, body
);
11417 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
11420 g
= gimple_build_omp_return (true);
11421 gimple_seq_add_stmt (body_p
, g
);
11422 gimple_seq_add_seq (body_p
, mdlist
);
11424 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11425 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11426 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
11427 gimple_seq_add_stmt (body_p
, g
);
11428 g
= gimple_build_label (lab1
);
11429 gimple_seq_add_stmt (body_p
, g
);
11431 g
= omp_build_barrier (NULL
);
11432 gimple_seq_add_stmt (body_p
, g
);
11434 tree down
= create_tmp_var (unsigned_type_node
);
11435 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
11436 gimple_seq_add_stmt (body_p
, g
);
11438 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
11439 gimple_seq_add_stmt (body_p
, g
);
11441 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
11442 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
11443 gimple_seq_add_stmt (body_p
, g
);
11445 tree thread_numu
= create_tmp_var (unsigned_type_node
);
11446 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
11447 gimple_seq_add_stmt (body_p
, g
);
11449 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
11450 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
11451 build_int_cst (unsigned_type_node
, 1));
11452 gimple_seq_add_stmt (body_p
, g
);
11454 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11455 g
= gimple_build_label (lab3
);
11456 gimple_seq_add_stmt (body_p
, g
);
11458 tree twok
= create_tmp_var (unsigned_type_node
);
11459 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11460 gimple_seq_add_stmt (body_p
, g
);
11462 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
11463 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
11464 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
11465 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
11466 gimple_seq_add_stmt (body_p
, g
);
11467 g
= gimple_build_label (lab4
);
11468 gimple_seq_add_stmt (body_p
, g
);
11469 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
11470 gimple_seq_add_stmt (body_p
, g
);
11471 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11472 gimple_seq_add_stmt (body_p
, g
);
11474 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
11475 gimple_seq_add_stmt (body_p
, g
);
11476 g
= gimple_build_label (lab6
);
11477 gimple_seq_add_stmt (body_p
, g
);
11479 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11480 gimple_seq_add_stmt (body_p
, g
);
11482 g
= gimple_build_label (lab5
);
11483 gimple_seq_add_stmt (body_p
, g
);
11485 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11486 gimple_seq_add_stmt (body_p
, g
);
11488 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
11489 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
11490 gimple_call_set_lhs (g
, cplx
);
11491 gimple_seq_add_stmt (body_p
, g
);
11492 tree mul
= create_tmp_var (unsigned_type_node
);
11493 g
= gimple_build_assign (mul
, REALPART_EXPR
,
11494 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
11495 gimple_seq_add_stmt (body_p
, g
);
11496 tree ovf
= create_tmp_var (unsigned_type_node
);
11497 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
11498 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
11499 gimple_seq_add_stmt (body_p
, g
);
11501 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
11502 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
11503 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
11505 gimple_seq_add_stmt (body_p
, g
);
11506 g
= gimple_build_label (lab7
);
11507 gimple_seq_add_stmt (body_p
, g
);
11509 tree andv
= create_tmp_var (unsigned_type_node
);
11510 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
			   down);
  gimple_seq_add_stmt (body_p, g);
  tree andvm1 = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
			   build_minus_one_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
  gimple_seq_add_stmt (body_p, g);

  tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab9);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, reduc_list);
  g = gimple_build_label (lab8);
  gimple_seq_add_stmt (body_p, g);

  tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
			 lab10, lab11);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab10);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_goto (lab12);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab11);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab12);
  gimple_seq_add_stmt (body_p, g);

  g = omp_build_barrier (NULL);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
			 lab3, lab2);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);

  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node,
			 lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thr02_list);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thrn2_list);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (ivar, size_zero_node);
  gimple_seq_add_stmt (body_p, g);

  gimple_seq_add_stmt (body_p, new_stmt);
  gimple_seq_add_seq (body_p, new_body);

  gimple_seq new_dlist = NULL;
  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree num_threadsm1 = create_tmp_var (integer_type_node);
  g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
			   integer_minus_one_node);
  gimple_seq_add_stmt (&new_dlist, g);
  g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
  gimple_seq_add_stmt (&new_dlist, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&new_dlist, g);
  gimple_seq_add_seq (&new_dlist, last_list);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&new_dlist, g);
  gimple_seq_add_seq (&new_dlist, *dlist);
  *dlist = new_dlist;
}
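
/* For reference, a sketch of the kind of user loop that reaches the scan
   lowering above (not tied to any particular testcase):

     #pragma omp parallel for reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
	 r += a[i];
	 #pragma omp scan inclusive (r)
	 b[i] = r;
       }

   The sequence emitted above combines the per-thread partial results in a
   logarithmic number of barrier-separated rounds: K is shifted left while
   DOWN is zero and right afterwards, and each round merges in a
   neighbouring block, guarded by the L < NUM_THREADS check.  */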
/* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
   the addresses of variables to be made private at the surrounding
   parallelism level.  Such functions appear in the gimple code stream in two
   forms, e.g. for a partitioned loop:

      .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
      .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
      .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
      .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);

   or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
   not as part of a HEAD_MARK sequence:

      .UNIQUE (OACC_PRIVATE, 0, 0, &w);

   For such stand-alone appearances, the 3rd argument is always 0, denoting
   gang partitioning.  */

static gcall *
lower_oacc_private_marker (omp_context *ctx)
{
  if (ctx->oacc_privatization_candidates.length () == 0)
    return NULL;

  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
  args.quick_push (integer_zero_node);
  args.quick_push (integer_minus_one_node);

  int i;
  tree decl;
  FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
    {
      gcc_checking_assert (TREE_ADDRESSABLE (decl));
      tree addr = build_fold_addr_expr (decl);
      args.safe_push (addr);
    }

  return gimple_build_call_internal_vec (IFN_UNIQUE, args);
}
/* Lower code for an OMP loop directive.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  if (is_gimple_omp_oacc (ctx->stmt))
    oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      if (is_gimple_omp_oacc (ctx->stmt))
	oacc_privatization_scan_decl_chain (ctx, vars);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and its block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      size_t count2 = 0;
      tree type2 = NULL_TREE;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      if (fd.collapse > 1
	  && fd.non_rect
	  && fd.last_nonrect == fd.first_nonrect + 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      v = gimple_omp_for_index (stmt, fd.first_nonrect);
	      type2 = TREE_TYPE (v);
	      count2 = 3;
	    }
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count + count2; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (i >= count ? type2 : type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
					 ctx);

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  gcall *private_marker = NULL;
  if (is_gimple_omp_oacc (ctx->stmt)
      && !gimple_seq_empty_p (omp_for_body))
    private_marker = lower_oacc_private_marker (ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt), private_marker,
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);

  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  if ((ctx->scan_inclusive || ctx->scan_exclusive)
      && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
  else
    {
      gimple_seq_add_stmt (&body, stmt);
      gimple_seq_add_seq (&body, gimple_omp_body (stmt));
    }

  gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							 fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);

  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
      gimple_seq_add_seq (&body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
    }

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  if (rclauses)
    {
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  /* Region exit marker goes at the end of the loop body.  */
  gimple *g = gimple_build_omp_return (fd.have_nowait);
  gimple_seq_add_stmt (&body, g);

  gimple_seq_add_seq (&body, tred_dlist);

  maybe_add_implicit_barrier_cancel (ctx, g, &body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
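
/* Schematically (a sketch of the common case, ignoring the OpenACC
   head/tail markers and task reductions), the GIMPLE_BIND built above
   wraps:

     <privatized copies and copyin code>    (from lower_rec_input_clauses)
     <pre-body and lowered bound temporaries>
     #pragma omp for ...
       <lowered loop body>
     OMP_CONTINUE (V, V)
     <reduction merge code>                 (from lower_reduction_clauses)
     <lastprivate/destructor code>          (dlist)
     OMP_RETURN [nowait]

   pass_expand_omp later turns this region into explicit control flow plus
   libgomp calls.  */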
/* Callback for walk_stmts.  Check if the current statement only contains
   GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */

static tree
check_combined_parallel (gimple_stmt_iterator *gsi_p,
			 bool *handled_ops_p,
			 struct walk_stmt_info *wi)
{
  int *info = (int *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_DEBUG:
      break;
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
      *info = *info == 0 ? 1 : -1;
      break;
    default:
      *info = -1;
      break;
    }
  return NULL;
}
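
/* For example, in

     #pragma omp parallel
     #pragma omp for
     for (i = 0; i < n; i++)
       c[i] = a[i] + b[i];

   the walk finds exactly one GIMPLE_OMP_FOR and nothing else, so *INFO ends
   up 1 and the parallel is marked combined; with anything else in the body
   *INFO goes to -1 and the combined expansion is not used.  This is a
   sketch; the actual runtime entry point selection happens later, during
   expansion in omp-expand.cc.  */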
struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.cc (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  omp_context *ctx;
};

static tree
task_copyfn_copy_decl (tree var, copy_body_data *cb)
{
  struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;

  if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
    return create_tmp_var (TREE_TYPE (var));

  return var;
}
static tree
task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
{
  tree name, new_fields = NULL, type, f;

  type = lang_hooks.types.make_type (RECORD_TYPE);
  name = DECL_NAME (TYPE_NAME (orig_type));
  name = build_decl (gimple_location (tcctx->ctx->stmt),
		     TYPE_DECL, name, type);
  TYPE_NAME (type) = name;

  for (f = TYPE_FIELDS (orig_type); f; f = TREE_CHAIN (f))
    {
      tree new_f = copy_node (f);
      DECL_CONTEXT (new_f) = type;
      TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
      TREE_CHAIN (new_f) = new_fields;
      walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		 &tcctx->cb, NULL);
      new_fields = new_f;
      tcctx->cb.decl_map->put (f, new_f);
    }
  TYPE_FIELDS (type) = nreverse (new_fields);
  layout_type (type);
  return type;
}
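
/* A sketch of why this remapping is needed: for

     void f (int n)
     {
       int a[n];
       #pragma omp task firstprivate (a)
       use (a);
     }

   the field for A in the task record has a variably modified type whose
   size refers to a temporary living in F.  The copyfn is a separate
   function, so each such field must be rebuilt here with its DECL_SIZE /
   DECL_FIELD_OFFSET trees remapped onto the copyfn's own copies of those
   temporaries (initialized by the first pass in create_task_copyfn
   below).  */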
/* Create task copyfn.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  task_cpyfns.safe_push (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    tcctx.cb.decl_map = NULL;

  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	decl = OMP_CLAUSE_DECL (c);
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
	if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL)
		|| omp_privatize_by_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  {
	    if (ctx->allocate_map)
	      if (tree *allocatorp = ctx->allocate_map->get (decl))
		{
		  tree allocator = *allocatorp;
		  HOST_WIDE_INT ialign = 0;
		  if (TREE_CODE (allocator) == TREE_LIST)
		    {
		      ialign = tree_to_uhwi (TREE_VALUE (allocator));
		      allocator = TREE_PURPOSE (allocator);
		    }
		  if (TREE_CODE (allocator) != INTEGER_CST)
		    {
		      n = splay_tree_lookup (ctx->sfield_map,
					     (splay_tree_key) allocator);
		      allocator = (tree) n->value;
		      if (tcctx.cb.decl_map)
			allocator = *tcctx.cb.decl_map->get (allocator);
		      tree a = build_simple_mem_ref_loc (loc, sarg);
		      allocator = omp_build_component_ref (a, allocator);
		    }
		  allocator = fold_convert (pointer_sized_int_node, allocator);
		  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
		  tree align = build_int_cst (size_type_node,
					      MAX (ialign,
						   DECL_ALIGN_UNIT (decl)));
		  tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
		  tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
						  allocator);
		  ptr = fold_convert (TREE_TYPE (dst), ptr);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
		  append_to_statement_list (t, &list);
		  dst = build_simple_mem_ref_loc (loc, dst);
		}
	    t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  }
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
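
/* Roughly, for

     #pragma omp task shared (s) firstprivate (x)

   the copyfn generated above receives the destination record allocated by
   GOMP_task and the sender record built in the encountering thread, and
   emits the equivalent of

     dst->s = src->s;   // pointer to the shared variable
     dst->x = src->x;   // or the copy constructor for C++ types

   with variable sized firstprivates handled last, once their size
   temporaries and data pointers are in place.  (A sketch; see the three
   passes above.)  */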
static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  size_t cnt[5] = { 0, 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_INOUTSET:
	  cnt[4]++;
	  break;
	default:
	  gcc_unreachable ();
	}
  if (cnt[1] || cnt[3] || cnt[4])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3] + cnt[4];
  size_t inoutidx = total + idx;
  tree type = build_array_type_nelts (ptr_type_node,
				      total + idx + 2 * cnt[4]);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  for (i = 0; i < 5; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
	  {
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_INOUTSET:
		if (i != 4)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    if (i == 4)
	      {
		t = build4 (ARRAY_REF, ptr_type_node, array,
			    size_int (inoutidx), NULL_TREE, NULL_TREE);
		t = build_fold_addr_expr (t);
		inoutidx += 2;
	      }
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  if (cnt[4])
    for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	  && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_INOUTSET)
	{
	  tree t = OMP_CLAUSE_DECL (c);
	  t = fold_convert (ptr_type_node, t);
	  gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	  r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
		      NULL_TREE, NULL_TREE);
	  g = gimple_build_assign (r, t);
	  gimple_seq_add_stmt (iseq, g);
	  t = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
	  r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
		      NULL_TREE, NULL_TREE);
	  g = gimple_build_assign (r, t);
	  gimple_seq_add_stmt (iseq, g);
	}

  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  tree clobber = build_clobber (type);
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
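
/* The array built above follows the layout libgomp expects for the depend
   argument.  Derived from the code above (a sketch): with only
   in/out/inout dependences,

     array[0] = total number of addresses
     array[1] = number of out/inout addresses
     array[2], ...  the addresses themselves

   and once mutexinoutset/depobj/inoutset dependences are involved,

     array[0] = 0   (marks the extended format)
     array[1] = total
     array[2..4] = per-kind counts
     array[5], ...  the addresses

   with each inoutset dependence contributing an extra address/flag pair
   (GOMP_DEPEND_INOUTSET) at the end.  */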
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (dep_bind, dep_ilist);
	  gimple_bind_add_stmt (dep_bind, stmt);
	  gimple_bind_add_seq (dep_bind, dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			      OMP_CLAUSE__REDUCTEMP_)))
    {
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
							: OMP_PARALLEL,
				 gimple_omp_taskreg_clauses (ctx->stmt),
				 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) != GIMPLE_OMP_TASK)
    lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
  gimple_omp_set_body (stmt, new_body);

  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
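
/* As an illustration (a sketch of the common case), for

     #pragma omp parallel shared (s) firstprivate (x)

   the sender side built above fills

     .omp_data_o.s = &s;
     .omp_data_o.x = x;

   before the GIMPLE_OMP_PARALLEL, while the new body starts by
   initializing the receiver pointer from the sender record's address, so
   that references to S and X in the child have been rewritten into loads
   through the receiver record by the lowering above.  */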
12653 /* Lower the GIMPLE_OMP_TARGET in the current statement
12654 in GSI_P. CTX holds context information for the directive. */
12657 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12660 tree child_fn
, t
, c
;
12661 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
12662 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
12663 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
12664 location_t loc
= gimple_location (stmt
);
12665 bool offloaded
, data_region
;
12666 unsigned int map_cnt
= 0;
12667 tree in_reduction_clauses
= NULL_TREE
;
12669 offloaded
= is_gimple_omp_offloaded (stmt
);
12670 switch (gimple_omp_target_kind (stmt
))
12672 case GF_OMP_TARGET_KIND_REGION
:
12674 q
= &in_reduction_clauses
;
12675 for (p
= gimple_omp_target_clauses_ptr (stmt
); *p
; )
12676 if (OMP_CLAUSE_CODE (*p
) == OMP_CLAUSE_IN_REDUCTION
)
12679 q
= &OMP_CLAUSE_CHAIN (*q
);
12680 *p
= OMP_CLAUSE_CHAIN (*p
);
12683 p
= &OMP_CLAUSE_CHAIN (*p
);
12685 *p
= in_reduction_clauses
;
12687 case GF_OMP_TARGET_KIND_UPDATE
:
12688 case GF_OMP_TARGET_KIND_ENTER_DATA
:
12689 case GF_OMP_TARGET_KIND_EXIT_DATA
:
12690 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
12691 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
12692 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
12693 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
12694 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
12695 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
12696 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
12697 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
12698 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
12699 data_region
= false;
12701 case GF_OMP_TARGET_KIND_DATA
:
12702 case GF_OMP_TARGET_KIND_OACC_DATA
:
12703 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
12704 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
12705 data_region
= true;
12708 gcc_unreachable ();
12711 /* Ensure that requires map is written via output_offload_tables, even if only
12712 'target (enter/exit) data' is used in the translation unit. */
12713 if (ENABLE_OFFLOADING
&& (omp_requires_mask
& OMP_REQUIRES_TARGET_USED
))
12714 g
->have_offload
= true;
12716 clauses
= gimple_omp_target_clauses (stmt
);
12718 gimple_seq dep_ilist
= NULL
;
12719 gimple_seq dep_olist
= NULL
;
12720 bool has_depend
= omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
) != NULL_TREE
;
12721 if (has_depend
|| in_reduction_clauses
)
12723 push_gimplify_context ();
12724 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12726 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
12727 &dep_ilist
, &dep_olist
);
12728 if (in_reduction_clauses
)
12729 lower_rec_input_clauses (in_reduction_clauses
, &dep_ilist
, &dep_olist
,
12737 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
12738 tgt_body
= gimple_bind_body (tgt_bind
);
12740 else if (data_region
)
12741 tgt_body
= gimple_omp_body (stmt
);
12742 child_fn
= ctx
->cb
.dst_fn
;
12744 push_gimplify_context ();
12747 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12748 switch (OMP_CLAUSE_CODE (c
))
12754 case OMP_CLAUSE_MAP
:
12756 /* First check what we're prepared to handle in the following. */
12757 switch (OMP_CLAUSE_MAP_KIND (c
))
12759 case GOMP_MAP_ALLOC
:
12761 case GOMP_MAP_FROM
:
12762 case GOMP_MAP_TOFROM
:
12763 case GOMP_MAP_POINTER
:
12764 case GOMP_MAP_TO_PSET
:
12765 case GOMP_MAP_DELETE
:
12766 case GOMP_MAP_RELEASE
:
12767 case GOMP_MAP_ALWAYS_TO
:
12768 case GOMP_MAP_ALWAYS_FROM
:
12769 case GOMP_MAP_ALWAYS_TOFROM
:
12770 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
12771 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
12772 case GOMP_MAP_STRUCT
:
12773 case GOMP_MAP_ALWAYS_POINTER
:
12774 case GOMP_MAP_ATTACH
:
12775 case GOMP_MAP_DETACH
:
12776 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
12777 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
12779 case GOMP_MAP_IF_PRESENT
:
12780 case GOMP_MAP_FORCE_ALLOC
:
12781 case GOMP_MAP_FORCE_TO
:
12782 case GOMP_MAP_FORCE_FROM
:
12783 case GOMP_MAP_FORCE_TOFROM
:
12784 case GOMP_MAP_FORCE_PRESENT
:
12785 case GOMP_MAP_FORCE_DEVICEPTR
:
12786 case GOMP_MAP_DEVICE_RESIDENT
:
12787 case GOMP_MAP_LINK
:
12788 case GOMP_MAP_FORCE_DETACH
:
12789 gcc_assert (is_gimple_omp_oacc (stmt
));
12792 gcc_unreachable ();
12796 case OMP_CLAUSE_TO
:
12797 case OMP_CLAUSE_FROM
:
12799 var
= OMP_CLAUSE_DECL (c
);
12802 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
12803 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12804 && (OMP_CLAUSE_MAP_KIND (c
)
12805 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
12810 if (DECL_SIZE (var
)
12811 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12813 tree var2
= DECL_VALUE_EXPR (var
);
12814 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12815 var2
= TREE_OPERAND (var2
, 0);
12816 gcc_assert (DECL_P (var2
));
12821 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12822 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12823 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
12825 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12827 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
12828 && varpool_node::get_create (var
)->offloadable
)
12831 tree type
= build_pointer_type (TREE_TYPE (var
));
12832 tree new_var
= lookup_decl (var
, ctx
);
12833 x
= create_tmp_var_raw (type
, get_name (new_var
));
12834 gimple_add_tmp_var (x
);
12835 x
= build_simple_mem_ref (x
);
12836 SET_DECL_VALUE_EXPR (new_var
, x
);
12837 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12842 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12843 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12844 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
12845 && is_omp_target (stmt
))
12847 gcc_assert (maybe_lookup_field (c
, ctx
));
12852 if (!maybe_lookup_field (var
, ctx
))
12855 /* Don't remap compute constructs' reduction variables, because the
12856 intermediate result must be local to each gang. */
12857 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12858 && is_gimple_omp_oacc (ctx
->stmt
)
12859 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
12861 x
= build_receiver_ref (var
, true, ctx
);
12862 tree new_var
= lookup_decl (var
, ctx
);
12864 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12865 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
12866 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12867 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12868 x
= build_simple_mem_ref (x
);
12869 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12871 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
12872 if (omp_privatize_by_reference (new_var
)
12873 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
12874 || DECL_BY_REFERENCE (var
)))
12876 /* Create a local object to hold the instance
12878 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
12879 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
12880 tree inst
= create_tmp_var (type
, id
);
12881 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
12882 x
= build_fold_addr_expr (inst
);
12884 gimplify_assign (new_var
, x
, &fplist
);
12886 else if (DECL_P (new_var
))
12888 SET_DECL_VALUE_EXPR (new_var
, x
);
12889 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12892 gcc_unreachable ();
12897 case OMP_CLAUSE_FIRSTPRIVATE
:
12898 omp_firstprivate_recv
:
12899 gcc_checking_assert (offloaded
);
12900 if (is_gimple_omp_oacc (ctx
->stmt
))
12902 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12903 gcc_checking_assert (!is_oacc_kernels (ctx
));
12904 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12905 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12907 goto oacc_firstprivate
;
12910 var
= OMP_CLAUSE_DECL (c
);
12911 if (!omp_privatize_by_reference (var
)
12912 && !is_gimple_reg_type (TREE_TYPE (var
)))
12914 tree new_var
= lookup_decl (var
, ctx
);
12915 if (is_variable_sized (var
))
12917 tree pvar
= DECL_VALUE_EXPR (var
);
12918 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12919 pvar
= TREE_OPERAND (pvar
, 0);
12920 gcc_assert (DECL_P (pvar
));
12921 tree new_pvar
= lookup_decl (pvar
, ctx
);
12922 x
= build_fold_indirect_ref (new_pvar
);
12923 TREE_THIS_NOTRAP (x
) = 1;
12926 x
= build_receiver_ref (var
, true, ctx
);
12927 SET_DECL_VALUE_EXPR (new_var
, x
);
12928 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12930 /* Fortran array descriptors: firstprivate of data + attach. */
12931 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
12932 && lang_hooks
.decls
.omp_array_data (var
, true))
12936 case OMP_CLAUSE_PRIVATE
:
12937 gcc_checking_assert (offloaded
);
12938 if (is_gimple_omp_oacc (ctx
->stmt
))
12940 /* No 'private' clauses on OpenACC 'kernels'. */
12941 gcc_checking_assert (!is_oacc_kernels (ctx
));
12942 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12943 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12947 var
= OMP_CLAUSE_DECL (c
);
12948 if (is_variable_sized (var
))
12950 tree new_var
= lookup_decl (var
, ctx
);
12951 tree pvar
= DECL_VALUE_EXPR (var
);
12952 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12953 pvar
= TREE_OPERAND (pvar
, 0);
12954 gcc_assert (DECL_P (pvar
));
12955 tree new_pvar
= lookup_decl (pvar
, ctx
);
12956 x
= build_fold_indirect_ref (new_pvar
);
12957 TREE_THIS_NOTRAP (x
) = 1;
12958 SET_DECL_VALUE_EXPR (new_var
, x
);
12959 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12963 case OMP_CLAUSE_USE_DEVICE_PTR
:
12964 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12965 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
12966 case OMP_CLAUSE_IS_DEVICE_PTR
:
12967 var
= OMP_CLAUSE_DECL (c
);
12968 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
12970 while (TREE_CODE (var
) == INDIRECT_REF
12971 || TREE_CODE (var
) == ARRAY_REF
)
12972 var
= TREE_OPERAND (var
, 0);
12973 if (lang_hooks
.decls
.omp_array_data (var
, true))
12974 goto omp_firstprivate_recv
;
12977 if (is_variable_sized (var
))
12979 tree new_var
= lookup_decl (var
, ctx
);
12980 tree pvar
= DECL_VALUE_EXPR (var
);
12981 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12982 pvar
= TREE_OPERAND (pvar
, 0);
12983 gcc_assert (DECL_P (pvar
));
12984 tree new_pvar
= lookup_decl (pvar
, ctx
);
12985 x
= build_fold_indirect_ref (new_pvar
);
12986 TREE_THIS_NOTRAP (x
) = 1;
12987 SET_DECL_VALUE_EXPR (new_var
, x
);
12988 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12990 else if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12991 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
12992 && !omp_privatize_by_reference (var
)
12993 && !omp_is_allocatable_or_ptr (var
)
12994 && !lang_hooks
.decls
.omp_array_data (var
, true))
12995 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12997 tree new_var
= lookup_decl (var
, ctx
);
12998 tree type
= build_pointer_type (TREE_TYPE (var
));
12999 x
= create_tmp_var_raw (type
, get_name (new_var
));
13000 gimple_add_tmp_var (x
);
13001 x
= build_simple_mem_ref (x
);
13002 SET_DECL_VALUE_EXPR (new_var
, x
);
13003 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
13007 tree new_var
= lookup_decl (var
, ctx
);
13008 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
13009 gimple_add_tmp_var (x
);
13010 SET_DECL_VALUE_EXPR (new_var
, x
);
13011 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
13018 target_nesting_level
++;
13019 lower_omp (&tgt_body
, ctx
);
13020 target_nesting_level
--;
13022 else if (data_region
)
13023 lower_omp (&tgt_body
, ctx
);
13027 /* Declare all the variables created by mapping and the variables
13028 declared in the scope of the target body. */
13029 record_vars_into (ctx
->block_vars
, child_fn
);
13030 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
13031 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
13036 if (ctx
->record_type
)
13039 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
13040 DECL_NAMELESS (ctx
->sender_decl
) = 1;
13041 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
13042 t
= make_tree_vec (3);
13043 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
13044 TREE_VEC_ELT (t
, 1)
13045 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
13046 ".omp_data_sizes");
13047 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
13048 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
13049 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
13050 tree tkind_type
= short_unsigned_type_node
;
13051 int talign_shift
= 8;
13052 TREE_VEC_ELT (t
, 2)
13053 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
13054 ".omp_data_kinds");
13055 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
13056 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
13057 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
13058 gimple_omp_target_set_data_arg (stmt
, t
);
13060 vec
<constructor_elt
, va_gc
> *vsize
;
13061 vec
<constructor_elt
, va_gc
> *vkind
;
13062 vec_alloc (vsize
, map_cnt
);
13063 vec_alloc (vkind
, map_cnt
);
13064 unsigned int map_idx
= 0;
13066 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13067 switch (OMP_CLAUSE_CODE (c
))
13069 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
13070 unsigned int talign
;
13075 case OMP_CLAUSE_MAP
:
13076 case OMP_CLAUSE_TO
:
13077 case OMP_CLAUSE_FROM
:
13078 oacc_firstprivate_map
:
13080 ovar
= OMP_CLAUSE_DECL (c
);
13081 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13082 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13083 || (OMP_CLAUSE_MAP_KIND (c
)
13084 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
13086 if (!DECL_P (ovar
))
13088 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13089 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
13091 nc
= OMP_CLAUSE_CHAIN (c
);
13092 gcc_checking_assert (OMP_CLAUSE_DECL (nc
)
13093 == get_base_address (ovar
));
13094 ovar
= OMP_CLAUSE_DECL (nc
);
13098 tree x
= build_sender_ref (ovar
, ctx
);
13100 if (in_reduction_clauses
13101 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13102 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
13104 v
= unshare_expr (v
);
13106 while (handled_component_p (*p
)
13107 || TREE_CODE (*p
) == INDIRECT_REF
13108 || TREE_CODE (*p
) == ADDR_EXPR
13109 || TREE_CODE (*p
) == MEM_REF
13110 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
13111 p
= &TREE_OPERAND (*p
, 0);
13113 if (is_variable_sized (d
))
13115 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
13116 d
= DECL_VALUE_EXPR (d
);
13117 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
13118 d
= TREE_OPERAND (d
, 0);
13119 gcc_assert (DECL_P (d
));
13122 = (splay_tree_key
) &DECL_CONTEXT (d
);
13123 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
13128 *p
= build_fold_indirect_ref (nd
);
13130 v
= build_fold_addr_expr_with_type (v
, ptr_type_node
);
13131 gimplify_assign (x
, v
, &ilist
);
13137 if (DECL_SIZE (ovar
)
13138 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
13140 tree ovar2
= DECL_VALUE_EXPR (ovar
);
13141 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
13142 ovar2
= TREE_OPERAND (ovar2
, 0);
13143 gcc_assert (DECL_P (ovar2
));
13146 if (!maybe_lookup_field (ovar
, ctx
)
13147 && !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13148 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13149 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)))
13153 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
13154 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
13155 talign
= DECL_ALIGN_UNIT (ovar
);
13160 if (in_reduction_clauses
13161 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13162 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
13165 if (is_variable_sized (d
))
13167 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
13168 d
= DECL_VALUE_EXPR (d
);
13169 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
13170 d
= TREE_OPERAND (d
, 0);
13171 gcc_assert (DECL_P (d
));
13174 = (splay_tree_key
) &DECL_CONTEXT (d
);
13175 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
13180 var
= build_fold_indirect_ref (nd
);
13183 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13186 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13187 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13188 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
13189 && is_omp_target (stmt
))
13191 x
= build_sender_ref (c
, ctx
);
13192 gimplify_assign (x
, build_fold_addr_expr (var
), &ilist
);
13196 x
= build_sender_ref (ovar
, ctx
);
13198 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13199 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
13200 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
13201 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
13203 gcc_assert (offloaded
);
13205 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
13206 mark_addressable (avar
);
13207 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
13208 talign
= DECL_ALIGN_UNIT (avar
);
13209 avar
= build_fold_addr_expr (avar
);
13210 gimplify_assign (x
, avar
, &ilist
);
13212 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13214 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
13215 if (!omp_privatize_by_reference (var
))
13217 if (is_gimple_reg (var
)
13218 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13219 suppress_warning (var
);
13220 var
= build_fold_addr_expr (var
);
13223 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13224 gimplify_assign (x
, var
, &ilist
);
13226 else if (is_gimple_reg (var
))
13228 gcc_assert (offloaded
);
13229 tree avar
= create_tmp_var (TREE_TYPE (var
));
13230 mark_addressable (avar
);
13231 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
13232 if (GOMP_MAP_COPY_TO_P (map_kind
)
13233 || map_kind
== GOMP_MAP_POINTER
13234 || map_kind
== GOMP_MAP_TO_PSET
13235 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13237 /* If we need to initialize a temporary
13238 with VAR because it is not addressable, and
13239 the variable hasn't been initialized yet, then
13240 we'll get a warning for the store to avar.
13241 Don't warn in that case, the mapping might
13243 suppress_warning (var
, OPT_Wuninitialized
);
13244 gimplify_assign (avar
, var
, &ilist
);
13246 avar
= build_fold_addr_expr (avar
);
13247 gimplify_assign (x
, avar
, &ilist
);
13248 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
13249 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13250 && !TYPE_READONLY (TREE_TYPE (var
)))
13252 x
= unshare_expr (x
);
13253 x
= build_simple_mem_ref (x
);
13254 gimplify_assign (var
, x
, &olist
);
13259 /* While MAP is handled explicitly by the FE,
13260 for 'target update', only the identified is passed. */
13261 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
13262 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
13263 && (omp_is_allocatable_or_ptr (var
)
13264 && omp_check_optional_argument (var
, false)))
13265 var
= build_fold_indirect_ref (var
);
13266 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
13267 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
13268 || (!omp_is_allocatable_or_ptr (var
)
13269 && !omp_check_optional_argument (var
, false)))
13270 var
= build_fold_addr_expr (var
);
13271 gimplify_assign (x
, var
, &ilist
);
13275 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13277 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13278 s
= TREE_TYPE (ovar
);
13279 if (TREE_CODE (s
) == REFERENCE_TYPE
13280 || omp_check_optional_argument (ovar
, false))
13282 s
= TYPE_SIZE_UNIT (s
);
13285 s
= OMP_CLAUSE_SIZE (c
);
13286 if (s
== NULL_TREE
)
13287 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13288 s
= fold_convert (size_type_node
, s
);
13289 purpose
= size_int (map_idx
++);
13290 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13291 if (TREE_CODE (s
) != INTEGER_CST
)
13292 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13294 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
13295 switch (OMP_CLAUSE_CODE (c
))
13297 case OMP_CLAUSE_MAP
:
13298 tkind
= OMP_CLAUSE_MAP_KIND (c
);
13299 tkind_zero
= tkind
;
13300 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
13303 case GOMP_MAP_ALLOC
:
13304 case GOMP_MAP_IF_PRESENT
:
13306 case GOMP_MAP_FROM
:
13307 case GOMP_MAP_TOFROM
:
13308 case GOMP_MAP_ALWAYS_TO
:
13309 case GOMP_MAP_ALWAYS_FROM
:
13310 case GOMP_MAP_ALWAYS_TOFROM
:
13311 case GOMP_MAP_RELEASE
:
13312 case GOMP_MAP_FORCE_TO
:
13313 case GOMP_MAP_FORCE_FROM
:
13314 case GOMP_MAP_FORCE_TOFROM
:
13315 case GOMP_MAP_FORCE_PRESENT
:
13316 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
13318 case GOMP_MAP_DELETE
:
13319 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
13323 if (tkind_zero
!= tkind
)
13325 if (integer_zerop (s
))
13326 tkind
= tkind_zero
;
13327 else if (integer_nonzerop (s
))
13328 tkind_zero
= tkind
;
13330 if (tkind_zero
== tkind
13331 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c
)
13332 && (((tkind
& GOMP_MAP_FLAG_SPECIAL_BITS
)
13333 & ~GOMP_MAP_IMPLICIT
)
13336 /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
13337 bits are not interfered by other special bit encodings,
13338 then turn the GOMP_IMPLICIT_BIT flag on for the runtime
13340 tkind
|= GOMP_MAP_IMPLICIT
;
13341 tkind_zero
= tkind
;
13344 case OMP_CLAUSE_FIRSTPRIVATE
:
13345 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13346 tkind
= GOMP_MAP_TO
;
13347 tkind_zero
= tkind
;
13349 case OMP_CLAUSE_TO
:
13350 tkind
= GOMP_MAP_TO
;
13351 tkind_zero
= tkind
;
13353 case OMP_CLAUSE_FROM
:
13354 tkind
= GOMP_MAP_FROM
;
13355 tkind_zero
= tkind
;
13358 gcc_unreachable ();
13360 gcc_checking_assert (tkind
13361 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13362 gcc_checking_assert (tkind_zero
13363 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13364 talign
= ceil_log2 (talign
);
13365 tkind
|= talign
<< talign_shift
;
13366 tkind_zero
|= talign
<< talign_shift
;
13367 gcc_checking_assert (tkind
13368 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13369 gcc_checking_assert (tkind_zero
13370 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13371 if (tkind
== tkind_zero
)
13372 x
= build_int_cstu (tkind_type
, tkind
);
13375 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
13376 x
= build3 (COND_EXPR
, tkind_type
,
13377 fold_build2 (EQ_EXPR
, boolean_type_node
,
13378 unshare_expr (s
), size_zero_node
),
13379 build_int_cstu (tkind_type
, tkind_zero
),
13380 build_int_cstu (tkind_type
, tkind
));
13382 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
	break;

      case OMP_CLAUSE_FIRSTPRIVATE:
      omp_has_device_addr_descr:
	if (is_gimple_omp_oacc (ctx->stmt))
	  goto oacc_firstprivate_map;
	ovar = OMP_CLAUSE_DECL (c);
	if (omp_privatize_by_reference (ovar))
	  talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
	else
	  talign = DECL_ALIGN_UNIT (ovar);
	var = lookup_decl_in_outer_ctx (ovar, ctx);
	x = build_sender_ref (ovar, ctx);
	tkind = GOMP_MAP_FIRSTPRIVATE;
	type = TREE_TYPE (ovar);
	if (omp_privatize_by_reference (ovar))
	  type = TREE_TYPE (type);
	if ((INTEGRAL_TYPE_P (type)
	     && TYPE_PRECISION (type) <= POINTER_SIZE)
	    || TREE_CODE (type) == POINTER_TYPE)
	  {
	    tkind = GOMP_MAP_FIRSTPRIVATE_INT;
	    tree t = var;
	    if (omp_privatize_by_reference (var))
	      t = build_simple_mem_ref (var);
	    else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
	      suppress_warning (var);
	    if (TREE_CODE (type) != POINTER_TYPE)
	      t = fold_convert (pointer_sized_int_node, t);
	    t = fold_convert (TREE_TYPE (x), t);
	    gimplify_assign (x, t, &ilist);
	  }
	else if (omp_privatize_by_reference (var))
	  gimplify_assign (x, var, &ilist);
	else if (is_gimple_reg (var))
	  {
	    tree avar = create_tmp_var (TREE_TYPE (var));
	    mark_addressable (avar);
	    if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
	      suppress_warning (var);
	    gimplify_assign (avar, var, &ilist);
	    avar = build_fold_addr_expr (avar);
	    gimplify_assign (x, avar, &ilist);
	  }
	else
	  {
	    var = build_fold_addr_expr (var);
	    gimplify_assign (x, var, &ilist);
	  }
	if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
	  s = size_int (0);
	else if (omp_privatize_by_reference (ovar))
	  s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
	else
	  s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
	s = fold_convert (size_type_node, s);
	purpose = size_int (map_idx++);
	CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	if (TREE_CODE (s) != INTEGER_CST)
	  TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;

	gcc_checking_assert (tkind
			     < (HOST_WIDE_INT_C (1U) << talign_shift));
	talign = ceil_log2 (talign);
	tkind |= talign << talign_shift;
	gcc_checking_assert (tkind
			     <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				build_int_cstu (tkind_type, tkind));
	/* Fortran array descriptors: firstprivate of data + attach.  */
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
	    && lang_hooks.decls.omp_array_data (ovar, true))
	  {
	    tree not_null_lb, null_lb, after_lb;
	    tree var1, var2, size1, size2;
	    tree present = omp_check_optional_argument (ovar, true);
	    if (present)
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		not_null_lb = create_artificial_label (clause_loc);
		null_lb = create_artificial_label (clause_loc);
		after_lb = create_artificial_label (clause_loc);
		gimple_seq seq = NULL;
		present = force_gimple_operand (present, &seq, true,
						NULL_TREE);
		gimple_seq_add_seq (&ilist, seq);
		gimple_seq_add_stmt (&ilist,
				     gimple_build_cond_from_tree (present,
						     not_null_lb, null_lb));
		gimple_seq_add_stmt (&ilist,
				     gimple_build_label (not_null_lb));
	      }
	    var1 = lang_hooks.decls.omp_array_data (var, false);
	    size1 = lang_hooks.decls.omp_array_size (var, &ilist);
	    var2 = build_fold_addr_expr (x);
	    if (!POINTER_TYPE_P (TREE_TYPE (var)))
	      var = build_fold_addr_expr (var);
	    size2 = fold_build2 (POINTER_DIFF_EXPR, ssizetype,
				 build_fold_addr_expr (var1), var);
	    size2 = fold_convert (sizetype, size2);
	    if (present)
	      {
		tree tmp = create_tmp_var (TREE_TYPE (var1));
		gimplify_assign (tmp, var1, &ilist);
		var1 = tmp;
		tmp = create_tmp_var (TREE_TYPE (var2));
		gimplify_assign (tmp, var2, &ilist);
		var2 = tmp;
		tmp = create_tmp_var (TREE_TYPE (size1));
		gimplify_assign (tmp, size1, &ilist);
		size1 = tmp;
		tmp = create_tmp_var (TREE_TYPE (size2));
		gimplify_assign (tmp, size2, &ilist);
		size2 = tmp;
		gimple_seq_add_stmt (&ilist, gimple_build_goto (after_lb));
		gimple_seq_add_stmt (&ilist, gimple_build_label (null_lb));
		gimplify_assign (var1, null_pointer_node, &ilist);
		gimplify_assign (var2, null_pointer_node, &ilist);
		gimplify_assign (size1, size_zero_node, &ilist);
		gimplify_assign (size2, size_zero_node, &ilist);
		gimple_seq_add_stmt (&ilist, gimple_build_label (after_lb));
	      }
	    x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
	    gimplify_assign (x, var1, &ilist);
	    tkind = GOMP_MAP_FIRSTPRIVATE;
	    talign = DECL_ALIGN_UNIT (ovar);
	    talign = ceil_log2 (talign);
	    tkind |= talign << talign_shift;
	    gcc_checking_assert (tkind
				 <= tree_to_uhwi (
				      TYPE_MAX_VALUE (tkind_type)));
	    purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, size1);
	    if (TREE_CODE (size1) != INTEGER_CST)
	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				    build_int_cstu (tkind_type, tkind));
	    x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
	    gimplify_assign (x, var2, &ilist);
	    tkind = GOMP_MAP_ATTACH;
	    purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, size2);
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				    build_int_cstu (tkind_type, tkind));
	  }
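	/* Illustration (an editorial sketch, not part of the original
	   sources): for a Fortran assumed-shape dummy A, the block above
	   emits two extra map entries — a GOMP_MAP_FIRSTPRIVATE of the
	   descriptor's data pointer and element count (VAR1/SIZE1), keyed
	   off &DECL_NAME (ovar), followed by a GOMP_MAP_ATTACH whose size
	   SIZE2 is the byte offset of the data-pointer field within the
	   descriptor, so the device copy of the descriptor has its base
	   pointer rewired on the target.  For an absent OPTIONAL argument,
	   both entries degenerate to NULL/0 via the NULL_LB path.  */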
	break;

      case OMP_CLAUSE_USE_DEVICE_PTR:
      case OMP_CLAUSE_USE_DEVICE_ADDR:
      case OMP_CLAUSE_HAS_DEVICE_ADDR:
      case OMP_CLAUSE_IS_DEVICE_PTR:
	ovar = OMP_CLAUSE_DECL (c);
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
	  {
	    if (lang_hooks.decls.omp_array_data (ovar, true))
	      goto omp_has_device_addr_descr;
	    while (TREE_CODE (ovar) == INDIRECT_REF
		   || TREE_CODE (ovar) == ARRAY_REF)
	      ovar = TREE_OPERAND (ovar, 0);
	  }
	var = lookup_decl_in_outer_ctx (ovar, ctx);

	if (lang_hooks.decls.omp_array_data (ovar, true))
	  {
	    tkind = ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
		      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
		     ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
	    x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
		 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
	  {
	    tkind = GOMP_MAP_USE_DEVICE_PTR;
	    x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
	  }
	else
	  {
	    tkind = GOMP_MAP_FIRSTPRIVATE_INT;
	    x = build_sender_ref (ovar, ctx);
	  }

	if (is_gimple_omp_oacc (ctx->stmt))
	  {
	    gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);

	    if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
	      tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
	  }

	type = TREE_TYPE (ovar);
	if (lang_hooks.decls.omp_array_data (ovar, true))
	  var = lang_hooks.decls.omp_array_data (var, false);
	else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		  && !omp_privatize_by_reference (ovar)
		  && !omp_is_allocatable_or_ptr (ovar))
		 || TREE_CODE (type) == ARRAY_TYPE)
	  var = build_fold_addr_expr (var);
	else
	  {
	    if (omp_privatize_by_reference (ovar)
		|| omp_check_optional_argument (ovar, false)
		|| omp_is_allocatable_or_ptr (ovar))
	      {
		type = TREE_TYPE (type);
		if (POINTER_TYPE_P (type)
		    && TREE_CODE (type) != ARRAY_TYPE
		    && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
			 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
			 && !omp_is_allocatable_or_ptr (ovar))
			|| (omp_privatize_by_reference (ovar)
			    && omp_is_allocatable_or_ptr (ovar))))
		  var = build_simple_mem_ref (var);
		var = fold_convert (TREE_TYPE (x), var);
	      }
	  }

	tree present = omp_check_optional_argument (ovar, true);
	if (present)
	  {
	    tree null_label = create_artificial_label (UNKNOWN_LOCATION);
	    tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
	    tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
	    tree new_x = unshare_expr (x);
	    gimplify_expr (&present, &ilist, NULL, is_gimple_val,
			   fb_rvalue);
	    gcond *cond = gimple_build_cond_from_tree (present,
						       notnull_label,
						       null_label);
	    gimple_seq_add_stmt (&ilist, cond);
	    gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
	    gimplify_assign (new_x, null_pointer_node, &ilist);
	    gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
	    gimple_seq_add_stmt (&ilist,
				 gimple_build_label (notnull_label));
	    gimplify_assign (x, var, &ilist);
	    gimple_seq_add_stmt (&ilist,
				 gimple_build_label (opt_arg_label));
	  }
	else
	  gimplify_assign (x, var, &ilist);
	s = size_int (0);
	purpose = size_int (map_idx++);
	CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	gcc_checking_assert (tkind
			     < (HOST_WIDE_INT_C (1U) << talign_shift));
	gcc_checking_assert (tkind
			     <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				build_int_cstu (tkind_type, tkind));
	break;
      }

    gcc_assert (map_idx == map_cnt);
    DECL_INITIAL (TREE_VEC_ELT (t, 1))
      = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
    DECL_INITIAL (TREE_VEC_ELT (t, 2))
      = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
    for (int i = 1; i <= 2; i++)
      if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
	{
	  gimple_seq initlist = NULL;
	  force_gimple_operand (build1 (DECL_EXPR, void_type_node,
					TREE_VEC_ELT (t, i)),
				&initlist, true, NULL_TREE);
	  gimple_seq_add_seq (&ilist, initlist);

	  tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
	  gimple_seq_add_stmt (&olist,
			       gimple_build_assign (TREE_VEC_ELT (t, i),
						    clobber));
	}
      else if (omp_maybe_offloaded_ctx (ctx->outer))
	{
	  tree id = get_identifier ("omp declare target");
	  tree decl = TREE_VEC_ELT (t, i);
	  DECL_ATTRIBUTES (decl)
	    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
	  varpool_node *node = varpool_node::get (decl);
	  if (node)
	    {
	      node->offloadable = 1;
	      if (ENABLE_OFFLOADING)
		{
		  g->have_offload = true;
		  vec_safe_push (offload_vars, t);
		}
	    }
	}

    tree clobber = build_clobber (ctx->record_type);
    gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
						      clobber));
  }
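  /* The clobbers appended to OLIST above mark the sender record and any
     dynamically initialized size/kind arrays as dead once the runtime
     call has returned, allowing later passes to reuse their stack
     slots.  */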
  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  new_body = NULL;

  if (offloaded
      && ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }
  gimple_seq_add_seq (&new_body, fplist);
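  /* In generated GIMPLE the assignment built above looks roughly like
     (names are illustrative of what scan_omp creates, e.g.):
	 .omp_data_i = (struct .omp_data_t.N *) &.omp_data_arr.M;
     i.e. the receiver decl used by the offloaded body is seeded with the
     address of the sender record that ILIST filled in.  */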
  if (offloaded || data_region)
    {
      tree prev = NULL_TREE;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var, x;
	  default:
	    break;
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  omp_firstprivatize_data_region:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_privatize_by_reference (var)
		|| is_gimple_reg_type (TREE_TYPE (var)))
	      {
		tree new_var = lookup_decl (var, ctx);
		tree type;
		type = TREE_TYPE (var);
		if (omp_privatize_by_reference (var))
		  type = TREE_TYPE (type);
		if ((INTEGRAL_TYPE_P (type)
		     && TYPE_PRECISION (type) <= POINTER_SIZE)
		    || TREE_CODE (type) == POINTER_TYPE)
		  {
		    x = build_receiver_ref (var, false, ctx);
		    if (TREE_CODE (type) != POINTER_TYPE)
		      x = fold_convert (pointer_sized_int_node, x);
		    x = fold_convert (type, x);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    if (omp_privatize_by_reference (var))
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
		      }
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
		else
		  {
		    bool by_ref = !omp_privatize_by_reference (var);
		    x = build_receiver_ref (var, by_ref, ctx);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
	      }
	    else if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_var = lookup_decl (pvar, ctx);
		x = build_receiver_ref (var, false, ctx);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_privatize_by_reference (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  {
		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					    get_name (var));
		    gimple_add_tmp_var (x);
		    TREE_ADDRESSABLE (x) = 1;
		    x = build_fold_addr_expr_loc (clause_loc, x);
		  }
		else
		  break;

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_USE_DEVICE_PTR:
	  case OMP_CLAUSE_USE_DEVICE_ADDR:
	  case OMP_CLAUSE_HAS_DEVICE_ADDR:
	  case OMP_CLAUSE_IS_DEVICE_PTR:
	    tree new_var;
	    gimple_seq assign_body;
	    bool is_array_data;
	    bool do_optional_check;
	    assign_body = NULL;
	    do_optional_check = false;
	    var = OMP_CLAUSE_DECL (c);
	    is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR && is_array_data)
	      goto omp_firstprivatize_data_region;

	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
		&& OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
	      x = build_sender_ref (is_array_data
				    ? (splay_tree_key) &DECL_NAME (var)
				    : (splay_tree_key) &DECL_UID (var), ctx);
	    else
	      {
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		  {
		    while (TREE_CODE (var) == INDIRECT_REF
			   || TREE_CODE (var) == ARRAY_REF)
		      var = TREE_OPERAND (var, 0);
		  }
		x = build_receiver_ref (var, false, ctx);
	      }

	    if (is_array_data)
	      {
		bool is_ref = omp_privatize_by_reference (var);
		do_optional_check = true;
		/* First, we copy the descriptor data from the host; then
		   we update its data to point to the target address.  */
		new_var = lookup_decl (var, ctx);
		new_var = DECL_VALUE_EXPR (new_var);
		tree v = new_var;
		tree v2 = var;
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
		    || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR)
		  v2 = maybe_lookup_decl_in_outer_ctx (var, ctx);

		if (is_ref)
		  {
		    v2 = build_fold_indirect_ref (v2);
		    v = create_tmp_var_raw (TREE_TYPE (v2), get_name (var));
		    gimple_add_tmp_var (v);
		    TREE_ADDRESSABLE (v) = 1;
		    gimplify_assign (v, v2, &assign_body);
		    tree rhs = build_fold_addr_expr (v);
		    gimple_seq_add_stmt (&assign_body,
					 gimple_build_assign (new_var, rhs));
		  }
		else
		  gimplify_assign (new_var, v2, &assign_body);

		v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
		gcc_assert (v2);
		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&assign_body,
				     gimple_build_assign (v2, x));
	      }
	    else if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		new_var = lookup_decl (pvar, ctx);
		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&assign_body,
				     gimple_build_assign (new_var, x));
	      }
	    else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		      && !omp_privatize_by_reference (var)
		      && !omp_is_allocatable_or_ptr (var))
		     || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      {
		new_var = lookup_decl (var, ctx);
		new_var = DECL_VALUE_EXPR (new_var);
		gcc_assert (TREE_CODE (new_var) == MEM_REF);
		new_var = TREE_OPERAND (new_var, 0);
		gcc_assert (DECL_P (new_var));
		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&assign_body,
				     gimple_build_assign (new_var, x));
	      }
	    else
	      {
		tree type = TREE_TYPE (var);
		new_var = lookup_decl (var, ctx);
		if (omp_privatize_by_reference (var))
		  {
		    type = TREE_TYPE (type);
		    if (POINTER_TYPE_P (type)
			&& TREE_CODE (type) != ARRAY_TYPE
			&& ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
			     && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
			    || (omp_privatize_by_reference (var)
				&& omp_is_allocatable_or_ptr (var))))
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			x = fold_convert (type, x);
			gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
				       fb_rvalue);
			gimple_seq_add_stmt (&assign_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
			do_optional_check = true;
		      }
		  }
		new_var = DECL_VALUE_EXPR (new_var);
		x = fold_convert (TREE_TYPE (new_var), x);
		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&assign_body,
				     gimple_build_assign (new_var, x));
	      }
	    tree present;
	    present = ((do_optional_check
			&& OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
		       ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
		       : NULL_TREE);
	    if (present)
	      {
		tree null_label = create_artificial_label (UNKNOWN_LOCATION);
		tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
		tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
		glabel *null_glabel = gimple_build_label (null_label);
		glabel *notnull_glabel = gimple_build_label (notnull_label);
		ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val,
			       fb_rvalue);
		gimplify_expr (&present, &new_body, NULL, is_gimple_val,
			       fb_rvalue);
		gcond *cond = gimple_build_cond_from_tree (present,
							   notnull_label,
							   null_label);
		gimple_seq_add_stmt (&new_body, cond);
		gimple_seq_add_stmt (&new_body, null_glabel);
		gimplify_assign (new_var, null_pointer_node, &new_body);
		gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
		gimple_seq_add_stmt (&new_body, notnull_glabel);
		gimple_seq_add_seq (&new_body, assign_body);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_label (opt_arg_label));
	      }
	    else
	      gimple_seq_add_seq (&new_body, assign_body);
	    break;
	  }
      /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
	 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
	 are already handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs
	 or references to VLAs.  */
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var;
	  default:
	    break;
	  case OMP_CLAUSE_MAP:
	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		poly_int64 offset = 0;
		gcc_assert (prev);
		var = OMP_CLAUSE_DECL (c);
		if (DECL_P (var)
		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
								      ctx))
		    && varpool_node::get_create (var)->offloadable)
		  break;
		if (TREE_CODE (var) == INDIRECT_REF
		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
		  var = TREE_OPERAND (var, 0);
		if (TREE_CODE (var) == COMPONENT_REF)
		  {
		    var = get_addr_base_and_unit_offset (var, &offset);
		    gcc_assert (var != NULL_TREE && DECL_P (var));
		  }
		else if (DECL_SIZE (var)
			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
		  {
		    tree var2 = DECL_VALUE_EXPR (var);
		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
		    var2 = TREE_OPERAND (var2, 0);
		    gcc_assert (DECL_P (var2));
		    var = var2;
		  }
		tree new_var = lookup_decl (var, ctx), x;
		tree type = TREE_TYPE (new_var);
		bool is_ref;
		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			== COMPONENT_REF))
		  {
		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
		  {
		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else
		  is_ref = omp_privatize_by_reference (var);
		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		  is_ref = true;
		bool ref_to_array = false;
		if (is_ref)
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) == ARRAY_TYPE)
		      {
			type = build_pointer_type (type);
			ref_to_array = true;
		      }
		  }
		else if (TREE_CODE (type) == ARRAY_TYPE)
		  {
		    tree decl2 = DECL_VALUE_EXPR (new_var);
		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
		    decl2 = TREE_OPERAND (decl2, 0);
		    gcc_assert (DECL_P (decl2));
		    new_var = decl2;
		    type = TREE_TYPE (new_var);
		  }
		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
		x = fold_convert_loc (clause_loc, type, x);
		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
		  {
		    tree bias = OMP_CLAUSE_SIZE (c);
		    if (DECL_P (bias))
		      bias = lookup_decl (bias, ctx);
		    bias = fold_convert_loc (clause_loc, sizetype, bias);
		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
					    bias);
		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (x), x, bias);
		  }
		if (ref_to_array)
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		if (is_ref && !ref_to_array)
		  {
		    tree t = create_tmp_var_raw (type, get_name (var));
		    gimple_add_tmp_var (t);
		    TREE_ADDRESSABLE (t) = 1;
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (t, x));
		    x = build_fold_addr_expr_loc (clause_loc, t);
		  }
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
		prev = NULL_TREE;
	      }
	    else if (OMP_CLAUSE_CHAIN (c)
		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
			== OMP_CLAUSE_MAP
		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			 == GOMP_MAP_FIRSTPRIVATE_POINTER
			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	      prev = c;
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    var = OMP_CLAUSE_DECL (c);
	    if (is_variable_sized (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_pvar = lookup_decl (pvar, ctx);
		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		tree al = size_int (DECL_ALIGN (var));
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_pvar, x));
	      }
	    else if (omp_privatize_by_reference (var)
		     && !is_gimple_omp_oacc (ctx->stmt))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  break;
		else
		  {
		    tree atmp
		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
		    tree al = size_int (TYPE_ALIGN (rtype));
		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		  }

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  }
    }
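  /* Source-level illustration (an editorial sketch): for

	 int n = f ();
	 int a[n];
	 #pragma omp target private (a)
	 ...

     the pointer behind A's DECL_VALUE_EXPR is seeded inside the target
     region with a BUILT_IN_ALLOCA_WITH_ALIGN call sized by
     TYPE_SIZE_UNIT and aligned to DECL_ALIGN, exactly as built above, so
     each target invocation gets a fresh private VLA.  */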
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      if (offloaded && is_gimple_omp_oacc (ctx->stmt))
	{
	  /* If there are reductions on the offloaded region itself, treat
	     them as a dummy GANG loop.  */
	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);

	  gcall *private_marker = lower_oacc_private_marker (ctx);

	  if (private_marker)
	    gimple_call_set_arg (private_marker, 2, level);

	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
				 false, NULL, private_marker, NULL, &fork_seq,
				 &join_seq, ctx);
	}

      gimple_seq_add_seq (&new_body, fork_seq);
      gimple_seq_add_seq (&new_body, tgt_body);
      gimple_seq_add_seq (&new_body, join_seq);

      if (offloaded)
	{
	  new_body = maybe_catch_exception (new_body);
	  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
	}
      gimple_omp_set_body (stmt, new_body);
    }

  bind = gimple_build_bind (NULL, NULL,
			    tgt_bind ? gimple_bind_block (tgt_bind)
				     : NULL_TREE);
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  tree num_teams_lower = NULL_TREE;
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
      if (num_teams_lower)
	{
	  num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
	  gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
			 fb_rvalue);
	}
      num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  if (num_teams_lower == NULL_TREE)
    num_teams_lower = num_teams;
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }
  location_t loc = gimple_location (teams_stmt);
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4);
  tree rettype = TREE_TYPE (TREE_TYPE (decl));
  tree first = create_tmp_var (rettype);
  gimple_seq_add_stmt (&bind_body,
		       gimple_build_assign (first, build_one_cst (rettype)));
  tree llabel = create_artificial_label (loc);
  gimple_seq_add_stmt (&bind_body, gimple_build_label (llabel));
  gimple *call
    = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
			 first);
  gimple_set_location (call, loc);
  tree temp = create_tmp_var (rettype);
  gimple_call_set_lhs (call, temp);
  gimple_seq_add_stmt (&bind_body, call);

  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
				    tlabel, flabel);
  gimple_seq_add_stmt (&bind_body, cond);
  gimple_seq_add_stmt (&bind_body, gimple_build_label (tlabel));
  gimple_seq_add_stmt (&bind_body,
		       gimple_build_assign (first, build_zero_cst (rettype)));

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   NULL, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_seq_add_stmt (&bind_body, gimple_build_goto (llabel));
  gimple_seq_add_stmt (&bind_body, gimple_build_label (flabel));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
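/* The lowered form of a host teams construct is, roughly:

       first = 1;
     retry:
       temp = GOMP_teams4 (num_teams_lower, num_teams_upper, thread_limit,
			   first);
       if (temp != 0) goto body; else goto done;
     body:
       first = 0;
       ... teams body, reductions, destructors ...
       OMP_RETURN
       goto retry;
     done:

   i.e. the body is re-entered until GOMP_teams4 reports that all teams
   have run (a sketch; the precise contract is defined by libgomp).  */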
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with make_addressable_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (make_addressable_vars
      && DECL_P (t)
      && bitmap_bit_p (make_addressable_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  omp_context *ctx;
  vec<tree> *decls;
};

/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}

/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = (void *) &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
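/* Illustration (an editorial sketch): given a member-access dummy
   variable D whose DECL_VALUE_EXPR is something like this->field, and a
   context in which the underlying base has been remapped, the walk above
   temporarily rewrites D's DECL_VALUE_EXPR in terms of the remapped base
   before gimple_regimplify_operands runs; the trailing loop then restores
   the saved value expressions in LIFO order.  */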
static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (make_addressable_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || make_addressable_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      if (ctx && is_gimple_omp_oacc (ctx->stmt))
	{
	  tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
	  oacc_privatization_scan_decl_chain (ctx, vars);
	}
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCOPE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scope (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || make_addressable_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;

    case GIMPLE_ASSIGN:
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || make_addressable_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers; this should happen only if we have
	     "privatized" local addressable variables in SIMD regions.
	     The clobber isn't needed in that case, and gimplifying the
	     address of the ARRAY_REF into a pointer and creating a
	     MEM_REF based clobber would create worse code than we get
	     with the clobber dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (make_addressable_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (make_addressable_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (make_addressable_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If the current function is a method, remove the artificial dummy
     VAR_DECL created for non-static data member privatization: it isn't
     needed for debuginfo nor anything else, has already been replaced
     everywhere in the IL, and causes problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));

  for (auto task_stmt : task_cpyfns)
    finalize_task_copyfn (task_stmt);
  task_cpyfns.release ();
  return 0;
}
namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return execute_lower_omp ();
  }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
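/* Example of what these passes diagnose (an editorial sketch):

       #pragma omp parallel
       {
	 goto l1;  // error: invalid branch to/from OpenMP structured block
       }
     l1:;

   diagnose_sb_1 records that L1's context is outside the parallel;
   diagnose_sb_2 then sees that the goto's context differs and calls
   diagnose_sb_0, which reports the error and replaces the branch with a
   nop.  */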
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}
namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  unsigned int execute (function *) final override
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}

#include "gt-omp-low.h"