1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2021 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
62 #include "omp-offload.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec
<tree
> task_reductions
;
122 /* A hash map from the reduction clauses to the registered array
124 hash_map
<tree
, unsigned> *task_reduction_map
;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
130 /* And a hash map from the allocate variables to their corresponding
132 hash_map
<tree
, tree
> *allocate_map
;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses
;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses
;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
151 /* True if this parallel directive is nested within another. */
154 /* True if this construct can be cancelled. */
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
159 bool combined_into_simd_safelen1
;
161 /* True if there is nested scan context with inclusive clause. */
164 /* True if there is nested scan context with exclusive clause. */
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase
;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent
;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p
;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec
<tree
> oacc_privatization_candidates
;
188 static splay_tree all_contexts
;
189 static int taskreg_nesting_level
;
190 static int target_nesting_level
;
191 static bitmap task_shared_vars
;
192 static bitmap global_nonaddressable_vars
;
193 static vec
<omp_context
*> taskreg_contexts
;
195 static void scan_omp (gimple_seq
*, omp_context
*);
196 static tree
scan_omp_1_op (tree
*, int *, void *);
/* Shared case labels for gimple walkers: containers whose sub-statements
   should be walked rather than handled as a unit.  */
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
208 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
209 (This doesn't include OpenACC 'kernels' decomposed parts.) */
212 is_oacc_parallel_or_serial (omp_context
*ctx
)
214 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
215 return ((outer_type
== GIMPLE_OMP_TARGET
)
216 && ((gimple_omp_target_kind (ctx
->stmt
)
217 == GF_OMP_TARGET_KIND_OACC_PARALLEL
)
218 || (gimple_omp_target_kind (ctx
->stmt
)
219 == GF_OMP_TARGET_KIND_OACC_SERIAL
)));
222 /* Return whether CTX represents an OpenACC 'kernels' construct.
223 (This doesn't include OpenACC 'kernels' decomposed parts.) */
226 is_oacc_kernels (omp_context
*ctx
)
228 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
229 return ((outer_type
== GIMPLE_OMP_TARGET
)
230 && (gimple_omp_target_kind (ctx
->stmt
)
231 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
234 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
237 is_oacc_kernels_decomposed_part (omp_context
*ctx
)
239 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
240 return ((outer_type
== GIMPLE_OMP_TARGET
)
241 && ((gimple_omp_target_kind (ctx
->stmt
)
242 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
)
243 || (gimple_omp_target_kind (ctx
->stmt
)
244 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
)
245 || (gimple_omp_target_kind (ctx
->stmt
)
246 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
)));
249 /* Return true if STMT corresponds to an OpenMP target region. */
251 is_omp_target (gimple
*stmt
)
253 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
)
255 int kind
= gimple_omp_target_kind (stmt
);
256 return (kind
== GF_OMP_TARGET_KIND_REGION
257 || kind
== GF_OMP_TARGET_KIND_DATA
258 || kind
== GF_OMP_TARGET_KIND_ENTER_DATA
259 || kind
== GF_OMP_TARGET_KIND_EXIT_DATA
);
264 /* If DECL is the artificial dummy VAR_DECL created for non-static
265 data member privatization, return the underlying "this" parameter,
266 otherwise return NULL. */
269 omp_member_access_dummy_var (tree decl
)
272 || !DECL_ARTIFICIAL (decl
)
273 || !DECL_IGNORED_P (decl
)
274 || !DECL_HAS_VALUE_EXPR_P (decl
)
275 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
278 tree v
= DECL_VALUE_EXPR (decl
);
279 if (TREE_CODE (v
) != COMPONENT_REF
)
283 switch (TREE_CODE (v
))
289 case POINTER_PLUS_EXPR
:
290 v
= TREE_OPERAND (v
, 0);
293 if (DECL_CONTEXT (v
) == current_function_decl
294 && DECL_ARTIFICIAL (v
)
295 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
303 /* Helper for unshare_and_remap, called through walk_tree. */
306 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
308 tree
*pair
= (tree
*) data
;
311 *tp
= unshare_expr (pair
[1]);
314 else if (IS_TYPE_OR_DECL_P (*tp
))
319 /* Return unshare_expr (X) with all occurrences of FROM
323 unshare_and_remap (tree x
, tree from
, tree to
)
325 tree pair
[2] = { from
, to
};
326 x
= unshare_expr (x
);
327 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
331 /* Convenience function for calling scan_omp_1_op on tree operands. */
334 scan_omp_op (tree
*tp
, omp_context
*ctx
)
336 struct walk_stmt_info wi
;
338 memset (&wi
, 0, sizeof (wi
));
340 wi
.want_locations
= true;
342 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
345 static void lower_omp (gimple_seq
*, omp_context
*);
346 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
347 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
349 /* Return true if CTX is for an omp parallel. */
352 is_parallel_ctx (omp_context
*ctx
)
354 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
358 /* Return true if CTX is for an omp task. */
361 is_task_ctx (omp_context
*ctx
)
363 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
367 /* Return true if CTX is for an omp taskloop. */
370 is_taskloop_ctx (omp_context
*ctx
)
372 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
373 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
377 /* Return true if CTX is for a host omp teams. */
380 is_host_teams_ctx (omp_context
*ctx
)
382 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
383 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
386 /* Return true if CTX is for an omp parallel or omp task or host omp teams
387 (the last one is strictly not a task region in OpenMP speak, but we
388 need to treat it similarly). */
391 is_taskreg_ctx (omp_context
*ctx
)
393 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
396 /* Return true if EXPR is variable sized. */
399 is_variable_sized (const_tree expr
)
401 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
404 /* Lookup variables. The "maybe" form
405 allows for the variable form to not have been entered, otherwise we
406 assert that the variable must have been entered. */
409 lookup_decl (tree var
, omp_context
*ctx
)
411 tree
*n
= ctx
->cb
.decl_map
->get (var
);
416 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
418 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
419 return n
? *n
: NULL_TREE
;
423 lookup_field (tree var
, omp_context
*ctx
)
426 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
427 return (tree
) n
->value
;
431 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
434 n
= splay_tree_lookup (ctx
->sfield_map
435 ? ctx
->sfield_map
: ctx
->field_map
, key
);
436 return (tree
) n
->value
;
440 lookup_sfield (tree var
, omp_context
*ctx
)
442 return lookup_sfield ((splay_tree_key
) var
, ctx
);
446 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
449 n
= splay_tree_lookup (ctx
->field_map
, key
);
450 return n
? (tree
) n
->value
: NULL_TREE
;
454 maybe_lookup_field (tree var
, omp_context
*ctx
)
456 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
459 /* Return true if DECL should be copied by pointer. SHARED_CTX is
460 the parallel context if DECL is to be shared. */
463 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
465 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
466 || TYPE_ATOMIC (TREE_TYPE (decl
)))
469 /* We can only use copy-in/copy-out semantics for shared variables
470 when we know the value is not accessible from an outer scope. */
473 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
475 /* ??? Trivially accessible from anywhere. But why would we even
476 be passing an address in this case? Should we simply assert
477 this to be false, or should we have a cleanup pass that removes
478 these from the list of mappings? */
479 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
482 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
483 without analyzing the expression whether or not its location
484 is accessible to anyone else. In the case of nested parallel
485 regions it certainly may be. */
486 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
489 /* Do not use copy-in/copy-out for variables that have their
491 if (is_global_var (decl
))
493 /* For file scope vars, track whether we've seen them as
494 non-addressable initially and in that case, keep the same
495 answer for the duration of the pass, even when they are made
496 addressable later on e.g. through reduction expansion. Global
497 variables which weren't addressable before the pass will not
498 have their privatized copies address taken. See PR91216. */
499 if (!TREE_ADDRESSABLE (decl
))
501 if (!global_nonaddressable_vars
)
502 global_nonaddressable_vars
= BITMAP_ALLOC (NULL
);
503 bitmap_set_bit (global_nonaddressable_vars
, DECL_UID (decl
));
505 else if (!global_nonaddressable_vars
506 || !bitmap_bit_p (global_nonaddressable_vars
,
510 else if (TREE_ADDRESSABLE (decl
))
513 /* lower_send_shared_vars only uses copy-in, but not copy-out
515 if (TREE_READONLY (decl
)
516 || ((TREE_CODE (decl
) == RESULT_DECL
517 || TREE_CODE (decl
) == PARM_DECL
)
518 && DECL_BY_REFERENCE (decl
)))
521 /* Disallow copy-in/out in nested parallel if
522 decl is shared in outer parallel, otherwise
523 each thread could store the shared variable
524 in its own copy-in location, making the
525 variable no longer really shared. */
526 if (shared_ctx
->is_nested
)
530 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
531 if ((is_taskreg_ctx (up
)
532 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
533 && is_gimple_omp_offloaded (up
->stmt
)))
534 && maybe_lookup_decl (decl
, up
))
541 if (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
)
543 for (c
= gimple_omp_target_clauses (up
->stmt
);
544 c
; c
= OMP_CLAUSE_CHAIN (c
))
545 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
546 && OMP_CLAUSE_DECL (c
) == decl
)
550 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
551 c
; c
= OMP_CLAUSE_CHAIN (c
))
552 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
553 && OMP_CLAUSE_DECL (c
) == decl
)
557 goto maybe_mark_addressable_and_ret
;
561 /* For tasks avoid using copy-in/out. As tasks can be
562 deferred or executed in different thread, when GOMP_task
563 returns, the task hasn't necessarily terminated. */
564 if (is_task_ctx (shared_ctx
))
567 maybe_mark_addressable_and_ret
:
568 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
569 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
571 /* Taking address of OUTER in lower_send_shared_vars
572 might need regimplification of everything that uses the
574 if (!task_shared_vars
)
575 task_shared_vars
= BITMAP_ALLOC (NULL
);
576 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
577 TREE_ADDRESSABLE (outer
) = 1;
586 /* Construct a new automatic decl similar to VAR. */
589 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
591 tree copy
= copy_var_decl (var
, name
, type
);
593 DECL_CONTEXT (copy
) = current_function_decl
;
594 DECL_CHAIN (copy
) = ctx
->block_vars
;
595 /* If VAR is listed in task_shared_vars, it means it wasn't
596 originally addressable and is just because task needs to take
597 it's address. But we don't need to take address of privatizations
599 if (TREE_ADDRESSABLE (var
)
600 && ((task_shared_vars
601 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
602 || (global_nonaddressable_vars
603 && bitmap_bit_p (global_nonaddressable_vars
, DECL_UID (var
)))))
604 TREE_ADDRESSABLE (copy
) = 0;
605 ctx
->block_vars
= copy
;
611 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
613 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
616 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
618 /* See also 'gcc/omp-oacc-neuter-broadcast.cc:oacc_build_component_ref'. */
621 omp_build_component_ref (tree obj
, tree field
)
623 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
624 if (TREE_THIS_VOLATILE (field
))
625 TREE_THIS_VOLATILE (ret
) |= 1;
626 if (TREE_READONLY (field
))
627 TREE_READONLY (ret
) |= 1;
631 /* Build tree nodes to access the field for VAR on the receiver side. */
634 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
636 tree x
, field
= lookup_field (var
, ctx
);
638 /* If the receiver record type was remapped in the child function,
639 remap the field into the new record type. */
640 x
= maybe_lookup_field (field
, ctx
);
644 x
= build_simple_mem_ref (ctx
->receiver_decl
);
645 TREE_THIS_NOTRAP (x
) = 1;
646 x
= omp_build_component_ref (x
, field
);
649 x
= build_simple_mem_ref (x
);
650 TREE_THIS_NOTRAP (x
) = 1;
656 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
657 of a parallel, this is a component reference; for workshare constructs
658 this is some variable. */
661 build_outer_var_ref (tree var
, omp_context
*ctx
,
662 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
665 omp_context
*outer
= ctx
->outer
;
666 for (; outer
; outer
= outer
->outer
)
668 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
670 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCOPE
671 && !maybe_lookup_decl (var
, outer
))
676 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
678 else if (is_variable_sized (var
))
680 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
681 x
= build_outer_var_ref (x
, ctx
, code
);
682 x
= build_simple_mem_ref (x
);
684 else if (is_taskreg_ctx (ctx
))
686 bool by_ref
= use_pointer_for_field (var
, NULL
);
687 x
= build_receiver_ref (var
, by_ref
, ctx
);
689 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
690 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
692 || (code
== OMP_CLAUSE_PRIVATE
693 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
694 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
695 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
697 /* #pragma omp simd isn't a worksharing construct, and can reference
698 even private vars in its linear etc. clauses.
699 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
700 to private vars in all worksharing constructs. */
702 if (outer
&& is_taskreg_ctx (outer
))
703 x
= lookup_decl (var
, outer
);
705 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
709 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
713 = splay_tree_lookup (outer
->field_map
,
714 (splay_tree_key
) &DECL_UID (var
));
717 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
720 x
= lookup_decl (var
, outer
);
724 tree field
= (tree
) n
->value
;
725 /* If the receiver record type was remapped in the child function,
726 remap the field into the new record type. */
727 x
= maybe_lookup_field (field
, outer
);
731 x
= build_simple_mem_ref (outer
->receiver_decl
);
732 x
= omp_build_component_ref (x
, field
);
733 if (use_pointer_for_field (var
, outer
))
734 x
= build_simple_mem_ref (x
);
738 x
= lookup_decl (var
, outer
);
739 else if (omp_privatize_by_reference (var
))
740 /* This can happen with orphaned constructs. If var is reference, it is
741 possible it is shared and as such valid. */
743 else if (omp_member_access_dummy_var (var
))
750 tree t
= omp_member_access_dummy_var (var
);
753 x
= DECL_VALUE_EXPR (var
);
754 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
756 x
= unshare_and_remap (x
, t
, o
);
758 x
= unshare_expr (x
);
762 if (omp_privatize_by_reference (var
))
763 x
= build_simple_mem_ref (x
);
768 /* Build tree nodes to access the field for VAR on the sender side. */
771 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
773 tree field
= lookup_sfield (key
, ctx
);
774 return omp_build_component_ref (ctx
->sender_decl
, field
);
778 build_sender_ref (tree var
, omp_context
*ctx
)
780 return build_sender_ref ((splay_tree_key
) var
, ctx
);
783 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
784 BASE_POINTERS_RESTRICT, declare the field with restrict. */
787 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
789 tree field
, type
, sfield
= NULL_TREE
;
790 splay_tree_key key
= (splay_tree_key
) var
;
792 if ((mask
& 16) != 0)
794 key
= (splay_tree_key
) &DECL_NAME (var
);
795 gcc_checking_assert (key
!= (splay_tree_key
) var
);
799 key
= (splay_tree_key
) &DECL_UID (var
);
800 gcc_checking_assert (key
!= (splay_tree_key
) var
);
802 gcc_assert ((mask
& 1) == 0
803 || !splay_tree_lookup (ctx
->field_map
, key
));
804 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
805 || !splay_tree_lookup (ctx
->sfield_map
, key
));
806 gcc_assert ((mask
& 3) == 3
807 || !is_gimple_omp_oacc (ctx
->stmt
));
809 type
= TREE_TYPE (var
);
810 if ((mask
& 16) != 0)
811 type
= lang_hooks
.decls
.omp_array_data (var
, true);
813 /* Prevent redeclaring the var in the split-off function with a restrict
814 pointer type. Note that we only clear type itself, restrict qualifiers in
815 the pointed-to type will be ignored by points-to analysis. */
816 if (POINTER_TYPE_P (type
)
817 && TYPE_RESTRICT (type
))
818 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
822 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
823 type
= build_pointer_type (build_pointer_type (type
));
826 type
= build_pointer_type (type
);
827 else if ((mask
& (32 | 3)) == 1
828 && omp_privatize_by_reference (var
))
829 type
= TREE_TYPE (type
);
831 field
= build_decl (DECL_SOURCE_LOCATION (var
),
832 FIELD_DECL
, DECL_NAME (var
), type
);
834 /* Remember what variable this field was created for. This does have a
835 side effect of making dwarf2out ignore this member, so for helpful
836 debugging we clear it later in delete_omp_context. */
837 DECL_ABSTRACT_ORIGIN (field
) = var
;
838 if ((mask
& 16) == 0 && type
== TREE_TYPE (var
))
840 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
841 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
842 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
845 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
849 insert_field_into_struct (ctx
->record_type
, field
);
850 if (ctx
->srecord_type
)
852 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
853 FIELD_DECL
, DECL_NAME (var
), type
);
854 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
855 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
856 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
857 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
858 insert_field_into_struct (ctx
->srecord_type
, sfield
);
863 if (ctx
->srecord_type
== NULL_TREE
)
867 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
868 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
869 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
871 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
872 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
873 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
874 insert_field_into_struct (ctx
->srecord_type
, sfield
);
875 splay_tree_insert (ctx
->sfield_map
,
876 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
877 (splay_tree_value
) sfield
);
881 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
882 : ctx
->srecord_type
, field
);
886 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
887 if ((mask
& 2) && ctx
->sfield_map
)
888 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
892 install_var_local (tree var
, omp_context
*ctx
)
894 tree new_var
= omp_copy_decl_1 (var
, ctx
);
895 insert_decl_map (&ctx
->cb
, var
, new_var
);
899 /* Adjust the replacement for DECL in CTX for the new context. This means
900 copying the DECL_VALUE_EXPR, and fixing up the type. */
903 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
907 new_decl
= lookup_decl (decl
, ctx
);
909 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
911 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
912 && DECL_HAS_VALUE_EXPR_P (decl
))
914 tree ve
= DECL_VALUE_EXPR (decl
);
915 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
916 SET_DECL_VALUE_EXPR (new_decl
, ve
);
917 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
920 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
922 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
923 if (size
== error_mark_node
)
924 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
925 DECL_SIZE (new_decl
) = size
;
927 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
928 if (size
== error_mark_node
)
929 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
930 DECL_SIZE_UNIT (new_decl
) = size
;
934 /* The callback for remap_decl. Search all containing contexts for a
935 mapping of the variable; this avoids having to duplicate the splay
936 tree ahead of time. We know a mapping doesn't already exist in the
937 given context. Create new mappings to implement default semantics. */
940 omp_copy_decl (tree var
, copy_body_data
*cb
)
942 omp_context
*ctx
= (omp_context
*) cb
;
945 if (TREE_CODE (var
) == LABEL_DECL
)
947 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
949 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
950 DECL_CONTEXT (new_var
) = current_function_decl
;
951 insert_decl_map (&ctx
->cb
, var
, new_var
);
955 while (!is_taskreg_ctx (ctx
))
960 new_var
= maybe_lookup_decl (var
, ctx
);
965 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
968 return error_mark_node
;
971 /* Create a new context, with OUTER_CTX being the surrounding context. */
974 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
976 omp_context
*ctx
= XCNEW (omp_context
);
978 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
979 (splay_tree_value
) ctx
);
984 ctx
->outer
= outer_ctx
;
985 ctx
->cb
= outer_ctx
->cb
;
986 ctx
->cb
.block
= NULL
;
987 ctx
->depth
= outer_ctx
->depth
+ 1;
991 ctx
->cb
.src_fn
= current_function_decl
;
992 ctx
->cb
.dst_fn
= current_function_decl
;
993 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
994 gcc_checking_assert (ctx
->cb
.src_node
);
995 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
996 ctx
->cb
.src_cfun
= cfun
;
997 ctx
->cb
.copy_decl
= omp_copy_decl
;
998 ctx
->cb
.eh_lp_nr
= 0;
999 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
1000 ctx
->cb
.adjust_array_error_bounds
= true;
1001 ctx
->cb
.dont_remap_vla_if_no_change
= true;
1005 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
1010 static gimple_seq
maybe_catch_exception (gimple_seq
);
1012 /* Finalize task copyfn. */
1015 finalize_task_copyfn (gomp_task
*task_stmt
)
1017 struct function
*child_cfun
;
1019 gimple_seq seq
= NULL
, new_seq
;
1022 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
1023 if (child_fn
== NULL_TREE
)
1026 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
1027 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
1029 push_cfun (child_cfun
);
1030 bind
= gimplify_body (child_fn
, false);
1031 gimple_seq_add_stmt (&seq
, bind
);
1032 new_seq
= maybe_catch_exception (seq
);
1035 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
1037 gimple_seq_add_stmt (&seq
, bind
);
1039 gimple_set_body (child_fn
, seq
);
1042 /* Inform the callgraph about the new function. */
1043 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
1044 node
->parallelized_function
= 1;
1045 cgraph_node::add_new_function (child_fn
, false);
1048 /* Destroy a omp_context data structures. Called through the splay tree
1049 value delete callback. */
1052 delete_omp_context (splay_tree_value value
)
1054 omp_context
*ctx
= (omp_context
*) value
;
1056 delete ctx
->cb
.decl_map
;
1059 splay_tree_delete (ctx
->field_map
);
1060 if (ctx
->sfield_map
)
1061 splay_tree_delete (ctx
->sfield_map
);
1063 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1064 it produces corrupt debug information. */
1065 if (ctx
->record_type
)
1068 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
1069 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1071 if (ctx
->srecord_type
)
1074 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
1075 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1078 if (is_task_ctx (ctx
))
1079 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
1081 if (ctx
->task_reduction_map
)
1083 ctx
->task_reductions
.release ();
1084 delete ctx
->task_reduction_map
;
1087 delete ctx
->lastprivate_conditional_map
;
1088 delete ctx
->allocate_map
;
1093 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1097 fixup_child_record_type (omp_context
*ctx
)
1099 tree f
, type
= ctx
->record_type
;
1101 if (!ctx
->receiver_decl
)
1103 /* ??? It isn't sufficient to just call remap_type here, because
1104 variably_modified_type_p doesn't work the way we expect for
1105 record types. Testing each field for whether it needs remapping
1106 and creating a new record by hand works, however. */
1107 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
1108 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
1112 tree name
, new_fields
= NULL
;
1114 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1115 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1116 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1117 TYPE_DECL
, name
, type
);
1118 TYPE_NAME (type
) = name
;
1120 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1122 tree new_f
= copy_node (f
);
1123 DECL_CONTEXT (new_f
) = type
;
1124 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1125 DECL_CHAIN (new_f
) = new_fields
;
1126 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1127 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1129 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1133 /* Arrange to be able to look up the receiver field
1134 given the sender field. */
1135 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1136 (splay_tree_value
) new_f
);
1138 TYPE_FIELDS (type
) = nreverse (new_fields
);
1142 /* In a target region we never modify any of the pointers in *.omp_data_i,
1143 so attempt to help the optimizers. */
1144 if (is_gimple_omp_offloaded (ctx
->stmt
))
1145 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1147 TREE_TYPE (ctx
->receiver_decl
)
1148 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1151 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1152 specified by CLAUSES. */
1155 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1158 bool scan_array_reductions
= false;
1160 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1161 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
1162 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
1163 /* omp_default_mem_alloc is 1 */
1164 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
1165 || OMP_CLAUSE_ALLOCATE_ALIGN (c
) != NULL_TREE
))
1167 if (ctx
->allocate_map
== NULL
)
1168 ctx
->allocate_map
= new hash_map
<tree
, tree
>;
1169 tree val
= integer_zero_node
;
1170 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
1171 val
= OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
1172 if (OMP_CLAUSE_ALLOCATE_ALIGN (c
))
1173 val
= build_tree_list (val
, OMP_CLAUSE_ALLOCATE_ALIGN (c
));
1174 ctx
->allocate_map
->put (OMP_CLAUSE_DECL (c
), val
);
1177 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1181 switch (OMP_CLAUSE_CODE (c
))
1183 case OMP_CLAUSE_PRIVATE
:
1184 decl
= OMP_CLAUSE_DECL (c
);
1185 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1187 else if (!is_variable_sized (decl
))
1188 install_var_local (decl
, ctx
);
1191 case OMP_CLAUSE_SHARED
:
1192 decl
= OMP_CLAUSE_DECL (c
);
1193 if (ctx
->allocate_map
&& ctx
->allocate_map
->get (decl
))
1194 ctx
->allocate_map
->remove (decl
);
1195 /* Ignore shared directives in teams construct inside of
1196 target construct. */
1197 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1198 && !is_host_teams_ctx (ctx
))
1200 /* Global variables don't need to be copied,
1201 the receiver side will use them directly. */
1202 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1203 if (is_global_var (odecl
))
1205 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1208 gcc_assert (is_taskreg_ctx (ctx
));
1209 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1210 || !is_variable_sized (decl
));
1211 /* Global variables don't need to be copied,
1212 the receiver side will use them directly. */
1213 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1215 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1217 use_pointer_for_field (decl
, ctx
);
1220 by_ref
= use_pointer_for_field (decl
, NULL
);
1221 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1222 || TREE_ADDRESSABLE (decl
)
1224 || omp_privatize_by_reference (decl
))
1226 by_ref
= use_pointer_for_field (decl
, ctx
);
1227 install_var_field (decl
, by_ref
, 3, ctx
);
1228 install_var_local (decl
, ctx
);
1231 /* We don't need to copy const scalar vars back. */
1232 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1235 case OMP_CLAUSE_REDUCTION
:
1236 /* Collect 'reduction' clauses on OpenACC compute construct. */
1237 if (is_gimple_omp_oacc (ctx
->stmt
)
1238 && is_gimple_omp_offloaded (ctx
->stmt
))
1240 /* No 'reduction' clauses on OpenACC 'kernels'. */
1241 gcc_checking_assert (!is_oacc_kernels (ctx
));
1242 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1243 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
1245 ctx
->local_reduction_clauses
1246 = tree_cons (NULL
, c
, ctx
->local_reduction_clauses
);
1250 case OMP_CLAUSE_IN_REDUCTION
:
1251 decl
= OMP_CLAUSE_DECL (c
);
1252 if (ctx
->allocate_map
1253 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1254 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
1255 || OMP_CLAUSE_REDUCTION_TASK (c
)))
1256 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1257 || is_task_ctx (ctx
)))
1260 if (ctx
->allocate_map
->get (decl
))
1261 ctx
->allocate_map
->remove (decl
);
1263 if (TREE_CODE (decl
) == MEM_REF
)
1265 tree t
= TREE_OPERAND (decl
, 0);
1266 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1267 t
= TREE_OPERAND (t
, 0);
1268 if (TREE_CODE (t
) == INDIRECT_REF
1269 || TREE_CODE (t
) == ADDR_EXPR
)
1270 t
= TREE_OPERAND (t
, 0);
1271 if (is_omp_target (ctx
->stmt
))
1273 if (is_variable_sized (t
))
1275 gcc_assert (DECL_HAS_VALUE_EXPR_P (t
));
1276 t
= DECL_VALUE_EXPR (t
);
1277 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
1278 t
= TREE_OPERAND (t
, 0);
1279 gcc_assert (DECL_P (t
));
1283 scan_omp_op (&at
, ctx
->outer
);
1284 tree nt
= omp_copy_decl_1 (at
, ctx
);
1285 splay_tree_insert (ctx
->field_map
,
1286 (splay_tree_key
) &DECL_CONTEXT (t
),
1287 (splay_tree_value
) nt
);
1289 splay_tree_insert (ctx
->field_map
,
1290 (splay_tree_key
) &DECL_CONTEXT (at
),
1291 (splay_tree_value
) nt
);
1294 install_var_local (t
, ctx
);
1295 if (is_taskreg_ctx (ctx
)
1296 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1297 || (is_task_ctx (ctx
)
1298 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1299 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1300 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1301 == POINTER_TYPE
)))))
1302 && !is_variable_sized (t
)
1303 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1304 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1305 && !is_task_ctx (ctx
))))
1307 by_ref
= use_pointer_for_field (t
, NULL
);
1308 if (is_task_ctx (ctx
)
1309 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1310 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1312 install_var_field (t
, false, 1, ctx
);
1313 install_var_field (t
, by_ref
, 2, ctx
);
1316 install_var_field (t
, by_ref
, 3, ctx
);
1320 if (is_omp_target (ctx
->stmt
))
1324 scan_omp_op (&at
, ctx
->outer
);
1325 tree nt
= omp_copy_decl_1 (at
, ctx
);
1326 splay_tree_insert (ctx
->field_map
,
1327 (splay_tree_key
) &DECL_CONTEXT (decl
),
1328 (splay_tree_value
) nt
);
1330 splay_tree_insert (ctx
->field_map
,
1331 (splay_tree_key
) &DECL_CONTEXT (at
),
1332 (splay_tree_value
) nt
);
1335 if (is_task_ctx (ctx
)
1336 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1337 && OMP_CLAUSE_REDUCTION_TASK (c
)
1338 && is_parallel_ctx (ctx
)))
1340 /* Global variables don't need to be copied,
1341 the receiver side will use them directly. */
1342 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1344 by_ref
= use_pointer_for_field (decl
, ctx
);
1345 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1346 install_var_field (decl
, by_ref
, 3, ctx
);
1348 install_var_local (decl
, ctx
);
1351 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1352 && OMP_CLAUSE_REDUCTION_TASK (c
))
1354 install_var_local (decl
, ctx
);
1359 case OMP_CLAUSE_LASTPRIVATE
:
1360 /* Let the corresponding firstprivate clause create
1362 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1366 case OMP_CLAUSE_FIRSTPRIVATE
:
1367 case OMP_CLAUSE_LINEAR
:
1368 decl
= OMP_CLAUSE_DECL (c
);
1370 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1371 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1372 && is_gimple_omp_offloaded (ctx
->stmt
))
1374 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1376 by_ref
= !omp_privatize_by_reference (decl
);
1377 install_var_field (decl
, by_ref
, 3, ctx
);
1379 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1380 install_var_field (decl
, true, 3, ctx
);
1382 install_var_field (decl
, false, 3, ctx
);
1384 if (is_variable_sized (decl
))
1386 if (is_task_ctx (ctx
))
1388 if (ctx
->allocate_map
1389 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1392 if (ctx
->allocate_map
->get (decl
))
1393 ctx
->allocate_map
->remove (decl
);
1395 install_var_field (decl
, false, 1, ctx
);
1399 else if (is_taskreg_ctx (ctx
))
1402 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1403 by_ref
= use_pointer_for_field (decl
, NULL
);
1405 if (is_task_ctx (ctx
)
1406 && (global
|| by_ref
|| omp_privatize_by_reference (decl
)))
1408 if (ctx
->allocate_map
1409 && ctx
->allocate_map
->get (decl
))
1410 install_var_field (decl
, by_ref
, 32 | 1, ctx
);
1412 install_var_field (decl
, false, 1, ctx
);
1414 install_var_field (decl
, by_ref
, 2, ctx
);
1417 install_var_field (decl
, by_ref
, 3, ctx
);
1419 install_var_local (decl
, ctx
);
1422 case OMP_CLAUSE_USE_DEVICE_PTR
:
1423 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1424 decl
= OMP_CLAUSE_DECL (c
);
1426 /* Fortran array descriptors. */
1427 if (lang_hooks
.decls
.omp_array_data (decl
, true))
1428 install_var_field (decl
, false, 19, ctx
);
1429 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
1430 && !omp_privatize_by_reference (decl
)
1431 && !omp_is_allocatable_or_ptr (decl
))
1432 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1433 install_var_field (decl
, true, 11, ctx
);
1435 install_var_field (decl
, false, 11, ctx
);
1436 if (DECL_SIZE (decl
)
1437 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1439 tree decl2
= DECL_VALUE_EXPR (decl
);
1440 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1441 decl2
= TREE_OPERAND (decl2
, 0);
1442 gcc_assert (DECL_P (decl2
));
1443 install_var_local (decl2
, ctx
);
1445 install_var_local (decl
, ctx
);
1448 case OMP_CLAUSE_IS_DEVICE_PTR
:
1449 decl
= OMP_CLAUSE_DECL (c
);
1452 case OMP_CLAUSE__LOOPTEMP_
:
1453 case OMP_CLAUSE__REDUCTEMP_
:
1454 gcc_assert (is_taskreg_ctx (ctx
));
1455 decl
= OMP_CLAUSE_DECL (c
);
1456 install_var_field (decl
, false, 3, ctx
);
1457 install_var_local (decl
, ctx
);
1460 case OMP_CLAUSE_COPYPRIVATE
:
1461 case OMP_CLAUSE_COPYIN
:
1462 decl
= OMP_CLAUSE_DECL (c
);
1463 by_ref
= use_pointer_for_field (decl
, NULL
);
1464 install_var_field (decl
, by_ref
, 3, ctx
);
1467 case OMP_CLAUSE_FINAL
:
1469 case OMP_CLAUSE_NUM_THREADS
:
1470 case OMP_CLAUSE_NUM_TEAMS
:
1471 case OMP_CLAUSE_THREAD_LIMIT
:
1472 case OMP_CLAUSE_DEVICE
:
1473 case OMP_CLAUSE_SCHEDULE
:
1474 case OMP_CLAUSE_DIST_SCHEDULE
:
1475 case OMP_CLAUSE_DEPEND
:
1476 case OMP_CLAUSE_PRIORITY
:
1477 case OMP_CLAUSE_GRAINSIZE
:
1478 case OMP_CLAUSE_NUM_TASKS
:
1479 case OMP_CLAUSE_NUM_GANGS
:
1480 case OMP_CLAUSE_NUM_WORKERS
:
1481 case OMP_CLAUSE_VECTOR_LENGTH
:
1482 case OMP_CLAUSE_DETACH
:
1483 case OMP_CLAUSE_FILTER
:
1485 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1489 case OMP_CLAUSE_FROM
:
1490 case OMP_CLAUSE_MAP
:
1492 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1493 decl
= OMP_CLAUSE_DECL (c
);
1494 /* Global variables with "omp declare target" attribute
1495 don't need to be copied, the receiver side will use them
1496 directly. However, global variables with "omp declare target link"
1497 attribute need to be copied. Or when ALWAYS modifier is used. */
1498 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1500 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1501 && (OMP_CLAUSE_MAP_KIND (c
)
1502 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
1503 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
1504 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
)
1505 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1506 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1507 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1508 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1509 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
1510 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1511 && varpool_node::get_create (decl
)->offloadable
1512 && !lookup_attribute ("omp declare target link",
1513 DECL_ATTRIBUTES (decl
)))
1515 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1516 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1518 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1519 not offloaded; there is nothing to map for those. */
1520 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1521 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1522 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1525 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1527 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1528 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1529 && is_omp_target (ctx
->stmt
))
1531 /* If this is an offloaded region, an attach operation should
1532 only exist when the pointer variable is mapped in a prior
1534 if (is_gimple_omp_offloaded (ctx
->stmt
))
1536 (maybe_lookup_decl (decl
, ctx
)
1537 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1538 && lookup_attribute ("omp declare target",
1539 DECL_ATTRIBUTES (decl
))));
1541 /* By itself, attach/detach is generated as part of pointer
1542 variable mapping and should not create new variables in the
1543 offloaded region, however sender refs for it must be created
1544 for its address to be passed to the runtime. */
1546 = build_decl (OMP_CLAUSE_LOCATION (c
),
1547 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1548 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1549 insert_field_into_struct (ctx
->record_type
, field
);
1550 /* To not clash with a map of the pointer variable itself,
1551 attach/detach maps have their field looked up by the *clause*
1552 tree expression, not the decl. */
1553 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1554 (splay_tree_key
) c
));
1555 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) c
,
1556 (splay_tree_value
) field
);
1559 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1560 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1561 || (OMP_CLAUSE_MAP_KIND (c
)
1562 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1564 if (TREE_CODE (decl
) == COMPONENT_REF
1565 || (TREE_CODE (decl
) == INDIRECT_REF
1566 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1567 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1568 == REFERENCE_TYPE
)))
1570 if (DECL_SIZE (decl
)
1571 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1573 tree decl2
= DECL_VALUE_EXPR (decl
);
1574 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1575 decl2
= TREE_OPERAND (decl2
, 0);
1576 gcc_assert (DECL_P (decl2
));
1577 install_var_local (decl2
, ctx
);
1579 install_var_local (decl
, ctx
);
1584 if (DECL_SIZE (decl
)
1585 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1587 tree decl2
= DECL_VALUE_EXPR (decl
);
1588 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1589 decl2
= TREE_OPERAND (decl2
, 0);
1590 gcc_assert (DECL_P (decl2
));
1591 install_var_field (decl2
, true, 3, ctx
);
1592 install_var_local (decl2
, ctx
);
1593 install_var_local (decl
, ctx
);
1597 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1598 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1599 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1600 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1601 install_var_field (decl
, true, 7, ctx
);
1603 install_var_field (decl
, true, 3, ctx
);
1604 if (is_gimple_omp_offloaded (ctx
->stmt
)
1605 && !(is_gimple_omp_oacc (ctx
->stmt
)
1606 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
1607 install_var_local (decl
, ctx
);
1612 tree base
= get_base_address (decl
);
1613 tree nc
= OMP_CLAUSE_CHAIN (c
);
1616 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1617 && OMP_CLAUSE_DECL (nc
) == base
1618 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1619 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1621 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1622 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1628 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1629 decl
= OMP_CLAUSE_DECL (c
);
1631 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1632 (splay_tree_key
) decl
));
1634 = build_decl (OMP_CLAUSE_LOCATION (c
),
1635 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1636 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1637 insert_field_into_struct (ctx
->record_type
, field
);
1638 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1639 (splay_tree_value
) field
);
1644 case OMP_CLAUSE_ORDER
:
1645 ctx
->order_concurrent
= true;
1648 case OMP_CLAUSE_BIND
:
1652 case OMP_CLAUSE_NOWAIT
:
1653 case OMP_CLAUSE_ORDERED
:
1654 case OMP_CLAUSE_COLLAPSE
:
1655 case OMP_CLAUSE_UNTIED
:
1656 case OMP_CLAUSE_MERGEABLE
:
1657 case OMP_CLAUSE_PROC_BIND
:
1658 case OMP_CLAUSE_SAFELEN
:
1659 case OMP_CLAUSE_SIMDLEN
:
1660 case OMP_CLAUSE_THREADS
:
1661 case OMP_CLAUSE_SIMD
:
1662 case OMP_CLAUSE_NOGROUP
:
1663 case OMP_CLAUSE_DEFAULTMAP
:
1664 case OMP_CLAUSE_ASYNC
:
1665 case OMP_CLAUSE_WAIT
:
1666 case OMP_CLAUSE_GANG
:
1667 case OMP_CLAUSE_WORKER
:
1668 case OMP_CLAUSE_VECTOR
:
1669 case OMP_CLAUSE_INDEPENDENT
:
1670 case OMP_CLAUSE_AUTO
:
1671 case OMP_CLAUSE_SEQ
:
1672 case OMP_CLAUSE_TILE
:
1673 case OMP_CLAUSE__SIMT_
:
1674 case OMP_CLAUSE_DEFAULT
:
1675 case OMP_CLAUSE_NONTEMPORAL
:
1676 case OMP_CLAUSE_IF_PRESENT
:
1677 case OMP_CLAUSE_FINALIZE
:
1678 case OMP_CLAUSE_TASK_REDUCTION
:
1679 case OMP_CLAUSE_ALLOCATE
:
1682 case OMP_CLAUSE_ALIGNED
:
1683 decl
= OMP_CLAUSE_DECL (c
);
1684 if (is_global_var (decl
)
1685 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1686 install_var_local (decl
, ctx
);
1689 case OMP_CLAUSE__CONDTEMP_
:
1690 decl
= OMP_CLAUSE_DECL (c
);
1691 if (is_parallel_ctx (ctx
))
1693 install_var_field (decl
, false, 3, ctx
);
1694 install_var_local (decl
, ctx
);
1696 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1697 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
1698 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1699 install_var_local (decl
, ctx
);
1702 case OMP_CLAUSE__CACHE_
:
1703 case OMP_CLAUSE_NOHOST
:
1709 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1711 switch (OMP_CLAUSE_CODE (c
))
1713 case OMP_CLAUSE_LASTPRIVATE
:
1714 /* Let the corresponding firstprivate clause create
1716 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1717 scan_array_reductions
= true;
1718 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1722 case OMP_CLAUSE_FIRSTPRIVATE
:
1723 case OMP_CLAUSE_PRIVATE
:
1724 case OMP_CLAUSE_LINEAR
:
1725 case OMP_CLAUSE_IS_DEVICE_PTR
:
1726 decl
= OMP_CLAUSE_DECL (c
);
1727 if (is_variable_sized (decl
))
1729 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1730 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1731 && is_gimple_omp_offloaded (ctx
->stmt
))
1733 tree decl2
= DECL_VALUE_EXPR (decl
);
1734 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1735 decl2
= TREE_OPERAND (decl2
, 0);
1736 gcc_assert (DECL_P (decl2
));
1737 install_var_local (decl2
, ctx
);
1738 fixup_remapped_decl (decl2
, ctx
, false);
1740 install_var_local (decl
, ctx
);
1742 fixup_remapped_decl (decl
, ctx
,
1743 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1744 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1745 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1746 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1747 scan_array_reductions
= true;
1750 case OMP_CLAUSE_REDUCTION
:
1751 case OMP_CLAUSE_IN_REDUCTION
:
1752 decl
= OMP_CLAUSE_DECL (c
);
1753 if (TREE_CODE (decl
) != MEM_REF
&& !is_omp_target (ctx
->stmt
))
1755 if (is_variable_sized (decl
))
1756 install_var_local (decl
, ctx
);
1757 fixup_remapped_decl (decl
, ctx
, false);
1759 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1760 scan_array_reductions
= true;
1763 case OMP_CLAUSE_TASK_REDUCTION
:
1764 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1765 scan_array_reductions
= true;
1768 case OMP_CLAUSE_SHARED
:
1769 /* Ignore shared directives in teams construct inside of
1770 target construct. */
1771 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1772 && !is_host_teams_ctx (ctx
))
1774 decl
= OMP_CLAUSE_DECL (c
);
1775 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1777 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1779 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1782 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1783 install_var_field (decl
, by_ref
, 11, ctx
);
1786 fixup_remapped_decl (decl
, ctx
, false);
1789 case OMP_CLAUSE_MAP
:
1790 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1792 decl
= OMP_CLAUSE_DECL (c
);
1794 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1795 && (OMP_CLAUSE_MAP_KIND (c
)
1796 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1797 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1798 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1799 && varpool_node::get_create (decl
)->offloadable
)
1801 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1802 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1803 && is_omp_target (ctx
->stmt
)
1804 && !is_gimple_omp_offloaded (ctx
->stmt
))
1808 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1809 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1810 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1811 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1813 tree new_decl
= lookup_decl (decl
, ctx
);
1814 TREE_TYPE (new_decl
)
1815 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1817 else if (DECL_SIZE (decl
)
1818 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1820 tree decl2
= DECL_VALUE_EXPR (decl
);
1821 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1822 decl2
= TREE_OPERAND (decl2
, 0);
1823 gcc_assert (DECL_P (decl2
));
1824 fixup_remapped_decl (decl2
, ctx
, false);
1825 fixup_remapped_decl (decl
, ctx
, true);
1828 fixup_remapped_decl (decl
, ctx
, false);
1832 case OMP_CLAUSE_COPYPRIVATE
:
1833 case OMP_CLAUSE_COPYIN
:
1834 case OMP_CLAUSE_DEFAULT
:
1836 case OMP_CLAUSE_NUM_THREADS
:
1837 case OMP_CLAUSE_NUM_TEAMS
:
1838 case OMP_CLAUSE_THREAD_LIMIT
:
1839 case OMP_CLAUSE_DEVICE
:
1840 case OMP_CLAUSE_SCHEDULE
:
1841 case OMP_CLAUSE_DIST_SCHEDULE
:
1842 case OMP_CLAUSE_NOWAIT
:
1843 case OMP_CLAUSE_ORDERED
:
1844 case OMP_CLAUSE_COLLAPSE
:
1845 case OMP_CLAUSE_UNTIED
:
1846 case OMP_CLAUSE_FINAL
:
1847 case OMP_CLAUSE_MERGEABLE
:
1848 case OMP_CLAUSE_PROC_BIND
:
1849 case OMP_CLAUSE_SAFELEN
:
1850 case OMP_CLAUSE_SIMDLEN
:
1851 case OMP_CLAUSE_ALIGNED
:
1852 case OMP_CLAUSE_DEPEND
:
1853 case OMP_CLAUSE_DETACH
:
1854 case OMP_CLAUSE_ALLOCATE
:
1855 case OMP_CLAUSE__LOOPTEMP_
:
1856 case OMP_CLAUSE__REDUCTEMP_
:
1858 case OMP_CLAUSE_FROM
:
1859 case OMP_CLAUSE_PRIORITY
:
1860 case OMP_CLAUSE_GRAINSIZE
:
1861 case OMP_CLAUSE_NUM_TASKS
:
1862 case OMP_CLAUSE_THREADS
:
1863 case OMP_CLAUSE_SIMD
:
1864 case OMP_CLAUSE_NOGROUP
:
1865 case OMP_CLAUSE_DEFAULTMAP
:
1866 case OMP_CLAUSE_ORDER
:
1867 case OMP_CLAUSE_BIND
:
1868 case OMP_CLAUSE_USE_DEVICE_PTR
:
1869 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1870 case OMP_CLAUSE_NONTEMPORAL
:
1871 case OMP_CLAUSE_ASYNC
:
1872 case OMP_CLAUSE_WAIT
:
1873 case OMP_CLAUSE_NUM_GANGS
:
1874 case OMP_CLAUSE_NUM_WORKERS
:
1875 case OMP_CLAUSE_VECTOR_LENGTH
:
1876 case OMP_CLAUSE_GANG
:
1877 case OMP_CLAUSE_WORKER
:
1878 case OMP_CLAUSE_VECTOR
:
1879 case OMP_CLAUSE_INDEPENDENT
:
1880 case OMP_CLAUSE_AUTO
:
1881 case OMP_CLAUSE_SEQ
:
1882 case OMP_CLAUSE_TILE
:
1883 case OMP_CLAUSE__SIMT_
:
1884 case OMP_CLAUSE_IF_PRESENT
:
1885 case OMP_CLAUSE_FINALIZE
:
1886 case OMP_CLAUSE_FILTER
:
1887 case OMP_CLAUSE__CONDTEMP_
:
1890 case OMP_CLAUSE__CACHE_
:
1891 case OMP_CLAUSE_NOHOST
:
1897 gcc_checking_assert (!scan_array_reductions
1898 || !is_gimple_omp_oacc (ctx
->stmt
));
1899 if (scan_array_reductions
)
1901 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1902 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1903 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1904 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1905 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1907 omp_context
*rctx
= ctx
;
1908 if (is_omp_target (ctx
->stmt
))
1910 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), rctx
);
1911 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), rctx
);
1913 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1914 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1915 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1916 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1917 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1918 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1922 /* Create a new name for omp child function. Returns an identifier. */
1925 create_omp_child_function_name (bool task_copy
)
1927 return clone_function_name_numbered (current_function_decl
,
1928 task_copy
? "_omp_cpyfn" : "_omp_fn");
1931 /* Return true if CTX may belong to offloaded code: either if current function
1932 is offloaded, or any enclosing context corresponds to a target region. */
1935 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1937 if (cgraph_node::get (current_function_decl
)->offloadable
)
1939 for (; ctx
; ctx
= ctx
->outer
)
1940 if (is_gimple_omp_offloaded (ctx
->stmt
))
1945 /* Build a decl for the omp child function. It'll not contain a body
1946 yet, just the bare decl. */
1949 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1951 tree decl
, type
, name
, t
;
1953 name
= create_omp_child_function_name (task_copy
);
1955 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1956 ptr_type_node
, NULL_TREE
);
1958 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1960 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1962 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1965 ctx
->cb
.dst_fn
= decl
;
1967 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1969 TREE_STATIC (decl
) = 1;
1970 TREE_USED (decl
) = 1;
1971 DECL_ARTIFICIAL (decl
) = 1;
1972 DECL_IGNORED_P (decl
) = 0;
1973 TREE_PUBLIC (decl
) = 0;
1974 DECL_UNINLINABLE (decl
) = 1;
1975 DECL_EXTERNAL (decl
) = 0;
1976 DECL_CONTEXT (decl
) = NULL_TREE
;
1977 DECL_INITIAL (decl
) = make_node (BLOCK
);
1978 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1979 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1980 /* Remove omp declare simd attribute from the new attributes. */
1981 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1983 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1986 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1987 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1988 *p
= TREE_CHAIN (*p
);
1991 tree chain
= TREE_CHAIN (*p
);
1992 *p
= copy_node (*p
);
1993 p
= &TREE_CHAIN (*p
);
1997 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
1998 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
1999 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
2000 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
2001 DECL_FUNCTION_VERSIONED (decl
)
2002 = DECL_FUNCTION_VERSIONED (current_function_decl
);
2004 if (omp_maybe_offloaded_ctx (ctx
))
2006 cgraph_node::get_create (decl
)->offloadable
= 1;
2007 if (ENABLE_OFFLOADING
)
2008 g
->have_offload
= true;
2011 if (cgraph_node::get_create (decl
)->offloadable
)
2013 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
2014 ? "omp target entrypoint"
2015 : "omp declare target");
2016 if (lookup_attribute ("omp declare target",
2017 DECL_ATTRIBUTES (current_function_decl
)))
2019 if (is_gimple_omp_offloaded (ctx
->stmt
))
2020 DECL_ATTRIBUTES (decl
)
2021 = remove_attribute ("omp declare target",
2022 copy_list (DECL_ATTRIBUTES (decl
)));
2027 DECL_ATTRIBUTES (decl
)
2028 = tree_cons (get_identifier (target_attr
),
2029 NULL_TREE
, DECL_ATTRIBUTES (decl
));
2032 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
2033 RESULT_DECL
, NULL_TREE
, void_type_node
);
2034 DECL_ARTIFICIAL (t
) = 1;
2035 DECL_IGNORED_P (t
) = 1;
2036 DECL_CONTEXT (t
) = decl
;
2037 DECL_RESULT (decl
) = t
;
2039 tree data_name
= get_identifier (".omp_data_i");
2040 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
2042 DECL_ARTIFICIAL (t
) = 1;
2043 DECL_NAMELESS (t
) = 1;
2044 DECL_ARG_TYPE (t
) = ptr_type_node
;
2045 DECL_CONTEXT (t
) = current_function_decl
;
2047 TREE_READONLY (t
) = 1;
2048 DECL_ARGUMENTS (decl
) = t
;
2050 ctx
->receiver_decl
= t
;
2053 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
2054 PARM_DECL
, get_identifier (".omp_data_o"),
2056 DECL_ARTIFICIAL (t
) = 1;
2057 DECL_NAMELESS (t
) = 1;
2058 DECL_ARG_TYPE (t
) = ptr_type_node
;
2059 DECL_CONTEXT (t
) = current_function_decl
;
2061 TREE_ADDRESSABLE (t
) = 1;
2062 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
2063 DECL_ARGUMENTS (decl
) = t
;
2066 /* Allocate memory for the function structure. The call to
2067 allocate_struct_function clobbers CFUN, so we need to restore
2069 push_struct_function (decl
);
2070 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
2071 init_tree_ssa (cfun
);
2075 /* Callback for walk_gimple_seq. Check if combined parallel
2076 contains gimple_omp_for_combined_into_p OMP_FOR. */
2079 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
2080 bool *handled_ops_p
,
2081 struct walk_stmt_info
*wi
)
2083 gimple
*stmt
= gsi_stmt (*gsi_p
);
2085 *handled_ops_p
= true;
2086 switch (gimple_code (stmt
))
2090 case GIMPLE_OMP_FOR
:
2091 if (gimple_omp_for_combined_into_p (stmt
)
2092 && gimple_omp_for_kind (stmt
)
2093 == *(const enum gf_mask
*) (wi
->info
))
2096 return integer_zero_node
;
2105 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2108 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
2109 omp_context
*outer_ctx
)
2111 struct walk_stmt_info wi
;
2113 memset (&wi
, 0, sizeof (wi
));
2115 wi
.info
= (void *) &msk
;
2116 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
2117 if (wi
.info
!= (void *) &msk
)
2119 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
2120 struct omp_for_data fd
;
2121 omp_extract_for_data (for_stmt
, &fd
, NULL
);
2122 /* We need two temporaries with fd.loop.v type (istart/iend)
2123 and then (fd.collapse - 1) temporaries with the same
2124 type for count2 ... countN-1 vars if not constant. */
2125 size_t count
= 2, i
;
2126 tree type
= fd
.iter_type
;
2128 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
2130 count
+= fd
.collapse
- 1;
2131 /* If there are lastprivate clauses on the inner
2132 GIMPLE_OMP_FOR, add one more temporaries for the total number
2133 of iterations (product of count1 ... countN-1). */
2134 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
2135 OMP_CLAUSE_LASTPRIVATE
)
2136 || (msk
== GF_OMP_FOR_KIND_FOR
2137 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2138 OMP_CLAUSE_LASTPRIVATE
)))
2140 tree temp
= create_tmp_var (type
);
2141 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2142 OMP_CLAUSE__LOOPTEMP_
);
2143 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2144 OMP_CLAUSE_DECL (c
) = temp
;
2145 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2146 gimple_omp_taskreg_set_clauses (stmt
, c
);
2149 && fd
.last_nonrect
== fd
.first_nonrect
+ 1)
2150 if (tree v
= gimple_omp_for_index (for_stmt
, fd
.last_nonrect
))
2151 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
2153 v
= gimple_omp_for_index (for_stmt
, fd
.first_nonrect
);
2154 tree type2
= TREE_TYPE (v
);
2156 for (i
= 0; i
< 3; i
++)
2158 tree temp
= create_tmp_var (type2
);
2159 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2160 OMP_CLAUSE__LOOPTEMP_
);
2161 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2162 OMP_CLAUSE_DECL (c
) = temp
;
2163 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2164 gimple_omp_taskreg_set_clauses (stmt
, c
);
2168 for (i
= 0; i
< count
; i
++)
2170 tree temp
= create_tmp_var (type
);
2171 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
2172 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2173 OMP_CLAUSE_DECL (c
) = temp
;
2174 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2175 gimple_omp_taskreg_set_clauses (stmt
, c
);
2178 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
2179 && omp_find_clause (gimple_omp_task_clauses (stmt
),
2180 OMP_CLAUSE_REDUCTION
))
2182 tree type
= build_pointer_type (pointer_sized_int_node
);
2183 tree temp
= create_tmp_var (type
);
2184 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2185 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2186 OMP_CLAUSE_DECL (c
) = temp
;
2187 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
2188 gimple_omp_task_set_clauses (stmt
, c
);
2192 /* Scan an OpenMP parallel directive. */
2195 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2199 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
2201 /* Ignore parallel directives with empty bodies, unless there
2202 are copyin clauses. */
2204 && empty_body_p (gimple_omp_body (stmt
))
2205 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2206 OMP_CLAUSE_COPYIN
) == NULL
)
2208 gsi_replace (gsi
, gimple_build_nop (), false);
2212 if (gimple_omp_parallel_combined_p (stmt
))
2213 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
2214 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2215 OMP_CLAUSE_REDUCTION
);
2216 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
2217 if (OMP_CLAUSE_REDUCTION_TASK (c
))
2219 tree type
= build_pointer_type (pointer_sized_int_node
);
2220 tree temp
= create_tmp_var (type
);
2221 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2223 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2224 OMP_CLAUSE_DECL (c
) = temp
;
2225 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
2226 gimple_omp_parallel_set_clauses (stmt
, c
);
2229 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
2232 ctx
= new_omp_context (stmt
, outer_ctx
);
2233 taskreg_contexts
.safe_push (ctx
);
2234 if (taskreg_nesting_level
> 1)
2235 ctx
->is_nested
= true;
2236 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2237 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2238 name
= create_tmp_var_name (".omp_data_s");
2239 name
= build_decl (gimple_location (stmt
),
2240 TYPE_DECL
, name
, ctx
->record_type
);
2241 DECL_ARTIFICIAL (name
) = 1;
2242 DECL_NAMELESS (name
) = 1;
2243 TYPE_NAME (ctx
->record_type
) = name
;
2244 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2245 create_omp_child_function (ctx
, false);
2246 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2248 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
2249 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2251 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2252 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2255 /* Scan an OpenMP task directive. */
2258 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2262 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
2264 /* Ignore task directives with empty bodies, unless they have depend
2267 && gimple_omp_body (stmt
)
2268 && empty_body_p (gimple_omp_body (stmt
))
2269 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
2271 gsi_replace (gsi
, gimple_build_nop (), false);
2275 if (gimple_omp_task_taskloop_p (stmt
))
2276 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
2278 ctx
= new_omp_context (stmt
, outer_ctx
);
2280 if (gimple_omp_task_taskwait_p (stmt
))
2282 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2286 taskreg_contexts
.safe_push (ctx
);
2287 if (taskreg_nesting_level
> 1)
2288 ctx
->is_nested
= true;
2289 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2290 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2291 name
= create_tmp_var_name (".omp_data_s");
2292 name
= build_decl (gimple_location (stmt
),
2293 TYPE_DECL
, name
, ctx
->record_type
);
2294 DECL_ARTIFICIAL (name
) = 1;
2295 DECL_NAMELESS (name
) = 1;
2296 TYPE_NAME (ctx
->record_type
) = name
;
2297 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2298 create_omp_child_function (ctx
, false);
2299 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2301 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2303 if (ctx
->srecord_type
)
2305 name
= create_tmp_var_name (".omp_data_a");
2306 name
= build_decl (gimple_location (stmt
),
2307 TYPE_DECL
, name
, ctx
->srecord_type
);
2308 DECL_ARTIFICIAL (name
) = 1;
2309 DECL_NAMELESS (name
) = 1;
2310 TYPE_NAME (ctx
->srecord_type
) = name
;
2311 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
2312 create_omp_child_function (ctx
, true);
2315 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2317 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2319 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2320 t
= build_int_cst (long_integer_type_node
, 0);
2321 gimple_omp_task_set_arg_size (stmt
, t
);
2322 t
= build_int_cst (long_integer_type_node
, 1);
2323 gimple_omp_task_set_arg_align (stmt
, t
);
2327 /* Helper function for finish_taskreg_scan, called through walk_tree.
2328 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2329 tree, replace it in the expression. */
2332 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
2336 omp_context
*ctx
= (omp_context
*) data
;
2337 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
2340 if (DECL_HAS_VALUE_EXPR_P (t
))
2341 t
= unshare_expr (DECL_VALUE_EXPR (t
));
2346 else if (IS_TYPE_OR_DECL_P (*tp
))
2351 /* If any decls have been made addressable during scan_omp,
2352 adjust their fields if needed, and layout record types
2353 of parallel/task constructs. */
2356 finish_taskreg_scan (omp_context
*ctx
)
2358 if (ctx
->record_type
== NULL_TREE
)
2361 /* If any task_shared_vars were needed, verify all
2362 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2363 statements if use_pointer_for_field hasn't changed
2364 because of that. If it did, update field types now. */
2365 if (task_shared_vars
)
2369 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2370 c
; c
= OMP_CLAUSE_CHAIN (c
))
2371 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2372 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2374 tree decl
= OMP_CLAUSE_DECL (c
);
2376 /* Global variables don't need to be copied,
2377 the receiver side will use them directly. */
2378 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2380 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
2381 || !use_pointer_for_field (decl
, ctx
))
2383 tree field
= lookup_field (decl
, ctx
);
2384 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2385 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2387 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2388 TREE_THIS_VOLATILE (field
) = 0;
2389 DECL_USER_ALIGN (field
) = 0;
2390 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2391 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2392 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
2393 if (ctx
->srecord_type
)
2395 tree sfield
= lookup_sfield (decl
, ctx
);
2396 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2397 TREE_THIS_VOLATILE (sfield
) = 0;
2398 DECL_USER_ALIGN (sfield
) = 0;
2399 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2400 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2401 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
2406 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2408 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2409 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2412 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2413 expects to find it at the start of data. */
2414 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2415 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
2419 *p
= DECL_CHAIN (*p
);
2423 p
= &DECL_CHAIN (*p
);
2424 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2425 TYPE_FIELDS (ctx
->record_type
) = f
;
2427 layout_type (ctx
->record_type
);
2428 fixup_child_record_type (ctx
);
2430 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2432 layout_type (ctx
->record_type
);
2433 fixup_child_record_type (ctx
);
2437 location_t loc
= gimple_location (ctx
->stmt
);
2438 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2440 = omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
2442 /* Move VLA fields to the end. */
2443 p
= &TYPE_FIELDS (ctx
->record_type
);
2445 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2446 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2449 *p
= TREE_CHAIN (*p
);
2450 TREE_CHAIN (*q
) = NULL_TREE
;
2451 q
= &TREE_CHAIN (*q
);
2454 p
= &DECL_CHAIN (*p
);
2456 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2458 /* Move fields corresponding to first and second _looptemp_
2459 clause first. There are filled by GOMP_taskloop
2460 and thus need to be in specific positions. */
2461 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2462 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2463 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2464 OMP_CLAUSE__LOOPTEMP_
);
2465 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2466 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2467 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2468 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
2469 p
= &TYPE_FIELDS (ctx
->record_type
);
2471 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2472 *p
= DECL_CHAIN (*p
);
2474 p
= &DECL_CHAIN (*p
);
2475 DECL_CHAIN (f1
) = f2
;
2478 DECL_CHAIN (f2
) = f3
;
2479 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2482 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2483 TYPE_FIELDS (ctx
->record_type
) = f1
;
2484 if (ctx
->srecord_type
)
2486 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2487 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2489 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2490 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2492 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2493 *p
= DECL_CHAIN (*p
);
2495 p
= &DECL_CHAIN (*p
);
2496 DECL_CHAIN (f1
) = f2
;
2497 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2500 DECL_CHAIN (f2
) = f3
;
2501 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2504 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2505 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2512 /* Look for a firstprivate clause with the detach event handle. */
2513 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2514 c
; c
= OMP_CLAUSE_CHAIN (c
))
2516 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
2518 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c
), ctx
)
2519 == OMP_CLAUSE_DECL (detach_clause
))
2524 field
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2526 /* Move field corresponding to the detach clause first.
2527 This is filled by GOMP_task and needs to be in a
2528 specific position. */
2529 p
= &TYPE_FIELDS (ctx
->record_type
);
2532 *p
= DECL_CHAIN (*p
);
2534 p
= &DECL_CHAIN (*p
);
2535 DECL_CHAIN (field
) = TYPE_FIELDS (ctx
->record_type
);
2536 TYPE_FIELDS (ctx
->record_type
) = field
;
2537 if (ctx
->srecord_type
)
2539 field
= lookup_sfield (OMP_CLAUSE_DECL (c
), ctx
);
2540 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2543 *p
= DECL_CHAIN (*p
);
2545 p
= &DECL_CHAIN (*p
);
2546 DECL_CHAIN (field
) = TYPE_FIELDS (ctx
->srecord_type
);
2547 TYPE_FIELDS (ctx
->srecord_type
) = field
;
2550 layout_type (ctx
->record_type
);
2551 fixup_child_record_type (ctx
);
2552 if (ctx
->srecord_type
)
2553 layout_type (ctx
->srecord_type
);
2554 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2555 TYPE_SIZE_UNIT (ctx
->record_type
));
2556 if (TREE_CODE (t
) != INTEGER_CST
)
2558 t
= unshare_expr (t
);
2559 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2561 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2562 t
= build_int_cst (long_integer_type_node
,
2563 TYPE_ALIGN_UNIT (ctx
->record_type
));
2564 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2568 /* Find the enclosing offload context. */
2570 static omp_context
*
2571 enclosing_target_ctx (omp_context
*ctx
)
2573 for (; ctx
; ctx
= ctx
->outer
)
2574 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2580 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2582 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2585 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2587 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2589 gimple
*stmt
= ctx
->stmt
;
2590 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2591 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2598 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2599 (This doesn't include OpenACC 'kernels' decomposed parts.)
2600 Until kernels handling moves to use the same loop indirection
2601 scheme as parallel, we need to do this checking early. */
2604 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2606 bool checking
= true;
2607 unsigned outer_mask
= 0;
2608 unsigned this_mask
= 0;
2609 bool has_seq
= false, has_auto
= false;
2612 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2616 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2618 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2621 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2623 switch (OMP_CLAUSE_CODE (c
))
2625 case OMP_CLAUSE_GANG
:
2626 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2628 case OMP_CLAUSE_WORKER
:
2629 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2631 case OMP_CLAUSE_VECTOR
:
2632 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2634 case OMP_CLAUSE_SEQ
:
2637 case OMP_CLAUSE_AUTO
:
2647 if (has_seq
&& (this_mask
|| has_auto
))
2648 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2649 " OpenACC loop specifiers");
2650 else if (has_auto
&& this_mask
)
2651 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2652 " OpenACC loop specifiers");
2654 if (this_mask
& outer_mask
)
2655 error_at (gimple_location (stmt
), "inner loop uses same"
2656 " OpenACC parallelism as containing loop");
2659 return outer_mask
| this_mask
;
2662 /* Scan a GIMPLE_OMP_FOR. */
2664 static omp_context
*
2665 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2669 tree clauses
= gimple_omp_for_clauses (stmt
);
2671 ctx
= new_omp_context (stmt
, outer_ctx
);
2673 if (is_gimple_omp_oacc (stmt
))
2675 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2677 if (!(tgt
&& is_oacc_kernels (tgt
)))
2678 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2681 switch (OMP_CLAUSE_CODE (c
))
2683 case OMP_CLAUSE_GANG
:
2684 c_op0
= OMP_CLAUSE_GANG_EXPR (c
);
2687 case OMP_CLAUSE_WORKER
:
2688 c_op0
= OMP_CLAUSE_WORKER_EXPR (c
);
2691 case OMP_CLAUSE_VECTOR
:
2692 c_op0
= OMP_CLAUSE_VECTOR_EXPR (c
);
2701 /* By construction, this is impossible for OpenACC 'kernels'
2702 decomposed parts. */
2703 gcc_assert (!(tgt
&& is_oacc_kernels_decomposed_part (tgt
)));
2705 error_at (OMP_CLAUSE_LOCATION (c
),
2706 "argument not permitted on %qs clause",
2707 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
2709 inform (gimple_location (tgt
->stmt
),
2710 "enclosing parent compute construct");
2711 else if (oacc_get_fn_attrib (current_function_decl
))
2712 inform (DECL_SOURCE_LOCATION (current_function_decl
),
2713 "enclosing routine");
2719 if (tgt
&& is_oacc_kernels (tgt
))
2720 check_oacc_kernel_gwv (stmt
, ctx
);
2722 /* Collect all variables named in reductions on this loop. Ensure
2723 that, if this loop has a reduction on some variable v, and there is
2724 a reduction on v somewhere in an outer context, then there is a
2725 reduction on v on all intervening loops as well. */
2726 tree local_reduction_clauses
= NULL
;
2727 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2729 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
2730 local_reduction_clauses
2731 = tree_cons (NULL
, c
, local_reduction_clauses
);
2733 if (ctx
->outer_reduction_clauses
== NULL
&& ctx
->outer
!= NULL
)
2734 ctx
->outer_reduction_clauses
2735 = chainon (unshare_expr (ctx
->outer
->local_reduction_clauses
),
2736 ctx
->outer
->outer_reduction_clauses
);
2737 tree outer_reduction_clauses
= ctx
->outer_reduction_clauses
;
2738 tree local_iter
= local_reduction_clauses
;
2739 for (; local_iter
; local_iter
= TREE_CHAIN (local_iter
))
2741 tree local_clause
= TREE_VALUE (local_iter
);
2742 tree local_var
= OMP_CLAUSE_DECL (local_clause
);
2743 tree_code local_op
= OMP_CLAUSE_REDUCTION_CODE (local_clause
);
2744 bool have_outer_reduction
= false;
2745 tree ctx_iter
= outer_reduction_clauses
;
2746 for (; ctx_iter
; ctx_iter
= TREE_CHAIN (ctx_iter
))
2748 tree outer_clause
= TREE_VALUE (ctx_iter
);
2749 tree outer_var
= OMP_CLAUSE_DECL (outer_clause
);
2750 tree_code outer_op
= OMP_CLAUSE_REDUCTION_CODE (outer_clause
);
2751 if (outer_var
== local_var
&& outer_op
!= local_op
)
2753 warning_at (OMP_CLAUSE_LOCATION (local_clause
), 0,
2754 "conflicting reduction operations for %qE",
2756 inform (OMP_CLAUSE_LOCATION (outer_clause
),
2757 "location of the previous reduction for %qE",
2760 if (outer_var
== local_var
)
2762 have_outer_reduction
= true;
2766 if (have_outer_reduction
)
2768 /* There is a reduction on outer_var both on this loop and on
2769 some enclosing loop. Walk up the context tree until such a
2770 loop with a reduction on outer_var is found, and complain
2771 about all intervening loops that do not have such a
2773 struct omp_context
*curr_loop
= ctx
->outer
;
2775 while (curr_loop
!= NULL
)
2777 tree curr_iter
= curr_loop
->local_reduction_clauses
;
2778 for (; curr_iter
; curr_iter
= TREE_CHAIN (curr_iter
))
2780 tree curr_clause
= TREE_VALUE (curr_iter
);
2781 tree curr_var
= OMP_CLAUSE_DECL (curr_clause
);
2782 if (curr_var
== local_var
)
2789 warning_at (gimple_location (curr_loop
->stmt
), 0,
2790 "nested loop in reduction needs "
2791 "reduction clause for %qE",
2795 curr_loop
= curr_loop
->outer
;
2799 ctx
->local_reduction_clauses
= local_reduction_clauses
;
2800 ctx
->outer_reduction_clauses
2801 = chainon (unshare_expr (ctx
->local_reduction_clauses
),
2802 ctx
->outer_reduction_clauses
);
2804 if (tgt
&& is_oacc_kernels (tgt
))
2806 /* Strip out reductions, as they are not handled yet. */
2807 tree
*prev_ptr
= &clauses
;
2809 while (tree probe
= *prev_ptr
)
2811 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2813 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2814 *prev_ptr
= *next_ptr
;
2816 prev_ptr
= next_ptr
;
2819 gimple_omp_for_set_clauses (stmt
, clauses
);
2823 scan_sharing_clauses (clauses
, ctx
);
2825 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2826 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2828 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2829 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2830 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2831 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2833 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2837 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2840 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2841 omp_context
*outer_ctx
)
2843 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2844 gsi_replace (gsi
, bind
, false);
2845 gimple_seq seq
= NULL
;
2846 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2847 tree cond
= create_tmp_var_raw (integer_type_node
);
2848 DECL_CONTEXT (cond
) = current_function_decl
;
2849 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2850 gimple_bind_set_vars (bind
, cond
);
2851 gimple_call_set_lhs (g
, cond
);
2852 gimple_seq_add_stmt (&seq
, g
);
2853 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2854 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2855 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2856 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2857 gimple_seq_add_stmt (&seq
, g
);
2858 g
= gimple_build_label (lab1
);
2859 gimple_seq_add_stmt (&seq
, g
);
2860 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2861 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2862 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2863 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2864 gimple_omp_for_set_clauses (new_stmt
, clause
);
2865 gimple_seq_add_stmt (&seq
, new_stmt
);
2866 g
= gimple_build_goto (lab3
);
2867 gimple_seq_add_stmt (&seq
, g
);
2868 g
= gimple_build_label (lab2
);
2869 gimple_seq_add_stmt (&seq
, g
);
2870 gimple_seq_add_stmt (&seq
, stmt
);
2871 g
= gimple_build_label (lab3
);
2872 gimple_seq_add_stmt (&seq
, g
);
2873 gimple_bind_set_body (bind
, seq
);
2875 scan_omp_for (new_stmt
, outer_ctx
);
2876 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2879 static tree
omp_find_scan (gimple_stmt_iterator
*, bool *,
2880 struct walk_stmt_info
*);
2881 static omp_context
*maybe_lookup_ctx (gimple
*);
2883 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2884 for scan phase loop. */
2887 scan_omp_simd_scan (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2888 omp_context
*outer_ctx
)
2890 /* The only change between inclusive and exclusive scan will be
2891 within the first simd loop, so just use inclusive in the
2892 worksharing loop. */
2893 outer_ctx
->scan_inclusive
= true;
2894 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_INCLUSIVE
);
2895 OMP_CLAUSE_DECL (c
) = integer_zero_node
;
2897 gomp_scan
*input_stmt
= gimple_build_omp_scan (NULL
, NULL_TREE
);
2898 gomp_scan
*scan_stmt
= gimple_build_omp_scan (NULL
, c
);
2899 gsi_replace (gsi
, input_stmt
, false);
2900 gimple_seq input_body
= NULL
;
2901 gimple_seq_add_stmt (&input_body
, stmt
);
2902 gsi_insert_after (gsi
, scan_stmt
, GSI_NEW_STMT
);
2904 gimple_stmt_iterator input1_gsi
= gsi_none ();
2905 struct walk_stmt_info wi
;
2906 memset (&wi
, 0, sizeof (wi
));
2908 wi
.info
= (void *) &input1_gsi
;
2909 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), omp_find_scan
, NULL
, &wi
);
2910 gcc_assert (!gsi_end_p (input1_gsi
));
2912 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
2913 gsi_next (&input1_gsi
);
2914 gimple
*scan_stmt1
= gsi_stmt (input1_gsi
);
2915 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
2916 c
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (scan_stmt1
));
2917 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2918 std::swap (input_stmt1
, scan_stmt1
);
2920 gimple_seq input_body1
= gimple_omp_body (input_stmt1
);
2921 gimple_omp_set_body (input_stmt1
, NULL
);
2923 gimple_seq scan_body
= copy_gimple_seq_and_replace_locals (stmt
);
2924 gomp_for
*new_stmt
= as_a
<gomp_for
*> (scan_body
);
2926 gimple_omp_set_body (input_stmt1
, input_body1
);
2927 gimple_omp_set_body (scan_stmt1
, NULL
);
2929 gimple_stmt_iterator input2_gsi
= gsi_none ();
2930 memset (&wi
, 0, sizeof (wi
));
2932 wi
.info
= (void *) &input2_gsi
;
2933 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt
), omp_find_scan
,
2935 gcc_assert (!gsi_end_p (input2_gsi
));
2937 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
2938 gsi_next (&input2_gsi
);
2939 gimple
*scan_stmt2
= gsi_stmt (input2_gsi
);
2940 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
2941 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2942 std::swap (input_stmt2
, scan_stmt2
);
2944 gimple_omp_set_body (input_stmt2
, NULL
);
2946 gimple_omp_set_body (input_stmt
, input_body
);
2947 gimple_omp_set_body (scan_stmt
, scan_body
);
2949 omp_context
*ctx
= new_omp_context (input_stmt
, outer_ctx
);
2950 scan_omp (gimple_omp_body_ptr (input_stmt
), ctx
);
2952 ctx
= new_omp_context (scan_stmt
, outer_ctx
);
2953 scan_omp (gimple_omp_body_ptr (scan_stmt
), ctx
);
2955 maybe_lookup_ctx (new_stmt
)->for_simd_scan_phase
= true;
2958 /* Scan an OpenMP sections directive. */
2961 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2965 ctx
= new_omp_context (stmt
, outer_ctx
);
2966 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2967 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2970 /* Scan an OpenMP single directive. */
2973 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2978 ctx
= new_omp_context (stmt
, outer_ctx
);
2979 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2980 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2981 name
= create_tmp_var_name (".omp_copy_s");
2982 name
= build_decl (gimple_location (stmt
),
2983 TYPE_DECL
, name
, ctx
->record_type
);
2984 TYPE_NAME (ctx
->record_type
) = name
;
2986 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2987 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2989 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2990 ctx
->record_type
= NULL
;
2992 layout_type (ctx
->record_type
);
2995 /* Scan a GIMPLE_OMP_TARGET. */
2998 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
3002 bool offloaded
= is_gimple_omp_offloaded (stmt
);
3003 tree clauses
= gimple_omp_target_clauses (stmt
);
3005 ctx
= new_omp_context (stmt
, outer_ctx
);
3006 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
3007 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
3008 name
= create_tmp_var_name (".omp_data_t");
3009 name
= build_decl (gimple_location (stmt
),
3010 TYPE_DECL
, name
, ctx
->record_type
);
3011 DECL_ARTIFICIAL (name
) = 1;
3012 DECL_NAMELESS (name
) = 1;
3013 TYPE_NAME (ctx
->record_type
) = name
;
3014 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
3018 create_omp_child_function (ctx
, false);
3019 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
3022 scan_sharing_clauses (clauses
, ctx
);
3023 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3025 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3026 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
3029 TYPE_FIELDS (ctx
->record_type
)
3030 = nreverse (TYPE_FIELDS (ctx
->record_type
));
3033 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
3034 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
3036 field
= DECL_CHAIN (field
))
3037 gcc_assert (DECL_ALIGN (field
) == align
);
3039 layout_type (ctx
->record_type
);
3041 fixup_child_record_type (ctx
);
3044 if (ctx
->teams_nested_p
&& ctx
->nonteams_nested_p
)
3046 error_at (gimple_location (stmt
),
3047 "%<target%> construct with nested %<teams%> construct "
3048 "contains directives outside of the %<teams%> construct");
3049 gimple_omp_set_body (stmt
, gimple_build_bind (NULL
, NULL
, NULL
));
3053 /* Scan an OpenMP teams directive. */
3056 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
3058 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
3060 if (!gimple_omp_teams_host (stmt
))
3062 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
3063 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3066 taskreg_contexts
.safe_push (ctx
);
3067 gcc_assert (taskreg_nesting_level
== 1);
3068 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
3069 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
3070 tree name
= create_tmp_var_name (".omp_data_s");
3071 name
= build_decl (gimple_location (stmt
),
3072 TYPE_DECL
, name
, ctx
->record_type
);
3073 DECL_ARTIFICIAL (name
) = 1;
3074 DECL_NAMELESS (name
) = 1;
3075 TYPE_NAME (ctx
->record_type
) = name
;
3076 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
3077 create_omp_child_function (ctx
, false);
3078 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
3080 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
3081 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3083 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3084 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
3087 /* Check nesting restrictions. */
3089 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
3093 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3094 inside an OpenACC CTX. */
3095 if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3096 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
)
3097 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3099 else if (!(is_gimple_omp (stmt
)
3100 && is_gimple_omp_oacc (stmt
)))
3102 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3104 error_at (gimple_location (stmt
),
3105 "non-OpenACC construct inside of OpenACC routine");
3109 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
3110 if (is_gimple_omp (octx
->stmt
)
3111 && is_gimple_omp_oacc (octx
->stmt
))
3113 error_at (gimple_location (stmt
),
3114 "non-OpenACC construct inside of OpenACC region");
3121 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
3122 && gimple_omp_target_kind (ctx
->stmt
) == GF_OMP_TARGET_KIND_REGION
)
3124 c
= omp_find_clause (gimple_omp_target_clauses (ctx
->stmt
),
3126 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
3128 error_at (gimple_location (stmt
),
3129 "OpenMP constructs are not allowed in target region "
3130 "with %<ancestor%>");
3134 if (gimple_code (stmt
) == GIMPLE_OMP_TEAMS
&& !ctx
->teams_nested_p
)
3135 ctx
->teams_nested_p
= true;
3137 ctx
->nonteams_nested_p
= true;
3139 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
3141 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
3143 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3144 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
3148 if (ctx
->order_concurrent
3149 && (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
3150 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3151 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
3153 error_at (gimple_location (stmt
),
3154 "OpenMP constructs other than %<parallel%>, %<loop%>"
3155 " or %<simd%> may not be nested inside a region with"
3156 " the %<order(concurrent)%> clause");
3159 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
3161 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3162 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3164 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
3165 && (ctx
->outer
== NULL
3166 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
3167 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
3168 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
3169 != GF_OMP_FOR_KIND_FOR
)
3170 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
3172 error_at (gimple_location (stmt
),
3173 "%<ordered simd threads%> must be closely "
3174 "nested inside of %<%s simd%> region",
3175 lang_GNU_Fortran () ? "do" : "for");
3181 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3182 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
3183 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
3185 else if (gimple_code (stmt
) == GIMPLE_OMP_FOR
3186 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
3188 error_at (gimple_location (stmt
),
3189 "OpenMP constructs other than "
3190 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3191 "not be nested inside %<simd%> region");
3194 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
3196 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
3197 || (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
3198 && omp_find_clause (gimple_omp_for_clauses (stmt
),
3199 OMP_CLAUSE_BIND
) == NULL_TREE
))
3200 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
3202 error_at (gimple_location (stmt
),
3203 "only %<distribute%>, %<parallel%> or %<loop%> "
3204 "regions are allowed to be strictly nested inside "
3205 "%<teams%> region");
3209 else if (ctx
->order_concurrent
3210 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
3211 && (gimple_code (stmt
) != GIMPLE_OMP_FOR
3212 || gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_SIMD
)
3213 && gimple_code (stmt
) != GIMPLE_OMP_SCAN
)
3216 error_at (gimple_location (stmt
),
3217 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3218 "%<simd%> may not be nested inside a %<loop%> region");
3220 error_at (gimple_location (stmt
),
3221 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3222 "%<simd%> may not be nested inside a region with "
3223 "the %<order(concurrent)%> clause");
3227 switch (gimple_code (stmt
))
3229 case GIMPLE_OMP_FOR
:
3230 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
)
3232 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
3234 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
3236 error_at (gimple_location (stmt
),
3237 "%<distribute%> region must be strictly nested "
3238 "inside %<teams%> construct");
3243 /* We split taskloop into task and nested taskloop in it. */
3244 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3246 /* For now, hope this will change and loop bind(parallel) will not
3247 be allowed in lots of contexts. */
3248 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
3249 && omp_find_clause (gimple_omp_for_clauses (stmt
), OMP_CLAUSE_BIND
))
3251 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
3256 switch (gimple_code (ctx
->stmt
))
3258 case GIMPLE_OMP_FOR
:
3259 ok
= (gimple_omp_for_kind (ctx
->stmt
)
3260 == GF_OMP_FOR_KIND_OACC_LOOP
);
3263 case GIMPLE_OMP_TARGET
:
3264 switch (gimple_omp_target_kind (ctx
->stmt
))
3266 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3267 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3268 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3269 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3270 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3281 else if (oacc_get_fn_attrib (current_function_decl
))
3285 error_at (gimple_location (stmt
),
3286 "OpenACC loop directive must be associated with"
3287 " an OpenACC compute region");
3293 if (is_gimple_call (stmt
)
3294 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3295 == BUILT_IN_GOMP_CANCEL
3296 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3297 == BUILT_IN_GOMP_CANCELLATION_POINT
))
3299 const char *bad
= NULL
;
3300 const char *kind
= NULL
;
3301 const char *construct
3302 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3303 == BUILT_IN_GOMP_CANCEL
)
3305 : "cancellation point";
3308 error_at (gimple_location (stmt
), "orphaned %qs construct",
3312 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
3313 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
3317 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
3319 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3320 == BUILT_IN_GOMP_CANCEL
3321 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3322 ctx
->cancellable
= true;
3326 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3327 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
3329 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3330 == BUILT_IN_GOMP_CANCEL
3331 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3333 ctx
->cancellable
= true;
3334 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3336 warning_at (gimple_location (stmt
), 0,
3337 "%<cancel for%> inside "
3338 "%<nowait%> for construct");
3339 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3340 OMP_CLAUSE_ORDERED
))
3341 warning_at (gimple_location (stmt
), 0,
3342 "%<cancel for%> inside "
3343 "%<ordered%> for construct");
3348 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
3349 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
3351 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3352 == BUILT_IN_GOMP_CANCEL
3353 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3355 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
3357 ctx
->cancellable
= true;
3358 if (omp_find_clause (gimple_omp_sections_clauses
3361 warning_at (gimple_location (stmt
), 0,
3362 "%<cancel sections%> inside "
3363 "%<nowait%> sections construct");
3367 gcc_assert (ctx
->outer
3368 && gimple_code (ctx
->outer
->stmt
)
3369 == GIMPLE_OMP_SECTIONS
);
3370 ctx
->outer
->cancellable
= true;
3371 if (omp_find_clause (gimple_omp_sections_clauses
3374 warning_at (gimple_location (stmt
), 0,
3375 "%<cancel sections%> inside "
3376 "%<nowait%> sections construct");
3382 if (!is_task_ctx (ctx
)
3383 && (!is_taskloop_ctx (ctx
)
3384 || ctx
->outer
== NULL
3385 || !is_task_ctx (ctx
->outer
)))
3389 for (omp_context
*octx
= ctx
->outer
;
3390 octx
; octx
= octx
->outer
)
3392 switch (gimple_code (octx
->stmt
))
3394 case GIMPLE_OMP_TASKGROUP
:
3396 case GIMPLE_OMP_TARGET
:
3397 if (gimple_omp_target_kind (octx
->stmt
)
3398 != GF_OMP_TARGET_KIND_REGION
)
3401 case GIMPLE_OMP_PARALLEL
:
3402 case GIMPLE_OMP_TEAMS
:
3403 error_at (gimple_location (stmt
),
3404 "%<%s taskgroup%> construct not closely "
3405 "nested inside of %<taskgroup%> region",
3408 case GIMPLE_OMP_TASK
:
3409 if (gimple_omp_task_taskloop_p (octx
->stmt
)
3411 && is_taskloop_ctx (octx
->outer
))
3414 = gimple_omp_for_clauses (octx
->outer
->stmt
);
3415 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
3424 ctx
->cancellable
= true;
3429 error_at (gimple_location (stmt
), "invalid arguments");
3434 error_at (gimple_location (stmt
),
3435 "%<%s %s%> construct not closely nested inside of %qs",
3436 construct
, kind
, bad
);
3441 case GIMPLE_OMP_SECTIONS
:
3442 case GIMPLE_OMP_SINGLE
:
3443 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3444 switch (gimple_code (ctx
->stmt
))
3446 case GIMPLE_OMP_FOR
:
3447 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3448 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3451 case GIMPLE_OMP_SECTIONS
:
3452 case GIMPLE_OMP_SINGLE
:
3453 case GIMPLE_OMP_ORDERED
:
3454 case GIMPLE_OMP_MASTER
:
3455 case GIMPLE_OMP_MASKED
:
3456 case GIMPLE_OMP_TASK
:
3457 case GIMPLE_OMP_CRITICAL
:
3458 if (is_gimple_call (stmt
))
3460 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3461 != BUILT_IN_GOMP_BARRIER
)
3463 error_at (gimple_location (stmt
),
3464 "barrier region may not be closely nested inside "
3465 "of work-sharing, %<loop%>, %<critical%>, "
3466 "%<ordered%>, %<master%>, %<masked%>, explicit "
3467 "%<task%> or %<taskloop%> region");
3470 error_at (gimple_location (stmt
),
3471 "work-sharing region may not be closely nested inside "
3472 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3473 "%<master%>, %<masked%>, explicit %<task%> or "
3474 "%<taskloop%> region");
3476 case GIMPLE_OMP_PARALLEL
:
3477 case GIMPLE_OMP_TEAMS
:
3479 case GIMPLE_OMP_TARGET
:
3480 if (gimple_omp_target_kind (ctx
->stmt
)
3481 == GF_OMP_TARGET_KIND_REGION
)
3488 case GIMPLE_OMP_MASTER
:
3489 case GIMPLE_OMP_MASKED
:
3490 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3491 switch (gimple_code (ctx
->stmt
))
3493 case GIMPLE_OMP_FOR
:
3494 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3495 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3498 case GIMPLE_OMP_SECTIONS
:
3499 case GIMPLE_OMP_SINGLE
:
3500 case GIMPLE_OMP_TASK
:
3501 error_at (gimple_location (stmt
),
3502 "%qs region may not be closely nested inside "
3503 "of work-sharing, %<loop%>, explicit %<task%> or "
3504 "%<taskloop%> region",
3505 gimple_code (stmt
) == GIMPLE_OMP_MASTER
3506 ? "master" : "masked");
3508 case GIMPLE_OMP_PARALLEL
:
3509 case GIMPLE_OMP_TEAMS
:
3511 case GIMPLE_OMP_TARGET
:
3512 if (gimple_omp_target_kind (ctx
->stmt
)
3513 == GF_OMP_TARGET_KIND_REGION
)
3520 case GIMPLE_OMP_SCOPE
:
3521 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3522 switch (gimple_code (ctx
->stmt
))
3524 case GIMPLE_OMP_FOR
:
3525 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3526 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3529 case GIMPLE_OMP_SECTIONS
:
3530 case GIMPLE_OMP_SINGLE
:
3531 case GIMPLE_OMP_TASK
:
3532 case GIMPLE_OMP_CRITICAL
:
3533 case GIMPLE_OMP_ORDERED
:
3534 case GIMPLE_OMP_MASTER
:
3535 case GIMPLE_OMP_MASKED
:
3536 error_at (gimple_location (stmt
),
3537 "%<scope%> region may not be closely nested inside "
3538 "of work-sharing, %<loop%>, explicit %<task%>, "
3539 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3540 "or %<masked%> region");
3542 case GIMPLE_OMP_PARALLEL
:
3543 case GIMPLE_OMP_TEAMS
:
3545 case GIMPLE_OMP_TARGET
:
3546 if (gimple_omp_target_kind (ctx
->stmt
)
3547 == GF_OMP_TARGET_KIND_REGION
)
3554 case GIMPLE_OMP_TASK
:
3555 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3556 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3557 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3558 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3560 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3561 error_at (OMP_CLAUSE_LOCATION (c
),
3562 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3563 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3567 case GIMPLE_OMP_ORDERED
:
3568 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3569 c
; c
= OMP_CLAUSE_CHAIN (c
))
3571 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
3573 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
3574 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
3577 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3578 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
3579 || kind
== OMP_CLAUSE_DEPEND_SINK
)
3582 /* Look for containing ordered(N) loop. */
3584 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3586 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3587 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
3589 error_at (OMP_CLAUSE_LOCATION (c
),
3590 "%<ordered%> construct with %<depend%> clause "
3591 "must be closely nested inside an %<ordered%> "
3595 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
3597 error_at (OMP_CLAUSE_LOCATION (c
),
3598 "%<ordered%> construct with %<depend%> clause "
3599 "must be closely nested inside a loop with "
3600 "%<ordered%> clause with a parameter");
3606 error_at (OMP_CLAUSE_LOCATION (c
),
3607 "invalid depend kind in omp %<ordered%> %<depend%>");
3611 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3612 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3614 /* ordered simd must be closely nested inside of simd region,
3615 and simd region must not encounter constructs other than
3616 ordered simd, therefore ordered simd may be either orphaned,
3617 or ctx->stmt must be simd. The latter case is handled already
3621 error_at (gimple_location (stmt
),
3622 "%<ordered%> %<simd%> must be closely nested inside "
3627 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3628 switch (gimple_code (ctx
->stmt
))
3630 case GIMPLE_OMP_CRITICAL
:
3631 case GIMPLE_OMP_TASK
:
3632 case GIMPLE_OMP_ORDERED
:
3633 ordered_in_taskloop
:
3634 error_at (gimple_location (stmt
),
3635 "%<ordered%> region may not be closely nested inside "
3636 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3637 "%<taskloop%> region");
3639 case GIMPLE_OMP_FOR
:
3640 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3641 goto ordered_in_taskloop
;
3643 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3644 OMP_CLAUSE_ORDERED
);
3647 error_at (gimple_location (stmt
),
3648 "%<ordered%> region must be closely nested inside "
3649 "a loop region with an %<ordered%> clause");
3652 if (OMP_CLAUSE_ORDERED_EXPR (o
) != NULL_TREE
3653 && omp_find_clause (c
, OMP_CLAUSE_DEPEND
) == NULL_TREE
)
3655 error_at (gimple_location (stmt
),
3656 "%<ordered%> region without %<depend%> clause may "
3657 "not be closely nested inside a loop region with "
3658 "an %<ordered%> clause with a parameter");
3662 case GIMPLE_OMP_TARGET
:
3663 if (gimple_omp_target_kind (ctx
->stmt
)
3664 != GF_OMP_TARGET_KIND_REGION
)
3667 case GIMPLE_OMP_PARALLEL
:
3668 case GIMPLE_OMP_TEAMS
:
3669 error_at (gimple_location (stmt
),
3670 "%<ordered%> region must be closely nested inside "
3671 "a loop region with an %<ordered%> clause");
3677 case GIMPLE_OMP_CRITICAL
:
3680 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3681 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3682 if (gomp_critical
*other_crit
3683 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3684 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3686 error_at (gimple_location (stmt
),
3687 "%<critical%> region may not be nested inside "
3688 "a %<critical%> region with the same name");
3693 case GIMPLE_OMP_TEAMS
:
3696 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3697 || (gimple_omp_target_kind (ctx
->stmt
)
3698 != GF_OMP_TARGET_KIND_REGION
))
3700 /* Teams construct can appear either strictly nested inside of
3701 target construct with no intervening stmts, or can be encountered
3702 only by initial task (so must not appear inside any OpenMP
3704 error_at (gimple_location (stmt
),
3705 "%<teams%> construct must be closely nested inside of "
3706 "%<target%> construct or not nested in any OpenMP "
3711 case GIMPLE_OMP_TARGET
:
3712 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3713 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3714 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3715 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3717 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3718 error_at (OMP_CLAUSE_LOCATION (c
),
3719 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3720 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3723 if (is_gimple_omp_offloaded (stmt
)
3724 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3726 error_at (gimple_location (stmt
),
3727 "OpenACC region inside of OpenACC routine, nested "
3728 "parallelism not supported yet");
3731 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3733 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3735 if (is_gimple_omp (stmt
)
3736 && is_gimple_omp_oacc (stmt
)
3737 && is_gimple_omp (ctx
->stmt
))
3739 error_at (gimple_location (stmt
),
3740 "OpenACC construct inside of non-OpenACC region");
3746 const char *stmt_name
, *ctx_stmt_name
;
3747 switch (gimple_omp_target_kind (stmt
))
3749 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3750 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3751 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3752 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3753 stmt_name
= "target enter data"; break;
3754 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3755 stmt_name
= "target exit data"; break;
3756 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3757 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3758 case GF_OMP_TARGET_KIND_OACC_SERIAL
: stmt_name
= "serial"; break;
3759 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3760 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3761 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
3762 stmt_name
= "enter data"; break;
3763 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
3764 stmt_name
= "exit data"; break;
3765 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3766 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3768 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3769 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3770 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
3771 /* OpenACC 'kernels' decomposed parts. */
3772 stmt_name
= "kernels"; break;
3773 default: gcc_unreachable ();
3775 switch (gimple_omp_target_kind (ctx
->stmt
))
3777 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3778 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3779 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3780 ctx_stmt_name
= "parallel"; break;
3781 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3782 ctx_stmt_name
= "kernels"; break;
3783 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3784 ctx_stmt_name
= "serial"; break;
3785 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3786 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3787 ctx_stmt_name
= "host_data"; break;
3788 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3789 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3790 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
3791 /* OpenACC 'kernels' decomposed parts. */
3792 ctx_stmt_name
= "kernels"; break;
3793 default: gcc_unreachable ();
3796 /* OpenACC/OpenMP mismatch? */
3797 if (is_gimple_omp_oacc (stmt
)
3798 != is_gimple_omp_oacc (ctx
->stmt
))
3800 error_at (gimple_location (stmt
),
3801 "%s %qs construct inside of %s %qs region",
3802 (is_gimple_omp_oacc (stmt
)
3803 ? "OpenACC" : "OpenMP"), stmt_name
,
3804 (is_gimple_omp_oacc (ctx
->stmt
)
3805 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3808 if (is_gimple_omp_offloaded (ctx
->stmt
))
3810 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3811 if (is_gimple_omp_oacc (ctx
->stmt
))
3813 error_at (gimple_location (stmt
),
3814 "%qs construct inside of %qs region",
3815 stmt_name
, ctx_stmt_name
);
3820 warning_at (gimple_location (stmt
), 0,
3821 "%qs construct inside of %qs region",
3822 stmt_name
, ctx_stmt_name
);
3834 /* Helper function scan_omp.
3836 Callback for walk_tree or operators in walk_gimple_stmt used to
3837 scan for OMP directives in TP. */
3840 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3842 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3843 omp_context
*ctx
= (omp_context
*) wi
->info
;
3846 switch (TREE_CODE (t
))
3854 tree repl
= remap_decl (t
, &ctx
->cb
);
3855 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3861 if (ctx
&& TYPE_P (t
))
3862 *tp
= remap_type (t
, &ctx
->cb
);
3863 else if (!DECL_P (t
))
3868 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3869 if (tem
!= TREE_TYPE (t
))
3871 if (TREE_CODE (t
) == INTEGER_CST
)
3872 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3874 TREE_TYPE (t
) = tem
;
3884 /* Return true if FNDECL is a setjmp or a longjmp. */
3887 setjmp_or_longjmp_p (const_tree fndecl
)
3889 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3890 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
3893 tree declname
= DECL_NAME (fndecl
);
3895 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3896 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3897 || !TREE_PUBLIC (fndecl
))
3900 const char *name
= IDENTIFIER_POINTER (declname
);
3901 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
3904 /* Return true if FNDECL is an omp_* runtime API call. */
3907 omp_runtime_api_call (const_tree fndecl
)
3909 tree declname
= DECL_NAME (fndecl
);
3911 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3912 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3913 || !TREE_PUBLIC (fndecl
))
3916 const char *name
= IDENTIFIER_POINTER (declname
);
3917 if (!startswith (name
, "omp_"))
3920 static const char *omp_runtime_apis
[] =
3922 /* This array has 3 sections. First omp_* calls that don't
3923 have any suffixes. */
3931 "target_associate_ptr",
3932 "target_disassociate_ptr",
3934 "target_is_present",
3936 "target_memcpy_rect",
3938 /* Now omp_* calls that are available as omp_* and omp_*_. */
3940 "destroy_allocator",
3942 "destroy_nest_lock",
3946 "get_affinity_format",
3948 "get_default_allocator",
3949 "get_default_device",
3952 "get_initial_device",
3954 "get_max_active_levels",
3955 "get_max_task_priority",
3964 "get_partition_num_places",
3967 "get_supported_active_levels",
3969 "get_teams_thread_limit",
3978 "is_initial_device",
3980 "pause_resource_all",
3981 "set_affinity_format",
3982 "set_default_allocator",
3990 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3992 "get_ancestor_thread_num",
3994 "get_partition_place_nums",
3995 "get_place_num_procs",
3996 "get_place_proc_ids",
3999 "set_default_device",
4001 "set_max_active_levels",
4006 "set_teams_thread_limit"
4010 for (unsigned i
= 0; i
< ARRAY_SIZE (omp_runtime_apis
); i
++)
4012 if (omp_runtime_apis
[i
] == NULL
)
4017 size_t len
= strlen (omp_runtime_apis
[i
]);
4018 if (strncmp (name
+ 4, omp_runtime_apis
[i
], len
) == 0
4019 && (name
[4 + len
] == '\0'
4021 && name
[4 + len
] == '_'
4022 && (name
[4 + len
+ 1] == '\0'
4024 && strcmp (name
+ 4 + len
+ 1, "8_") == 0)))))
4030 /* Helper function for scan_omp.
4032 Callback for walk_gimple_stmt used to scan for OMP directives in
4033 the current statement in GSI. */
4036 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
4037 struct walk_stmt_info
*wi
)
4039 gimple
*stmt
= gsi_stmt (*gsi
);
4040 omp_context
*ctx
= (omp_context
*) wi
->info
;
4042 if (gimple_has_location (stmt
))
4043 input_location
= gimple_location (stmt
);
4045 /* Check the nesting restrictions. */
4046 bool remove
= false;
4047 if (is_gimple_omp (stmt
))
4048 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
4049 else if (is_gimple_call (stmt
))
4051 tree fndecl
= gimple_call_fndecl (stmt
);
4055 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4056 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
4057 && setjmp_or_longjmp_p (fndecl
)
4061 error_at (gimple_location (stmt
),
4062 "setjmp/longjmp inside %<simd%> construct");
4064 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
4065 switch (DECL_FUNCTION_CODE (fndecl
))
4067 case BUILT_IN_GOMP_BARRIER
:
4068 case BUILT_IN_GOMP_CANCEL
:
4069 case BUILT_IN_GOMP_CANCELLATION_POINT
:
4070 case BUILT_IN_GOMP_TASKYIELD
:
4071 case BUILT_IN_GOMP_TASKWAIT
:
4072 case BUILT_IN_GOMP_TASKGROUP_START
:
4073 case BUILT_IN_GOMP_TASKGROUP_END
:
4074 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
4081 omp_context
*octx
= ctx
;
4082 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
&& ctx
->outer
)
4084 if (octx
->order_concurrent
&& omp_runtime_api_call (fndecl
))
4087 error_at (gimple_location (stmt
),
4088 "OpenMP runtime API call %qD in a region with "
4089 "%<order(concurrent)%> clause", fndecl
);
4091 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
4092 && (gimple_omp_target_kind (ctx
->stmt
)
4093 == GF_OMP_TARGET_KIND_REGION
))
4095 tree tgt_clauses
= gimple_omp_target_clauses (ctx
->stmt
);
4096 tree c
= omp_find_clause (tgt_clauses
, OMP_CLAUSE_DEVICE
);
4097 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
4098 error_at (gimple_location (stmt
),
4099 "OpenMP runtime API call %qD in a region with "
4100 "%<device(ancestor)%> clause", fndecl
);
4107 stmt
= gimple_build_nop ();
4108 gsi_replace (gsi
, stmt
, false);
4111 *handled_ops_p
= true;
4113 switch (gimple_code (stmt
))
4115 case GIMPLE_OMP_PARALLEL
:
4116 taskreg_nesting_level
++;
4117 scan_omp_parallel (gsi
, ctx
);
4118 taskreg_nesting_level
--;
4121 case GIMPLE_OMP_TASK
:
4122 taskreg_nesting_level
++;
4123 scan_omp_task (gsi
, ctx
);
4124 taskreg_nesting_level
--;
4127 case GIMPLE_OMP_FOR
:
4128 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
4129 == GF_OMP_FOR_KIND_SIMD
)
4130 && gimple_omp_for_combined_into_p (stmt
)
4131 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SCAN
)
4133 tree clauses
= gimple_omp_for_clauses (as_a
<gomp_for
*> (stmt
));
4134 tree c
= omp_find_clause (clauses
, OMP_CLAUSE_REDUCTION
);
4135 if (c
&& OMP_CLAUSE_REDUCTION_INSCAN (c
) && !seen_error ())
4137 scan_omp_simd_scan (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
4141 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
4142 == GF_OMP_FOR_KIND_SIMD
)
4143 && omp_maybe_offloaded_ctx (ctx
)
4144 && omp_max_simt_vf ()
4145 && gimple_omp_for_collapse (stmt
) == 1)
4146 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
4148 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
4151 case GIMPLE_OMP_SCOPE
:
4152 ctx
= new_omp_context (stmt
, ctx
);
4153 scan_sharing_clauses (gimple_omp_scope_clauses (stmt
), ctx
);
4154 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4157 case GIMPLE_OMP_SECTIONS
:
4158 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
4161 case GIMPLE_OMP_SINGLE
:
4162 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
4165 case GIMPLE_OMP_SCAN
:
4166 if (tree clauses
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)))
4168 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_INCLUSIVE
)
4169 ctx
->scan_inclusive
= true;
4170 else if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_EXCLUSIVE
)
4171 ctx
->scan_exclusive
= true;
4174 case GIMPLE_OMP_SECTION
:
4175 case GIMPLE_OMP_MASTER
:
4176 case GIMPLE_OMP_ORDERED
:
4177 case GIMPLE_OMP_CRITICAL
:
4178 ctx
= new_omp_context (stmt
, ctx
);
4179 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4182 case GIMPLE_OMP_MASKED
:
4183 ctx
= new_omp_context (stmt
, ctx
);
4184 scan_sharing_clauses (gimple_omp_masked_clauses (stmt
), ctx
);
4185 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4188 case GIMPLE_OMP_TASKGROUP
:
4189 ctx
= new_omp_context (stmt
, ctx
);
4190 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
4191 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4194 case GIMPLE_OMP_TARGET
:
4195 if (is_gimple_omp_offloaded (stmt
))
4197 taskreg_nesting_level
++;
4198 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
4199 taskreg_nesting_level
--;
4202 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
4205 case GIMPLE_OMP_TEAMS
:
4206 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
4208 taskreg_nesting_level
++;
4209 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
4210 taskreg_nesting_level
--;
4213 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
4220 *handled_ops_p
= false;
4222 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
4224 var
= DECL_CHAIN (var
))
4225 insert_decl_map (&ctx
->cb
, var
, var
);
4229 *handled_ops_p
= false;
4237 /* Scan all the statements starting at the current statement. CTX
4238 contains context information about the OMP directives and
4239 clauses found during the scan. */
4242 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
4244 location_t saved_location
;
4245 struct walk_stmt_info wi
;
4247 memset (&wi
, 0, sizeof (wi
));
4249 wi
.want_locations
= true;
4251 saved_location
= input_location
;
4252 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
4253 input_location
= saved_location
;
4256 /* Re-gimplification and code generation routines. */
4258 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4259 of BIND if in a method. */
4262 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
4264 if (DECL_ARGUMENTS (current_function_decl
)
4265 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
4266 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
4269 tree vars
= gimple_bind_vars (bind
);
4270 for (tree
*pvar
= &vars
; *pvar
; )
4271 if (omp_member_access_dummy_var (*pvar
))
4272 *pvar
= DECL_CHAIN (*pvar
);
4274 pvar
= &DECL_CHAIN (*pvar
);
4275 gimple_bind_set_vars (bind
, vars
);
4279 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4280 block and its subblocks. */
4283 remove_member_access_dummy_vars (tree block
)
4285 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
4286 if (omp_member_access_dummy_var (*pvar
))
4287 *pvar
= DECL_CHAIN (*pvar
);
4289 pvar
= &DECL_CHAIN (*pvar
);
4291 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
4292 remove_member_access_dummy_vars (block
);
4295 /* If a context was created for STMT when it was scanned, return it. */
4297 static omp_context
*
4298 maybe_lookup_ctx (gimple
*stmt
)
4301 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
4302 return n
? (omp_context
*) n
->value
: NULL
;
4306 /* Find the mapping for DECL in CTX or the immediately enclosing
4307 context that has a mapping for DECL.
4309 If CTX is a nested parallel directive, we may have to use the decl
4310 mappings created in CTX's parent context. Suppose that we have the
4311 following parallel nesting (variable UIDs showed for clarity):
4314 #omp parallel shared(iD.1562) -> outer parallel
4315 iD.1562 = iD.1562 + 1;
4317 #omp parallel shared (iD.1562) -> inner parallel
4318 iD.1562 = iD.1562 - 1;
4320 Each parallel structure will create a distinct .omp_data_s structure
4321 for copying iD.1562 in/out of the directive:
4323 outer parallel .omp_data_s.1.i -> iD.1562
4324 inner parallel .omp_data_s.2.i -> iD.1562
4326 A shared variable mapping will produce a copy-out operation before
4327 the parallel directive and a copy-in operation after it. So, in
4328 this case we would have:
4331 .omp_data_o.1.i = iD.1562;
4332 #omp parallel shared(iD.1562) -> outer parallel
4333 .omp_data_i.1 = &.omp_data_o.1
4334 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4336 .omp_data_o.2.i = iD.1562; -> **
4337 #omp parallel shared(iD.1562) -> inner parallel
4338 .omp_data_i.2 = &.omp_data_o.2
4339 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4342 ** This is a problem. The symbol iD.1562 cannot be referenced
4343 inside the body of the outer parallel region. But since we are
4344 emitting this copy operation while expanding the inner parallel
4345 directive, we need to access the CTX structure of the outer
4346 parallel directive to get the correct mapping:
4348 .omp_data_o.2.i = .omp_data_i.1->i
4350 Since there may be other workshare or parallel directives enclosing
4351 the parallel directive, it may be necessary to walk up the context
4352 parent chain. This is not a problem in general because nested
4353 parallelism happens only rarely. */
4356 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4361 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4362 t
= maybe_lookup_decl (decl
, up
);
4364 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
4366 return t
? t
: decl
;
4370 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4371 in outer contexts. */
4374 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4379 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4380 t
= maybe_lookup_decl (decl
, up
);
4382 return t
? t
: decl
;
4386 /* Construct the initialization value for reduction operation OP. */
4389 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
4398 case TRUTH_ORIF_EXPR
:
4399 case TRUTH_XOR_EXPR
:
4401 return build_zero_cst (type
);
4404 case TRUTH_AND_EXPR
:
4405 case TRUTH_ANDIF_EXPR
:
4407 return fold_convert_loc (loc
, type
, integer_one_node
);
4410 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
4413 if (SCALAR_FLOAT_TYPE_P (type
))
4415 REAL_VALUE_TYPE max
, min
;
4416 if (HONOR_INFINITIES (type
))
4419 real_arithmetic (&min
, NEGATE_EXPR
, &max
, NULL
);
4422 real_maxval (&min
, 1, TYPE_MODE (type
));
4423 return build_real (type
, min
);
4425 else if (POINTER_TYPE_P (type
))
4428 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4429 return wide_int_to_tree (type
, min
);
4433 gcc_assert (INTEGRAL_TYPE_P (type
));
4434 return TYPE_MIN_VALUE (type
);
4438 if (SCALAR_FLOAT_TYPE_P (type
))
4440 REAL_VALUE_TYPE max
;
4441 if (HONOR_INFINITIES (type
))
4444 real_maxval (&max
, 0, TYPE_MODE (type
));
4445 return build_real (type
, max
);
4447 else if (POINTER_TYPE_P (type
))
4450 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4451 return wide_int_to_tree (type
, max
);
4455 gcc_assert (INTEGRAL_TYPE_P (type
));
4456 return TYPE_MAX_VALUE (type
);
4464 /* Construct the initialization value for reduction CLAUSE. */
4467 omp_reduction_init (tree clause
, tree type
)
4469 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
4470 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
4473 /* Return alignment to be assumed for var in CLAUSE, which should be
4474 OMP_CLAUSE_ALIGNED. */
4477 omp_clause_aligned_alignment (tree clause
)
4479 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
4480 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
4482 /* Otherwise return implementation defined alignment. */
4483 unsigned int al
= 1;
4484 opt_scalar_mode mode_iter
;
4485 auto_vector_modes modes
;
4486 targetm
.vectorize
.autovectorize_vector_modes (&modes
, true);
4487 static enum mode_class classes
[]
4488 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
4489 for (int i
= 0; i
< 4; i
+= 2)
4490 /* The for loop above dictates that we only walk through scalar classes. */
4491 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
4493 scalar_mode mode
= mode_iter
.require ();
4494 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
4495 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
4497 machine_mode alt_vmode
;
4498 for (unsigned int j
= 0; j
< modes
.length (); ++j
)
4499 if (related_vector_mode (modes
[j
], mode
).exists (&alt_vmode
)
4500 && known_ge (GET_MODE_SIZE (alt_vmode
), GET_MODE_SIZE (vmode
)))
4503 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
4504 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
4506 type
= build_vector_type_for_mode (type
, vmode
);
4507 if (TYPE_MODE (type
) != vmode
)
4509 if (TYPE_ALIGN_UNIT (type
) > al
)
4510 al
= TYPE_ALIGN_UNIT (type
);
4512 return build_int_cst (integer_type_node
, al
);
4516 /* This structure is part of the interface between lower_rec_simd_input_clauses
4517 and lower_rec_input_clauses. */
4519 class omplow_simd_context
{
4521 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4525 vec
<tree
, va_heap
> simt_eargs
;
4526 gimple_seq simt_dlist
;
4527 poly_uint64_pod max_vf
;
4531 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4535 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
4536 omplow_simd_context
*sctx
, tree
&ivar
,
4537 tree
&lvar
, tree
*rvar
= NULL
,
4540 if (known_eq (sctx
->max_vf
, 0U))
4542 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
4543 if (maybe_gt (sctx
->max_vf
, 1U))
4545 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4546 OMP_CLAUSE_SAFELEN
);
4549 poly_uint64 safe_len
;
4550 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4551 || maybe_lt (safe_len
, 1U))
4554 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
4557 if (sctx
->is_simt
&& !known_eq (sctx
->max_vf
, 1U))
4559 for (tree c
= gimple_omp_for_clauses (ctx
->stmt
); c
;
4560 c
= OMP_CLAUSE_CHAIN (c
))
4562 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4565 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4567 /* UDR reductions are not supported yet for SIMT, disable
4573 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c
))
4574 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var
)))
4576 /* Doing boolean operations on non-integral types is
4577 for conformance only, it's not worth supporting this
4584 if (maybe_gt (sctx
->max_vf
, 1U))
4586 sctx
->idx
= create_tmp_var (unsigned_type_node
);
4587 sctx
->lane
= create_tmp_var (unsigned_type_node
);
4590 if (known_eq (sctx
->max_vf
, 1U))
4595 if (is_gimple_reg (new_var
))
4597 ivar
= lvar
= new_var
;
4600 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
4601 ivar
= lvar
= create_tmp_var (type
);
4602 TREE_ADDRESSABLE (ivar
) = 1;
4603 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
4604 NULL
, DECL_ATTRIBUTES (ivar
));
4605 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
4606 tree clobber
= build_clobber (type
);
4607 gimple
*g
= gimple_build_assign (ivar
, clobber
);
4608 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
4612 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
4613 tree avar
= create_tmp_var_raw (atype
);
4614 if (TREE_ADDRESSABLE (new_var
))
4615 TREE_ADDRESSABLE (avar
) = 1;
4616 DECL_ATTRIBUTES (avar
)
4617 = tree_cons (get_identifier ("omp simd array"), NULL
,
4618 DECL_ATTRIBUTES (avar
));
4619 gimple_add_tmp_var (avar
);
4621 if (rvar
&& !ctx
->for_simd_scan_phase
)
4623 /* For inscan reductions, create another array temporary,
4624 which will hold the reduced value. */
4625 iavar
= create_tmp_var_raw (atype
);
4626 if (TREE_ADDRESSABLE (new_var
))
4627 TREE_ADDRESSABLE (iavar
) = 1;
4628 DECL_ATTRIBUTES (iavar
)
4629 = tree_cons (get_identifier ("omp simd array"), NULL
,
4630 tree_cons (get_identifier ("omp simd inscan"), NULL
,
4631 DECL_ATTRIBUTES (iavar
)));
4632 gimple_add_tmp_var (iavar
);
4633 ctx
->cb
.decl_map
->put (avar
, iavar
);
4634 if (sctx
->lastlane
== NULL_TREE
)
4635 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
4636 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
4637 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
4638 TREE_THIS_NOTRAP (*rvar
) = 1;
4640 if (ctx
->scan_exclusive
)
4642 /* And for exclusive scan yet another one, which will
4643 hold the value during the scan phase. */
4644 tree savar
= create_tmp_var_raw (atype
);
4645 if (TREE_ADDRESSABLE (new_var
))
4646 TREE_ADDRESSABLE (savar
) = 1;
4647 DECL_ATTRIBUTES (savar
)
4648 = tree_cons (get_identifier ("omp simd array"), NULL
,
4649 tree_cons (get_identifier ("omp simd inscan "
4651 DECL_ATTRIBUTES (savar
)));
4652 gimple_add_tmp_var (savar
);
4653 ctx
->cb
.decl_map
->put (iavar
, savar
);
4654 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
4655 sctx
->idx
, NULL_TREE
, NULL_TREE
);
4656 TREE_THIS_NOTRAP (*rvar2
) = 1;
4659 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
4660 NULL_TREE
, NULL_TREE
);
4661 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
4662 NULL_TREE
, NULL_TREE
);
4663 TREE_THIS_NOTRAP (ivar
) = 1;
4664 TREE_THIS_NOTRAP (lvar
) = 1;
4666 if (DECL_P (new_var
))
4668 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4669 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4674 /* Helper function of lower_rec_input_clauses. For a reference
4675 in simd reduction, add an underlying variable it will reference. */
4678 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
4680 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
4681 if (TREE_CONSTANT (z
))
4683 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
4684 get_name (new_vard
));
4685 gimple_add_tmp_var (z
);
4686 TREE_ADDRESSABLE (z
) = 1;
4687 z
= build_fold_addr_expr_loc (loc
, z
);
4688 gimplify_assign (new_vard
, z
, ilist
);
4692 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4693 code to emit (type) (tskred_temp[idx]). */
4696 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
4699 unsigned HOST_WIDE_INT sz
4700 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
4701 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
4702 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
4704 tree v
= create_tmp_var (pointer_sized_int_node
);
4705 gimple
*g
= gimple_build_assign (v
, r
);
4706 gimple_seq_add_stmt (ilist
, g
);
4707 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
4709 v
= create_tmp_var (type
);
4710 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
4711 gimple_seq_add_stmt (ilist
, g
);
4716 /* Lower early initialization of privatized variable NEW_VAR
4717 if it needs an allocator (has allocate clause). */
4720 lower_private_allocate (tree var
, tree new_var
, tree
&allocator
,
4721 tree
&allocate_ptr
, gimple_seq
*ilist
,
4722 omp_context
*ctx
, bool is_ref
, tree size
)
4726 gcc_assert (allocate_ptr
== NULL_TREE
);
4727 if (ctx
->allocate_map
4728 && (DECL_P (new_var
) || (TYPE_P (new_var
) && size
)))
4729 if (tree
*allocatorp
= ctx
->allocate_map
->get (var
))
4730 allocator
= *allocatorp
;
4731 if (allocator
== NULL_TREE
)
4733 if (!is_ref
&& omp_privatize_by_reference (var
))
4735 allocator
= NULL_TREE
;
4739 unsigned HOST_WIDE_INT ialign
= 0;
4740 if (TREE_CODE (allocator
) == TREE_LIST
)
4742 ialign
= tree_to_uhwi (TREE_VALUE (allocator
));
4743 allocator
= TREE_PURPOSE (allocator
);
4745 if (TREE_CODE (allocator
) != INTEGER_CST
)
4746 allocator
= build_outer_var_ref (allocator
, ctx
);
4747 allocator
= fold_convert (pointer_sized_int_node
, allocator
);
4748 if (TREE_CODE (allocator
) != INTEGER_CST
)
4750 tree var
= create_tmp_var (TREE_TYPE (allocator
));
4751 gimplify_assign (var
, allocator
, ilist
);
4755 tree ptr_type
, align
, sz
= size
;
4756 if (TYPE_P (new_var
))
4758 ptr_type
= build_pointer_type (new_var
);
4759 ialign
= MAX (ialign
, TYPE_ALIGN_UNIT (new_var
));
4763 ptr_type
= build_pointer_type (TREE_TYPE (TREE_TYPE (new_var
)));
4764 ialign
= MAX (ialign
, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type
)));
4768 ptr_type
= build_pointer_type (TREE_TYPE (new_var
));
4769 ialign
= MAX (ialign
, DECL_ALIGN_UNIT (new_var
));
4770 if (sz
== NULL_TREE
)
4771 sz
= fold_convert (size_type_node
, DECL_SIZE_UNIT (new_var
));
4773 align
= build_int_cst (size_type_node
, ialign
);
4774 if (TREE_CODE (sz
) != INTEGER_CST
)
4776 tree szvar
= create_tmp_var (size_type_node
);
4777 gimplify_assign (szvar
, sz
, ilist
);
4780 allocate_ptr
= create_tmp_var (ptr_type
);
4781 tree a
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
4782 gimple
*g
= gimple_build_call (a
, 3, align
, sz
, allocator
);
4783 gimple_call_set_lhs (g
, allocate_ptr
);
4784 gimple_seq_add_stmt (ilist
, g
);
4787 tree x
= build_simple_mem_ref (allocate_ptr
);
4788 TREE_THIS_NOTRAP (x
) = 1;
4789 SET_DECL_VALUE_EXPR (new_var
, x
);
4790 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4795 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4796 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4797 private variables. Initialization statements go in ILIST, while calls
4798 to destructors go in DLIST. */
4801 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
4802 omp_context
*ctx
, struct omp_for_data
*fd
)
4804 tree c
, copyin_seq
, x
, ptr
;
4805 bool copyin_by_ref
= false;
4806 bool lastprivate_firstprivate
= false;
4807 bool reduction_omp_orig_ref
= false;
4809 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4810 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
4811 omplow_simd_context sctx
= omplow_simd_context ();
4812 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
4813 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
4814 gimple_seq llist
[4] = { };
4815 tree nonconst_simd_if
= NULL_TREE
;
4818 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
4820 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4821 with data sharing clauses referencing variable sized vars. That
4822 is unnecessarily hard to support and very unlikely to result in
4823 vectorized code anyway. */
4825 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4826 switch (OMP_CLAUSE_CODE (c
))
4828 case OMP_CLAUSE_LINEAR
:
4829 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4832 case OMP_CLAUSE_PRIVATE
:
4833 case OMP_CLAUSE_FIRSTPRIVATE
:
4834 case OMP_CLAUSE_LASTPRIVATE
:
4835 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
4837 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c
)))
4839 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4840 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4844 case OMP_CLAUSE_REDUCTION
:
4845 case OMP_CLAUSE_IN_REDUCTION
:
4846 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
4847 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
4849 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c
)))
4851 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4852 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4857 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
4859 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
4860 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
4862 case OMP_CLAUSE_SIMDLEN
:
4863 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
4866 case OMP_CLAUSE__CONDTEMP_
:
4867 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4875 /* Add a placeholder for simduid. */
4876 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
4877 sctx
.simt_eargs
.safe_push (NULL_TREE
);
4879 unsigned task_reduction_cnt
= 0;
4880 unsigned task_reduction_cntorig
= 0;
4881 unsigned task_reduction_cnt_full
= 0;
4882 unsigned task_reduction_cntorig_full
= 0;
4883 unsigned task_reduction_other_cnt
= 0;
4884 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
4885 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
4886 /* Do all the fixed sized types in the first pass, and the variable sized
4887 types in the second pass. This makes sure that the scalar arguments to
4888 the variable sized types are processed before we use them in the
4889 variable sized operations. For task reductions we use 4 passes, in the
4890 first two we ignore them, in the third one gather arguments for
4891 GOMP_task_reduction_remap call and in the last pass actually handle
4892 the task reductions. */
4893 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
4896 if (pass
== 2 && task_reduction_cnt
)
4899 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
4900 + task_reduction_cntorig
);
4901 tskred_avar
= create_tmp_var_raw (tskred_atype
);
4902 gimple_add_tmp_var (tskred_avar
);
4903 TREE_ADDRESSABLE (tskred_avar
) = 1;
4904 task_reduction_cnt_full
= task_reduction_cnt
;
4905 task_reduction_cntorig_full
= task_reduction_cntorig
;
4907 else if (pass
== 3 && task_reduction_cnt
)
4909 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
4911 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
4912 size_int (task_reduction_cntorig
),
4913 build_fold_addr_expr (tskred_avar
));
4914 gimple_seq_add_stmt (ilist
, g
);
4916 if (pass
== 3 && task_reduction_other_cnt
)
4918 /* For reduction clauses, build
4919 tskred_base = (void *) tskred_temp[2]
4920 + omp_get_thread_num () * tskred_temp[1]
4921 or if tskred_temp[1] is known to be constant, that constant
4922 directly. This is the start of the private reduction copy block
4923 for the current thread. */
4924 tree v
= create_tmp_var (integer_type_node
);
4925 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
4926 gimple
*g
= gimple_build_call (x
, 0);
4927 gimple_call_set_lhs (g
, v
);
4928 gimple_seq_add_stmt (ilist
, g
);
4929 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
4930 tskred_temp
= OMP_CLAUSE_DECL (c
);
4931 if (is_taskreg_ctx (ctx
))
4932 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
4933 tree v2
= create_tmp_var (sizetype
);
4934 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
4935 gimple_seq_add_stmt (ilist
, g
);
4936 if (ctx
->task_reductions
[0])
4937 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
4939 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
4940 tree v3
= create_tmp_var (sizetype
);
4941 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
4942 gimple_seq_add_stmt (ilist
, g
);
4943 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
4944 tskred_base
= create_tmp_var (ptr_type_node
);
4945 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
4946 gimple_seq_add_stmt (ilist
, g
);
4948 task_reduction_cnt
= 0;
4949 task_reduction_cntorig
= 0;
4950 task_reduction_other_cnt
= 0;
4951 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4953 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
4956 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4957 bool task_reduction_p
= false;
4958 bool task_reduction_needs_orig_p
= false;
4959 tree cond
= NULL_TREE
;
4960 tree allocator
, allocate_ptr
;
4964 case OMP_CLAUSE_PRIVATE
:
4965 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
4968 case OMP_CLAUSE_SHARED
:
4969 /* Ignore shared directives in teams construct inside
4970 of target construct. */
4971 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4972 && !is_host_teams_ctx (ctx
))
4974 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
4976 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
4977 || is_global_var (OMP_CLAUSE_DECL (c
)));
4980 case OMP_CLAUSE_FIRSTPRIVATE
:
4981 case OMP_CLAUSE_COPYIN
:
4983 case OMP_CLAUSE_LINEAR
:
4984 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
4985 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4986 lastprivate_firstprivate
= true;
4988 case OMP_CLAUSE_REDUCTION
:
4989 case OMP_CLAUSE_IN_REDUCTION
:
4990 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
4991 || is_task_ctx (ctx
)
4992 || OMP_CLAUSE_REDUCTION_TASK (c
))
4994 task_reduction_p
= true;
4995 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
4997 task_reduction_other_cnt
++;
5002 task_reduction_cnt
++;
5003 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5005 var
= OMP_CLAUSE_DECL (c
);
5006 /* If var is a global variable that isn't privatized
5007 in outer contexts, we don't need to look up the
5008 original address, it is always the address of the
5009 global variable itself. */
5011 || omp_privatize_by_reference (var
)
5013 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
5015 task_reduction_needs_orig_p
= true;
5016 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5017 task_reduction_cntorig
++;
5021 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5022 reduction_omp_orig_ref
= true;
5024 case OMP_CLAUSE__REDUCTEMP_
:
5025 if (!is_taskreg_ctx (ctx
))
5028 case OMP_CLAUSE__LOOPTEMP_
:
5029 /* Handle _looptemp_/_reductemp_ clauses only on
5034 case OMP_CLAUSE_LASTPRIVATE
:
5035 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5037 lastprivate_firstprivate
= true;
5038 if (pass
!= 0 || is_taskloop_ctx (ctx
))
5041 /* Even without corresponding firstprivate, if
5042 decl is Fortran allocatable, it needs outer var
5045 && lang_hooks
.decls
.omp_private_outer_ref
5046 (OMP_CLAUSE_DECL (c
)))
5047 lastprivate_firstprivate
= true;
5049 case OMP_CLAUSE_ALIGNED
:
5052 var
= OMP_CLAUSE_DECL (c
);
5053 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
5054 && !is_global_var (var
))
5056 new_var
= maybe_lookup_decl (var
, ctx
);
5057 if (new_var
== NULL_TREE
)
5058 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5059 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
5060 tree alarg
= omp_clause_aligned_alignment (c
);
5061 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
5062 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
5063 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5064 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5065 gimplify_and_add (x
, ilist
);
5067 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
5068 && is_global_var (var
))
5070 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
5071 new_var
= lookup_decl (var
, ctx
);
5072 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5073 t
= build_fold_addr_expr_loc (clause_loc
, t
);
5074 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
5075 tree alarg
= omp_clause_aligned_alignment (c
);
5076 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
5077 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
5078 t
= fold_convert_loc (clause_loc
, ptype
, t
);
5079 x
= create_tmp_var (ptype
);
5080 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
5081 gimplify_and_add (t
, ilist
);
5082 t
= build_simple_mem_ref_loc (clause_loc
, x
);
5083 SET_DECL_VALUE_EXPR (new_var
, t
);
5084 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5087 case OMP_CLAUSE__CONDTEMP_
:
5088 if (is_parallel_ctx (ctx
)
5089 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
5096 if (task_reduction_p
!= (pass
>= 2))
5099 allocator
= NULL_TREE
;
5100 allocate_ptr
= NULL_TREE
;
5101 new_var
= var
= OMP_CLAUSE_DECL (c
);
5102 if ((c_kind
== OMP_CLAUSE_REDUCTION
5103 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5104 && TREE_CODE (var
) == MEM_REF
)
5106 var
= TREE_OPERAND (var
, 0);
5107 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
5108 var
= TREE_OPERAND (var
, 0);
5109 if (TREE_CODE (var
) == INDIRECT_REF
5110 || TREE_CODE (var
) == ADDR_EXPR
)
5111 var
= TREE_OPERAND (var
, 0);
5112 if (is_variable_sized (var
))
5114 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
5115 var
= DECL_VALUE_EXPR (var
);
5116 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
5117 var
= TREE_OPERAND (var
, 0);
5118 gcc_assert (DECL_P (var
));
5122 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
&& is_omp_target (ctx
->stmt
))
5124 splay_tree_key key
= (splay_tree_key
) &DECL_CONTEXT (var
);
5125 new_var
= (tree
) splay_tree_lookup (ctx
->field_map
, key
)->value
;
5127 else if (c_kind
!= OMP_CLAUSE_COPYIN
)
5128 new_var
= lookup_decl (var
, ctx
);
5130 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
5135 /* C/C++ array section reductions. */
5136 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5137 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5138 && var
!= OMP_CLAUSE_DECL (c
))
5143 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
5144 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
5146 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
5148 tree b
= TREE_OPERAND (orig_var
, 1);
5149 if (is_omp_target (ctx
->stmt
))
5152 b
= maybe_lookup_decl (b
, ctx
);
5155 b
= TREE_OPERAND (orig_var
, 1);
5156 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
5158 if (integer_zerop (bias
))
5162 bias
= fold_convert_loc (clause_loc
,
5163 TREE_TYPE (b
), bias
);
5164 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5165 TREE_TYPE (b
), b
, bias
);
5167 orig_var
= TREE_OPERAND (orig_var
, 0);
5171 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5172 if (is_global_var (out
)
5173 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
5174 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
5175 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
5178 else if (is_omp_target (ctx
->stmt
))
5182 bool by_ref
= use_pointer_for_field (var
, NULL
);
5183 x
= build_receiver_ref (var
, by_ref
, ctx
);
5184 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
5185 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
5187 x
= build_fold_addr_expr (x
);
5189 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
5190 x
= build_simple_mem_ref (x
);
5191 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
5193 if (var
== TREE_OPERAND (orig_var
, 0))
5194 x
= build_fold_addr_expr (x
);
5196 bias
= fold_convert (sizetype
, bias
);
5197 x
= fold_convert (ptr_type_node
, x
);
5198 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
5199 TREE_TYPE (x
), x
, bias
);
5200 unsigned cnt
= task_reduction_cnt
- 1;
5201 if (!task_reduction_needs_orig_p
)
5202 cnt
+= (task_reduction_cntorig_full
5203 - task_reduction_cntorig
);
5205 cnt
= task_reduction_cntorig
- 1;
5206 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5207 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5208 gimplify_assign (r
, x
, ilist
);
5212 if (TREE_CODE (orig_var
) == INDIRECT_REF
5213 || TREE_CODE (orig_var
) == ADDR_EXPR
)
5214 orig_var
= TREE_OPERAND (orig_var
, 0);
5215 tree d
= OMP_CLAUSE_DECL (c
);
5216 tree type
= TREE_TYPE (d
);
5217 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
5218 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
5220 const char *name
= get_name (orig_var
);
5221 if (pass
!= 3 && !TREE_CONSTANT (v
))
5224 if (is_omp_target (ctx
->stmt
))
5227 t
= maybe_lookup_decl (v
, ctx
);
5231 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5232 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
5233 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5235 build_int_cst (TREE_TYPE (v
), 1));
5236 sz
= fold_build2_loc (clause_loc
, MULT_EXPR
,
5238 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5242 tree xv
= create_tmp_var (ptr_type_node
);
5243 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5245 unsigned cnt
= task_reduction_cnt
- 1;
5246 if (!task_reduction_needs_orig_p
)
5247 cnt
+= (task_reduction_cntorig_full
5248 - task_reduction_cntorig
);
5250 cnt
= task_reduction_cntorig
- 1;
5251 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5252 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5254 gimple
*g
= gimple_build_assign (xv
, x
);
5255 gimple_seq_add_stmt (ilist
, g
);
5259 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5261 if (ctx
->task_reductions
[1 + idx
])
5262 off
= fold_convert (sizetype
,
5263 ctx
->task_reductions
[1 + idx
]);
5265 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5267 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
5269 gimple_seq_add_stmt (ilist
, g
);
5271 x
= fold_convert (build_pointer_type (boolean_type_node
),
5273 if (TREE_CONSTANT (v
))
5274 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
5275 TYPE_SIZE_UNIT (type
));
5279 if (is_omp_target (ctx
->stmt
))
5282 t
= maybe_lookup_decl (v
, ctx
);
5286 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5287 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
5289 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5291 build_int_cst (TREE_TYPE (v
), 1));
5292 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
5294 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5295 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5297 cond
= create_tmp_var (TREE_TYPE (x
));
5298 gimplify_assign (cond
, x
, ilist
);
5301 else if (lower_private_allocate (var
, type
, allocator
,
5302 allocate_ptr
, ilist
, ctx
,
5305 ? TYPE_SIZE_UNIT (type
)
5308 else if (TREE_CONSTANT (v
))
5310 x
= create_tmp_var_raw (type
, name
);
5311 gimple_add_tmp_var (x
);
5312 TREE_ADDRESSABLE (x
) = 1;
5313 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5318 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5319 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
5320 x
= build_call_expr_loc (clause_loc
, atmp
, 2, sz
, al
);
5323 tree ptype
= build_pointer_type (TREE_TYPE (type
));
5324 x
= fold_convert_loc (clause_loc
, ptype
, x
);
5325 tree y
= create_tmp_var (ptype
, name
);
5326 gimplify_assign (y
, x
, ilist
);
5330 if (!integer_zerop (bias
))
5332 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
5334 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
5336 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
5337 pointer_sized_int_node
, yb
, bias
);
5338 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
5339 yb
= create_tmp_var (ptype
, name
);
5340 gimplify_assign (yb
, x
, ilist
);
5344 d
= TREE_OPERAND (d
, 0);
5345 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
5346 d
= TREE_OPERAND (d
, 0);
5347 if (TREE_CODE (d
) == ADDR_EXPR
)
5349 if (orig_var
!= var
)
5351 gcc_assert (is_variable_sized (orig_var
));
5352 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
5354 gimplify_assign (new_var
, x
, ilist
);
5355 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
5356 tree t
= build_fold_indirect_ref (new_var
);
5357 DECL_IGNORED_P (new_var
) = 0;
5358 TREE_THIS_NOTRAP (t
) = 1;
5359 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
5360 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
5364 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
5365 build_int_cst (ptype
, 0));
5366 SET_DECL_VALUE_EXPR (new_var
, x
);
5367 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5372 gcc_assert (orig_var
== var
);
5373 if (TREE_CODE (d
) == INDIRECT_REF
)
5375 x
= create_tmp_var (ptype
, name
);
5376 TREE_ADDRESSABLE (x
) = 1;
5377 gimplify_assign (x
, yb
, ilist
);
5378 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5380 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5381 gimplify_assign (new_var
, x
, ilist
);
5383 /* GOMP_taskgroup_reduction_register memsets the whole
5384 array to zero. If the initializer is zero, we don't
5385 need to initialize it again, just mark it as ever
5386 used unconditionally, i.e. cond = true. */
5388 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
5389 && initializer_zerop (omp_reduction_init (c
,
5392 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
5394 gimple_seq_add_stmt (ilist
, g
);
5397 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5401 if (!is_parallel_ctx (ctx
))
5403 tree condv
= create_tmp_var (boolean_type_node
);
5404 g
= gimple_build_assign (condv
,
5405 build_simple_mem_ref (cond
));
5406 gimple_seq_add_stmt (ilist
, g
);
5407 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
5408 g
= gimple_build_cond (NE_EXPR
, condv
,
5409 boolean_false_node
, end
, lab1
);
5410 gimple_seq_add_stmt (ilist
, g
);
5411 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
5413 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5415 gimple_seq_add_stmt (ilist
, g
);
5418 tree y1
= create_tmp_var (ptype
);
5419 gimplify_assign (y1
, y
, ilist
);
5420 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
5421 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
5422 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
5423 if (task_reduction_needs_orig_p
)
5425 y3
= create_tmp_var (ptype
);
5427 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5428 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5429 size_int (task_reduction_cnt_full
5430 + task_reduction_cntorig
- 1),
5431 NULL_TREE
, NULL_TREE
);
5434 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5435 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
5438 gimplify_assign (y3
, ref
, ilist
);
5440 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
5444 y2
= create_tmp_var (ptype
);
5445 gimplify_assign (y2
, y
, ilist
);
5447 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5449 tree ref
= build_outer_var_ref (var
, ctx
);
5450 /* For ref build_outer_var_ref already performs this. */
5451 if (TREE_CODE (d
) == INDIRECT_REF
)
5452 gcc_assert (omp_privatize_by_reference (var
));
5453 else if (TREE_CODE (d
) == ADDR_EXPR
)
5454 ref
= build_fold_addr_expr (ref
);
5455 else if (omp_privatize_by_reference (var
))
5456 ref
= build_fold_addr_expr (ref
);
5457 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
5458 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
5459 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5461 y3
= create_tmp_var (ptype
);
5462 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
5466 y4
= create_tmp_var (ptype
);
5467 gimplify_assign (y4
, ref
, dlist
);
5471 tree i
= create_tmp_var (TREE_TYPE (v
));
5472 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
5473 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5474 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
5477 i2
= create_tmp_var (TREE_TYPE (v
));
5478 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
5479 body2
= create_artificial_label (UNKNOWN_LOCATION
);
5480 end2
= create_artificial_label (UNKNOWN_LOCATION
);
5481 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
5483 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5485 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5486 tree decl_placeholder
5487 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
5488 SET_DECL_VALUE_EXPR (decl_placeholder
,
5489 build_simple_mem_ref (y1
));
5490 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
5491 SET_DECL_VALUE_EXPR (placeholder
,
5492 y3
? build_simple_mem_ref (y3
)
5494 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5495 x
= lang_hooks
.decls
.omp_clause_default_ctor
5496 (c
, build_simple_mem_ref (y1
),
5497 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
5499 gimplify_and_add (x
, ilist
);
5500 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5502 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5503 lower_omp (&tseq
, ctx
);
5504 gimple_seq_add_seq (ilist
, tseq
);
5506 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5509 SET_DECL_VALUE_EXPR (decl_placeholder
,
5510 build_simple_mem_ref (y2
));
5511 SET_DECL_VALUE_EXPR (placeholder
,
5512 build_simple_mem_ref (y4
));
5513 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5514 lower_omp (&tseq
, ctx
);
5515 gimple_seq_add_seq (dlist
, tseq
);
5516 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5518 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5519 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
5522 x
= lang_hooks
.decls
.omp_clause_dtor
5523 (c
, build_simple_mem_ref (y2
));
5525 gimplify_and_add (x
, dlist
);
5530 x
= omp_reduction_init (c
, TREE_TYPE (type
));
5531 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5533 /* reduction(-:var) sums up the partial results, so it
5534 acts identically to reduction(+:var). */
5535 if (code
== MINUS_EXPR
)
5538 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
5541 x
= build2 (code
, TREE_TYPE (type
),
5542 build_simple_mem_ref (y4
),
5543 build_simple_mem_ref (y2
));
5544 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
5548 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
5549 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5550 gimple_seq_add_stmt (ilist
, g
);
5553 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
5554 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5555 gimple_seq_add_stmt (ilist
, g
);
5557 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
5558 build_int_cst (TREE_TYPE (i
), 1));
5559 gimple_seq_add_stmt (ilist
, g
);
5560 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
5561 gimple_seq_add_stmt (ilist
, g
);
5562 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
5565 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
5566 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5567 gimple_seq_add_stmt (dlist
, g
);
5570 g
= gimple_build_assign
5571 (y4
, POINTER_PLUS_EXPR
, y4
,
5572 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5573 gimple_seq_add_stmt (dlist
, g
);
5575 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
5576 build_int_cst (TREE_TYPE (i2
), 1));
5577 gimple_seq_add_stmt (dlist
, g
);
5578 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
5579 gimple_seq_add_stmt (dlist
, g
);
5580 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
5584 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
5585 g
= gimple_build_call (f
, 2, allocate_ptr
, allocator
);
5586 gimple_seq_add_stmt (dlist
, g
);
5592 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5593 if (is_global_var (out
))
5595 else if (is_omp_target (ctx
->stmt
))
5599 bool by_ref
= use_pointer_for_field (var
, ctx
);
5600 x
= build_receiver_ref (var
, by_ref
, ctx
);
5602 if (!omp_privatize_by_reference (var
))
5603 x
= build_fold_addr_expr (x
);
5604 x
= fold_convert (ptr_type_node
, x
);
5605 unsigned cnt
= task_reduction_cnt
- 1;
5606 if (!task_reduction_needs_orig_p
)
5607 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
5609 cnt
= task_reduction_cntorig
- 1;
5610 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5611 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5612 gimplify_assign (r
, x
, ilist
);
5617 tree type
= TREE_TYPE (new_var
);
5618 if (!omp_privatize_by_reference (var
))
5619 type
= build_pointer_type (type
);
5620 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5622 unsigned cnt
= task_reduction_cnt
- 1;
5623 if (!task_reduction_needs_orig_p
)
5624 cnt
+= (task_reduction_cntorig_full
5625 - task_reduction_cntorig
);
5627 cnt
= task_reduction_cntorig
- 1;
5628 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5629 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5633 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5635 if (ctx
->task_reductions
[1 + idx
])
5636 off
= fold_convert (sizetype
,
5637 ctx
->task_reductions
[1 + idx
]);
5639 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5641 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
5644 x
= fold_convert (type
, x
);
5646 if (omp_privatize_by_reference (var
))
5648 gimplify_assign (new_var
, x
, ilist
);
5650 new_var
= build_simple_mem_ref (new_var
);
5654 t
= create_tmp_var (type
);
5655 gimplify_assign (t
, x
, ilist
);
5656 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
5657 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5659 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
5660 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
5661 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5662 cond
= create_tmp_var (TREE_TYPE (t
));
5663 gimplify_assign (cond
, t
, ilist
);
5665 else if (is_variable_sized (var
))
5667 /* For variable sized types, we need to allocate the
5668 actual storage here. Call alloca and store the
5669 result in the pointer decl that we created elsewhere. */
5673 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
5677 ptr
= DECL_VALUE_EXPR (new_var
);
5678 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
5679 ptr
= TREE_OPERAND (ptr
, 0);
5680 gcc_assert (DECL_P (ptr
));
5681 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
5683 if (lower_private_allocate (var
, new_var
, allocator
,
5684 allocate_ptr
, ilist
, ctx
,
5689 /* void *tmp = __builtin_alloca */
5691 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5693 = gimple_build_call (atmp
, 2, x
,
5694 size_int (DECL_ALIGN (var
)));
5695 cfun
->calls_alloca
= 1;
5696 tmp
= create_tmp_var_raw (ptr_type_node
);
5697 gimple_add_tmp_var (tmp
);
5698 gimple_call_set_lhs (stmt
, tmp
);
5700 gimple_seq_add_stmt (ilist
, stmt
);
5703 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
5704 gimplify_assign (ptr
, x
, ilist
);
5707 else if (omp_privatize_by_reference (var
)
5708 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
5709 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
5711 /* For references that are being privatized for Fortran,
5712 allocate new backing storage for the new pointer
5713 variable. This allows us to avoid changing all the
5714 code that expects a pointer to something that expects
5715 a direct variable. */
5719 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
5720 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
5722 x
= build_receiver_ref (var
, false, ctx
);
5723 if (ctx
->allocate_map
)
5724 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
5726 allocator
= *allocatep
;
5727 if (TREE_CODE (allocator
) == TREE_LIST
)
5728 allocator
= TREE_PURPOSE (allocator
);
5729 if (TREE_CODE (allocator
) != INTEGER_CST
)
5730 allocator
= build_outer_var_ref (allocator
, ctx
);
5731 allocator
= fold_convert (pointer_sized_int_node
,
5733 allocate_ptr
= unshare_expr (x
);
5735 if (allocator
== NULL_TREE
)
5736 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5738 else if (lower_private_allocate (var
, new_var
, allocator
,
5740 ilist
, ctx
, true, x
))
5742 else if (TREE_CONSTANT (x
))
5744 /* For reduction in SIMD loop, defer adding the
5745 initialization of the reference, because if we decide
5746 to use SIMD array for it, the initilization could cause
5747 expansion ICE. Ditto for other privatization clauses. */
5752 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
5754 gimple_add_tmp_var (x
);
5755 TREE_ADDRESSABLE (x
) = 1;
5756 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5762 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5763 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
5764 tree al
= size_int (TYPE_ALIGN (rtype
));
5765 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
5770 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5771 gimplify_assign (new_var
, x
, ilist
);
5774 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5776 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5777 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5778 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5786 switch (OMP_CLAUSE_CODE (c
))
5788 case OMP_CLAUSE_SHARED
:
5789 /* Ignore shared directives in teams construct inside
5790 target construct. */
5791 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5792 && !is_host_teams_ctx (ctx
))
5794 /* Shared global vars are just accessed directly. */
5795 if (is_global_var (new_var
))
5797 /* For taskloop firstprivate/lastprivate, represented
5798 as firstprivate and shared clause on the task, new_var
5799 is the firstprivate var. */
5800 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5802 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5803 needs to be delayed until after fixup_child_record_type so
5804 that we get the correct type during the dereference. */
5805 by_ref
= use_pointer_for_field (var
, ctx
);
5806 x
= build_receiver_ref (var
, by_ref
, ctx
);
5807 SET_DECL_VALUE_EXPR (new_var
, x
);
5808 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5810 /* ??? If VAR is not passed by reference, and the variable
5811 hasn't been initialized yet, then we'll get a warning for
5812 the store into the omp_data_s structure. Ideally, we'd be
5813 able to notice this and not store anything at all, but
5814 we're generating code too early. Suppress the warning. */
5816 suppress_warning (var
, OPT_Wuninitialized
);
5819 case OMP_CLAUSE__CONDTEMP_
:
5820 if (is_parallel_ctx (ctx
))
5822 x
= build_receiver_ref (var
, false, ctx
);
5823 SET_DECL_VALUE_EXPR (new_var
, x
);
5824 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5826 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
5828 x
= build_zero_cst (TREE_TYPE (var
));
5833 case OMP_CLAUSE_LASTPRIVATE
:
5834 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5838 case OMP_CLAUSE_PRIVATE
:
5839 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
5840 x
= build_outer_var_ref (var
, ctx
);
5841 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5843 if (is_task_ctx (ctx
))
5844 x
= build_receiver_ref (var
, false, ctx
);
5846 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
5854 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
5855 ilist
, ctx
, false, NULL_TREE
);
5856 nx
= unshare_expr (new_var
);
5858 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5859 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
5862 nx
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, nx
, x
);
5864 nx
= lang_hooks
.decls
.omp_clause_default_ctor (c
, nx
, x
);
5867 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5868 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
5869 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5870 && (gimple_omp_for_collapse (ctx
->stmt
) != 1
5871 || (gimple_omp_for_index (ctx
->stmt
, 0)
5873 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
5874 || omp_privatize_by_reference (var
))
5875 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5878 if (omp_privatize_by_reference (var
))
5880 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5881 tree new_vard
= TREE_OPERAND (new_var
, 0);
5882 gcc_assert (DECL_P (new_vard
));
5883 SET_DECL_VALUE_EXPR (new_vard
,
5884 build_fold_addr_expr (lvar
));
5885 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5890 tree iv
= unshare_expr (ivar
);
5892 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
,
5895 x
= lang_hooks
.decls
.omp_clause_default_ctor (c
,
5899 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
5901 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
5902 unshare_expr (ivar
), x
);
5906 gimplify_and_add (x
, &llist
[0]);
5907 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5908 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5913 gcc_assert (TREE_CODE (v
) == MEM_REF
);
5914 v
= TREE_OPERAND (v
, 0);
5915 gcc_assert (DECL_P (v
));
5917 v
= *ctx
->lastprivate_conditional_map
->get (v
);
5918 tree t
= create_tmp_var (TREE_TYPE (v
));
5919 tree z
= build_zero_cst (TREE_TYPE (v
));
5921 = build_outer_var_ref (var
, ctx
,
5922 OMP_CLAUSE_LASTPRIVATE
);
5923 gimple_seq_add_stmt (dlist
,
5924 gimple_build_assign (t
, z
));
5925 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
5926 tree civar
= DECL_VALUE_EXPR (v
);
5927 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
5928 civar
= unshare_expr (civar
);
5929 TREE_OPERAND (civar
, 1) = sctx
.idx
;
5930 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
5931 unshare_expr (civar
));
5932 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
5933 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
5934 orig_v
, unshare_expr (ivar
)));
5935 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
5937 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
5939 gimple_seq tseq
= NULL
;
5940 gimplify_and_add (x
, &tseq
);
5942 lower_omp (&tseq
, ctx
->outer
);
5943 gimple_seq_add_seq (&llist
[1], tseq
);
5945 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5946 && ctx
->for_simd_scan_phase
)
5948 x
= unshare_expr (ivar
);
5950 = build_outer_var_ref (var
, ctx
,
5951 OMP_CLAUSE_LASTPRIVATE
);
5952 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5954 gimplify_and_add (x
, &llist
[0]);
5958 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5960 gimplify_and_add (y
, &llist
[1]);
5964 if (omp_privatize_by_reference (var
))
5966 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5967 tree new_vard
= TREE_OPERAND (new_var
, 0);
5968 gcc_assert (DECL_P (new_vard
));
5969 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5970 x
= TYPE_SIZE_UNIT (type
);
5971 if (TREE_CONSTANT (x
))
5973 x
= create_tmp_var_raw (type
, get_name (var
));
5974 gimple_add_tmp_var (x
);
5975 TREE_ADDRESSABLE (x
) = 1;
5976 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5977 x
= fold_convert_loc (clause_loc
,
5978 TREE_TYPE (new_vard
), x
);
5979 gimplify_assign (new_vard
, x
, ilist
);
5984 gimplify_and_add (nx
, ilist
);
5985 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5987 && ctx
->for_simd_scan_phase
)
5989 tree orig_v
= build_outer_var_ref (var
, ctx
,
5990 OMP_CLAUSE_LASTPRIVATE
);
5991 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
5993 gimplify_and_add (x
, ilist
);
5998 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
6000 gimplify_and_add (x
, dlist
);
6003 if (!is_gimple_val (allocator
))
6005 tree avar
= create_tmp_var (TREE_TYPE (allocator
));
6006 gimplify_assign (avar
, allocator
, dlist
);
6009 if (!is_gimple_val (allocate_ptr
))
6011 tree apvar
= create_tmp_var (TREE_TYPE (allocate_ptr
));
6012 gimplify_assign (apvar
, allocate_ptr
, dlist
);
6013 allocate_ptr
= apvar
;
6015 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
6017 = gimple_build_call (f
, 2, allocate_ptr
, allocator
);
6018 gimple_seq_add_stmt (dlist
, g
);
6022 case OMP_CLAUSE_LINEAR
:
6023 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
6024 goto do_firstprivate
;
6025 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
6028 x
= build_outer_var_ref (var
, ctx
);
6031 case OMP_CLAUSE_FIRSTPRIVATE
:
6032 if (is_task_ctx (ctx
))
6034 if ((omp_privatize_by_reference (var
)
6035 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
6036 || is_variable_sized (var
))
6038 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
6040 || use_pointer_for_field (var
, NULL
))
6042 x
= build_receiver_ref (var
, false, ctx
);
6043 if (ctx
->allocate_map
)
6044 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
6046 allocator
= *allocatep
;
6047 if (TREE_CODE (allocator
) == TREE_LIST
)
6048 allocator
= TREE_PURPOSE (allocator
);
6049 if (TREE_CODE (allocator
) != INTEGER_CST
)
6050 allocator
= build_outer_var_ref (allocator
, ctx
);
6051 allocator
= fold_convert (pointer_sized_int_node
,
6053 allocate_ptr
= unshare_expr (x
);
6054 x
= build_simple_mem_ref (x
);
6055 TREE_THIS_NOTRAP (x
) = 1;
6057 SET_DECL_VALUE_EXPR (new_var
, x
);
6058 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
6062 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
6063 && omp_privatize_by_reference (var
))
6065 x
= build_outer_var_ref (var
, ctx
);
6066 gcc_assert (TREE_CODE (x
) == MEM_REF
6067 && integer_zerop (TREE_OPERAND (x
, 1)));
6068 x
= TREE_OPERAND (x
, 0);
6069 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6070 (c
, unshare_expr (new_var
), x
);
6071 gimplify_and_add (x
, ilist
);
6075 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
6076 ilist
, ctx
, false, NULL_TREE
);
6077 x
= build_outer_var_ref (var
, ctx
);
6080 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6081 && gimple_omp_for_combined_into_p (ctx
->stmt
))
6083 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6084 tree stept
= TREE_TYPE (t
);
6085 tree ct
= omp_find_clause (clauses
,
6086 OMP_CLAUSE__LOOPTEMP_
);
6088 tree l
= OMP_CLAUSE_DECL (ct
);
6089 tree n1
= fd
->loop
.n1
;
6090 tree step
= fd
->loop
.step
;
6091 tree itype
= TREE_TYPE (l
);
6092 if (POINTER_TYPE_P (itype
))
6093 itype
= signed_type_for (itype
);
6094 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
6095 if (TYPE_UNSIGNED (itype
)
6096 && fd
->loop
.cond_code
== GT_EXPR
)
6097 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
6098 fold_build1 (NEGATE_EXPR
, itype
, l
),
6099 fold_build1 (NEGATE_EXPR
,
6102 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
6103 t
= fold_build2 (MULT_EXPR
, stept
,
6104 fold_convert (stept
, l
), t
);
6106 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
6108 if (omp_privatize_by_reference (var
))
6110 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6111 tree new_vard
= TREE_OPERAND (new_var
, 0);
6112 gcc_assert (DECL_P (new_vard
));
6113 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6114 nx
= TYPE_SIZE_UNIT (type
);
6115 if (TREE_CONSTANT (nx
))
6117 nx
= create_tmp_var_raw (type
,
6119 gimple_add_tmp_var (nx
);
6120 TREE_ADDRESSABLE (nx
) = 1;
6121 nx
= build_fold_addr_expr_loc (clause_loc
,
6123 nx
= fold_convert_loc (clause_loc
,
6124 TREE_TYPE (new_vard
),
6126 gimplify_assign (new_vard
, nx
, ilist
);
6130 x
= lang_hooks
.decls
.omp_clause_linear_ctor
6132 gimplify_and_add (x
, ilist
);
6136 if (POINTER_TYPE_P (TREE_TYPE (x
)))
6137 x
= fold_build2 (POINTER_PLUS_EXPR
,
6138 TREE_TYPE (x
), x
, t
);
6140 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
6143 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
6144 || TREE_ADDRESSABLE (new_var
)
6145 || omp_privatize_by_reference (var
))
6146 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6149 if (omp_privatize_by_reference (var
))
6151 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6152 tree new_vard
= TREE_OPERAND (new_var
, 0);
6153 gcc_assert (DECL_P (new_vard
));
6154 SET_DECL_VALUE_EXPR (new_vard
,
6155 build_fold_addr_expr (lvar
));
6156 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6158 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
6160 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
6161 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
6162 gimplify_and_add (x
, ilist
);
6163 gimple_stmt_iterator gsi
6164 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
6166 = gimple_build_assign (unshare_expr (lvar
), iv
);
6167 gsi_insert_before_without_update (&gsi
, g
,
6169 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6170 enum tree_code code
= PLUS_EXPR
;
6171 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
6172 code
= POINTER_PLUS_EXPR
;
6173 g
= gimple_build_assign (iv
, code
, iv
, t
);
6174 gsi_insert_before_without_update (&gsi
, g
,
6178 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6179 (c
, unshare_expr (ivar
), x
);
6180 gimplify_and_add (x
, &llist
[0]);
6181 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6183 gimplify_and_add (x
, &llist
[1]);
6186 if (omp_privatize_by_reference (var
))
6188 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6189 tree new_vard
= TREE_OPERAND (new_var
, 0);
6190 gcc_assert (DECL_P (new_vard
));
6191 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6192 nx
= TYPE_SIZE_UNIT (type
);
6193 if (TREE_CONSTANT (nx
))
6195 nx
= create_tmp_var_raw (type
, get_name (var
));
6196 gimple_add_tmp_var (nx
);
6197 TREE_ADDRESSABLE (nx
) = 1;
6198 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
6199 nx
= fold_convert_loc (clause_loc
,
6200 TREE_TYPE (new_vard
), nx
);
6201 gimplify_assign (new_vard
, nx
, ilist
);
6205 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6206 (c
, unshare_expr (new_var
), x
);
6207 gimplify_and_add (x
, ilist
);
6210 case OMP_CLAUSE__LOOPTEMP_
:
6211 case OMP_CLAUSE__REDUCTEMP_
:
6212 gcc_assert (is_taskreg_ctx (ctx
));
6213 x
= build_outer_var_ref (var
, ctx
);
6214 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
6215 gimplify_and_add (x
, ilist
);
6218 case OMP_CLAUSE_COPYIN
:
6219 by_ref
= use_pointer_for_field (var
, NULL
);
6220 x
= build_receiver_ref (var
, by_ref
, ctx
);
6221 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
6222 append_to_statement_list (x
, ©in_seq
);
6223 copyin_by_ref
|= by_ref
;
6226 case OMP_CLAUSE_REDUCTION
:
6227 case OMP_CLAUSE_IN_REDUCTION
:
6228 /* OpenACC reductions are initialized using the
6229 GOACC_REDUCTION internal function. */
6230 if (is_gimple_omp_oacc (ctx
->stmt
))
6232 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6234 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6236 tree ptype
= TREE_TYPE (placeholder
);
6239 x
= error_mark_node
;
6240 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
6241 && !task_reduction_needs_orig_p
)
6243 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
6245 tree pptype
= build_pointer_type (ptype
);
6246 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
6247 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
6248 size_int (task_reduction_cnt_full
6249 + task_reduction_cntorig
- 1),
6250 NULL_TREE
, NULL_TREE
);
6254 = *ctx
->task_reduction_map
->get (c
);
6255 x
= task_reduction_read (ilist
, tskred_temp
,
6256 pptype
, 7 + 3 * idx
);
6258 x
= fold_convert (pptype
, x
);
6259 x
= build_simple_mem_ref (x
);
6264 lower_private_allocate (var
, new_var
, allocator
,
6265 allocate_ptr
, ilist
, ctx
, false,
6267 x
= build_outer_var_ref (var
, ctx
);
6269 if (omp_privatize_by_reference (var
)
6270 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
6271 x
= build_fold_addr_expr_loc (clause_loc
, x
);
6273 SET_DECL_VALUE_EXPR (placeholder
, x
);
6274 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6275 tree new_vard
= new_var
;
6276 if (omp_privatize_by_reference (var
))
6278 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6279 new_vard
= TREE_OPERAND (new_var
, 0);
6280 gcc_assert (DECL_P (new_vard
));
6282 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6284 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6285 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6288 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6292 if (new_vard
== new_var
)
6294 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
6295 SET_DECL_VALUE_EXPR (new_var
, ivar
);
6299 SET_DECL_VALUE_EXPR (new_vard
,
6300 build_fold_addr_expr (ivar
));
6301 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6303 x
= lang_hooks
.decls
.omp_clause_default_ctor
6304 (c
, unshare_expr (ivar
),
6305 build_outer_var_ref (var
, ctx
));
6306 if (rvarp
&& ctx
->for_simd_scan_phase
)
6309 gimplify_and_add (x
, &llist
[0]);
6310 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6312 gimplify_and_add (x
, &llist
[1]);
6319 gimplify_and_add (x
, &llist
[0]);
6321 tree ivar2
= unshare_expr (lvar
);
6322 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6323 x
= lang_hooks
.decls
.omp_clause_default_ctor
6324 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
6325 gimplify_and_add (x
, &llist
[0]);
6329 x
= lang_hooks
.decls
.omp_clause_default_ctor
6330 (c
, unshare_expr (rvar2
),
6331 build_outer_var_ref (var
, ctx
));
6332 gimplify_and_add (x
, &llist
[0]);
6335 /* For types that need construction, add another
6336 private var which will be default constructed
6337 and optionally initialized with
6338 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6339 loop we want to assign this value instead of
6340 constructing and destructing it in each
6342 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
6343 gimple_add_tmp_var (nv
);
6344 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
6348 x
= lang_hooks
.decls
.omp_clause_default_ctor
6349 (c
, nv
, build_outer_var_ref (var
, ctx
));
6350 gimplify_and_add (x
, ilist
);
6352 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6354 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6355 x
= DECL_VALUE_EXPR (new_vard
);
6357 if (new_vard
!= new_var
)
6358 vexpr
= build_fold_addr_expr (nv
);
6359 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6360 lower_omp (&tseq
, ctx
);
6361 SET_DECL_VALUE_EXPR (new_vard
, x
);
6362 gimple_seq_add_seq (ilist
, tseq
);
6363 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6366 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6368 gimplify_and_add (x
, dlist
);
6371 tree ref
= build_outer_var_ref (var
, ctx
);
6372 x
= unshare_expr (ivar
);
6373 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
6375 gimplify_and_add (x
, &llist
[0]);
6377 ref
= build_outer_var_ref (var
, ctx
);
6378 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
6380 gimplify_and_add (x
, &llist
[3]);
6382 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6383 if (new_vard
== new_var
)
6384 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6386 SET_DECL_VALUE_EXPR (new_vard
,
6387 build_fold_addr_expr (lvar
));
6389 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6391 gimplify_and_add (x
, &llist
[1]);
6393 tree ivar2
= unshare_expr (lvar
);
6394 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6395 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
6397 gimplify_and_add (x
, &llist
[1]);
6401 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
6403 gimplify_and_add (x
, &llist
[1]);
6408 gimplify_and_add (x
, &llist
[0]);
6409 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6411 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6412 lower_omp (&tseq
, ctx
);
6413 gimple_seq_add_seq (&llist
[0], tseq
);
6415 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6416 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6417 lower_omp (&tseq
, ctx
);
6418 gimple_seq_add_seq (&llist
[1], tseq
);
6419 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6420 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6421 if (new_vard
== new_var
)
6422 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6424 SET_DECL_VALUE_EXPR (new_vard
,
6425 build_fold_addr_expr (lvar
));
6426 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6428 gimplify_and_add (x
, &llist
[1]);
6431 /* If this is a reference to constant size reduction var
6432 with placeholder, we haven't emitted the initializer
6433 for it because it is undesirable if SIMD arrays are used.
6434 But if they aren't used, we need to emit the deferred
6435 initialization now. */
6436 else if (omp_privatize_by_reference (var
) && is_simd
)
6437 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6439 tree lab2
= NULL_TREE
;
6443 if (!is_parallel_ctx (ctx
))
6445 tree condv
= create_tmp_var (boolean_type_node
);
6446 tree m
= build_simple_mem_ref (cond
);
6447 g
= gimple_build_assign (condv
, m
);
6448 gimple_seq_add_stmt (ilist
, g
);
6450 = create_artificial_label (UNKNOWN_LOCATION
);
6451 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6452 g
= gimple_build_cond (NE_EXPR
, condv
,
6455 gimple_seq_add_stmt (ilist
, g
);
6456 gimple_seq_add_stmt (ilist
,
6457 gimple_build_label (lab1
));
6459 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6461 gimple_seq_add_stmt (ilist
, g
);
6463 x
= lang_hooks
.decls
.omp_clause_default_ctor
6464 (c
, unshare_expr (new_var
),
6466 : build_outer_var_ref (var
, ctx
));
6468 gimplify_and_add (x
, ilist
);
6470 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6471 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6473 if (ctx
->for_simd_scan_phase
)
6476 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
6478 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
6479 gimple_add_tmp_var (nv
);
6480 ctx
->cb
.decl_map
->put (new_vard
, nv
);
6481 x
= lang_hooks
.decls
.omp_clause_default_ctor
6482 (c
, nv
, build_outer_var_ref (var
, ctx
));
6484 gimplify_and_add (x
, ilist
);
6485 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6487 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6489 if (new_vard
!= new_var
)
6490 vexpr
= build_fold_addr_expr (nv
);
6491 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6492 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6493 lower_omp (&tseq
, ctx
);
6494 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
6495 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
6496 gimple_seq_add_seq (ilist
, tseq
);
6498 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6499 if (is_simd
&& ctx
->scan_exclusive
)
6502 = create_tmp_var_raw (TREE_TYPE (new_var
));
6503 gimple_add_tmp_var (nv2
);
6504 ctx
->cb
.decl_map
->put (nv
, nv2
);
6505 x
= lang_hooks
.decls
.omp_clause_default_ctor
6506 (c
, nv2
, build_outer_var_ref (var
, ctx
));
6507 gimplify_and_add (x
, ilist
);
6508 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6510 gimplify_and_add (x
, dlist
);
6512 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6514 gimplify_and_add (x
, dlist
);
6517 && ctx
->scan_exclusive
6518 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
6520 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
6521 gimple_add_tmp_var (nv2
);
6522 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
6523 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6525 gimplify_and_add (x
, dlist
);
6527 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6531 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6533 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6534 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
6535 && is_omp_target (ctx
->stmt
))
6537 tree d
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
6538 tree oldv
= NULL_TREE
;
6540 if (DECL_HAS_VALUE_EXPR_P (d
))
6541 oldv
= DECL_VALUE_EXPR (d
);
6542 SET_DECL_VALUE_EXPR (d
, new_vard
);
6543 DECL_HAS_VALUE_EXPR_P (d
) = 1;
6544 lower_omp (&tseq
, ctx
);
6546 SET_DECL_VALUE_EXPR (d
, oldv
);
6549 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
6550 DECL_HAS_VALUE_EXPR_P (d
) = 0;
6554 lower_omp (&tseq
, ctx
);
6555 gimple_seq_add_seq (ilist
, tseq
);
6557 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6560 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6561 lower_omp (&tseq
, ctx
);
6562 gimple_seq_add_seq (dlist
, tseq
);
6563 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6565 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6569 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6576 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
6577 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
6578 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6583 tree lab2
= NULL_TREE
;
6584 /* GOMP_taskgroup_reduction_register memsets the whole
6585 array to zero. If the initializer is zero, we don't
6586 need to initialize it again, just mark it as ever
6587 used unconditionally, i.e. cond = true. */
6588 if (initializer_zerop (x
))
6590 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6592 gimple_seq_add_stmt (ilist
, g
);
6597 if (!cond) { cond = true; new_var = x; } */
6598 if (!is_parallel_ctx (ctx
))
6600 tree condv
= create_tmp_var (boolean_type_node
);
6601 tree m
= build_simple_mem_ref (cond
);
6602 g
= gimple_build_assign (condv
, m
);
6603 gimple_seq_add_stmt (ilist
, g
);
6605 = create_artificial_label (UNKNOWN_LOCATION
);
6606 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6607 g
= gimple_build_cond (NE_EXPR
, condv
,
6610 gimple_seq_add_stmt (ilist
, g
);
6611 gimple_seq_add_stmt (ilist
,
6612 gimple_build_label (lab1
));
6614 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6616 gimple_seq_add_stmt (ilist
, g
);
6617 gimplify_assign (new_var
, x
, ilist
);
6619 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6623 /* reduction(-:var) sums up the partial results, so it
6624 acts identically to reduction(+:var). */
6625 if (code
== MINUS_EXPR
)
6629 = (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
);
6630 tree new_vard
= new_var
;
6631 if (is_simd
&& omp_privatize_by_reference (var
))
6633 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6634 new_vard
= TREE_OPERAND (new_var
, 0);
6635 gcc_assert (DECL_P (new_vard
));
6637 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6639 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6640 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6643 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6647 if (new_vard
!= new_var
)
6649 SET_DECL_VALUE_EXPR (new_vard
,
6650 build_fold_addr_expr (lvar
));
6651 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6654 tree ref
= build_outer_var_ref (var
, ctx
);
6658 if (ctx
->for_simd_scan_phase
)
6660 gimplify_assign (ivar
, ref
, &llist
[0]);
6661 ref
= build_outer_var_ref (var
, ctx
);
6662 gimplify_assign (ref
, rvar
, &llist
[3]);
6666 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
6671 simt_lane
= create_tmp_var (unsigned_type_node
);
6672 x
= build_call_expr_internal_loc
6673 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
6674 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
6675 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
6676 gimplify_assign (ivar
, x
, &llist
[2]);
6682 tree zero
= build_zero_cst (TREE_TYPE (ivar
));
6683 ivar2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6684 boolean_type_node
, ivar
,
6686 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6687 boolean_type_node
, ref
,
6690 x
= build2 (code
, TREE_TYPE (ref
), ref2
, ivar2
);
6692 x
= fold_convert (TREE_TYPE (ref
), x
);
6693 ref
= build_outer_var_ref (var
, ctx
);
6694 gimplify_assign (ref
, x
, &llist
[1]);
6699 lower_private_allocate (var
, new_var
, allocator
,
6700 allocate_ptr
, ilist
, ctx
,
6702 if (omp_privatize_by_reference (var
) && is_simd
)
6703 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6704 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6705 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6707 gimplify_assign (new_var
, x
, ilist
);
6710 tree ref
= build_outer_var_ref (var
, ctx
);
6711 tree new_var2
= new_var
;
6715 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
6717 = fold_build2_loc (clause_loc
, NE_EXPR
,
6718 boolean_type_node
, new_var
,
6720 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6721 boolean_type_node
, ref
,
6724 x
= build2 (code
, TREE_TYPE (ref2
), ref2
, new_var2
);
6726 x
= fold_convert (TREE_TYPE (new_var
), x
);
6727 ref
= build_outer_var_ref (var
, ctx
);
6728 gimplify_assign (ref
, x
, dlist
);
6743 tree clobber
= build_clobber (TREE_TYPE (tskred_avar
));
6744 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
6747 if (known_eq (sctx
.max_vf
, 1U))
6749 sctx
.is_simt
= false;
6750 if (ctx
->lastprivate_conditional_map
)
6752 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
6754 /* Signal to lower_omp_1 that it should use parent context. */
6755 ctx
->combined_into_simd_safelen1
= true;
6756 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6757 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6758 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6760 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6761 omp_context
*outer
= ctx
->outer
;
6762 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
6763 outer
= outer
->outer
;
6764 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
6765 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
6766 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
6772 /* When not vectorized, treat lastprivate(conditional:) like
6773 normal lastprivate, as there will be just one simd lane
6774 writing the privatized variable. */
6775 delete ctx
->lastprivate_conditional_map
;
6776 ctx
->lastprivate_conditional_map
= NULL
;
6781 if (nonconst_simd_if
)
6783 if (sctx
.lane
== NULL_TREE
)
6785 sctx
.idx
= create_tmp_var (unsigned_type_node
);
6786 sctx
.lane
= create_tmp_var (unsigned_type_node
);
6788 /* FIXME: For now. */
6789 sctx
.is_simt
= false;
6792 if (sctx
.lane
|| sctx
.is_simt
)
6794 uid
= create_tmp_var (ptr_type_node
, "simduid");
6795 /* Don't want uninit warnings on simduid, it is always uninitialized,
6796 but we use it not for the value, but for the DECL_UID only. */
6797 suppress_warning (uid
, OPT_Wuninitialized
);
6798 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
6799 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
6800 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6801 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6803 /* Emit calls denoting privatized variables and initializing a pointer to
6804 structure that holds private variables as fields after ompdevlow pass. */
6807 sctx
.simt_eargs
[0] = uid
;
6809 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
6810 gimple_call_set_lhs (g
, uid
);
6811 gimple_seq_add_stmt (ilist
, g
);
6812 sctx
.simt_eargs
.release ();
6814 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
6815 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
6816 gimple_call_set_lhs (g
, simtrec
);
6817 gimple_seq_add_stmt (ilist
, g
);
6821 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
6822 2 + (nonconst_simd_if
!= NULL
),
6823 uid
, integer_zero_node
,
6825 gimple_call_set_lhs (g
, sctx
.lane
);
6826 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
6827 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
6828 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
6829 build_int_cst (unsigned_type_node
, 0));
6830 gimple_seq_add_stmt (ilist
, g
);
6833 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6835 gimple_call_set_lhs (g
, sctx
.lastlane
);
6836 gimple_seq_add_stmt (dlist
, g
);
6837 gimple_seq_add_seq (dlist
, llist
[3]);
6839 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6842 tree simt_vf
= create_tmp_var (unsigned_type_node
);
6843 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
6844 gimple_call_set_lhs (g
, simt_vf
);
6845 gimple_seq_add_stmt (dlist
, g
);
6847 tree t
= build_int_cst (unsigned_type_node
, 1);
6848 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
6849 gimple_seq_add_stmt (dlist
, g
);
6851 t
= build_int_cst (unsigned_type_node
, 0);
6852 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6853 gimple_seq_add_stmt (dlist
, g
);
6855 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6856 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6857 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6858 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
6859 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
6861 gimple_seq_add_seq (dlist
, llist
[2]);
6863 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
6864 gimple_seq_add_stmt (dlist
, g
);
6866 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
6867 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
6868 gimple_seq_add_stmt (dlist
, g
);
6870 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
6872 for (int i
= 0; i
< 2; i
++)
6875 tree vf
= create_tmp_var (unsigned_type_node
);
6876 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
6877 gimple_call_set_lhs (g
, vf
);
6878 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
6879 gimple_seq_add_stmt (seq
, g
);
6880 tree t
= build_int_cst (unsigned_type_node
, 0);
6881 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6882 gimple_seq_add_stmt (seq
, g
);
6883 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6884 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6885 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6886 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
6887 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
6888 gimple_seq_add_seq (seq
, llist
[i
]);
6889 t
= build_int_cst (unsigned_type_node
, 1);
6890 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
6891 gimple_seq_add_stmt (seq
, g
);
6892 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
6893 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
6894 gimple_seq_add_stmt (seq
, g
);
6895 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
6900 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
6902 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
6903 gimple_seq_add_stmt (dlist
, g
);
6906 /* The copyin sequence is not to be executed by the main thread, since
6907 that would result in self-copies. Perhaps not visible to scalars,
6908 but it certainly is to C++ operator=. */
6911 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
6913 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
6914 build_int_cst (TREE_TYPE (x
), 0));
6915 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
6916 gimplify_and_add (x
, ilist
);
6919 /* If any copyin variable is passed by reference, we must ensure the
6920 master thread doesn't modify it before it is copied over in all
6921 threads. Similarly for variables in both firstprivate and
6922 lastprivate clauses we need to ensure the lastprivate copying
6923 happens after firstprivate copying in all threads. And similarly
6924 for UDRs if initializer expression refers to omp_orig. */
6925 if (copyin_by_ref
|| lastprivate_firstprivate
6926 || (reduction_omp_orig_ref
6927 && !ctx
->scan_inclusive
6928 && !ctx
->scan_exclusive
))
6930 /* Don't add any barrier for #pragma omp simd or
6931 #pragma omp distribute. */
6932 if (!is_task_ctx (ctx
)
6933 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
6934 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
6935 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
6938 /* If max_vf is non-zero, then we can use only a vectorization factor
6939 up to the max_vf we chose. So stick it into the safelen clause. */
6940 if (maybe_ne (sctx
.max_vf
, 0U))
6942 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
6943 OMP_CLAUSE_SAFELEN
);
6944 poly_uint64 safe_len
;
6946 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
6947 && maybe_gt (safe_len
, sctx
.max_vf
)))
6949 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
6950 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
6952 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6953 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6958 /* Create temporary variables for lastprivate(conditional:) implementation
6959 in context CTX with CLAUSES. */
6962 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
6964 tree iter_type
= NULL_TREE
;
6965 tree cond_ptr
= NULL_TREE
;
6966 tree iter_var
= NULL_TREE
;
6967 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6968 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
6969 tree next
= *clauses
;
6970 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6971 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6972 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6976 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
6978 if (iter_type
== NULL_TREE
)
6980 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
6981 iter_var
= create_tmp_var_raw (iter_type
);
6982 DECL_CONTEXT (iter_var
) = current_function_decl
;
6983 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6984 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6985 ctx
->block_vars
= iter_var
;
6987 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6988 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6989 OMP_CLAUSE_DECL (c3
) = iter_var
;
6990 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
6992 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6994 next
= OMP_CLAUSE_CHAIN (cc
);
6995 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6996 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
6997 ctx
->lastprivate_conditional_map
->put (o
, v
);
7000 if (iter_type
== NULL
)
7002 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
7004 struct omp_for_data fd
;
7005 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
7007 iter_type
= unsigned_type_for (fd
.iter_type
);
7009 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
7010 iter_type
= unsigned_type_node
;
7011 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
7015 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
7016 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
7020 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
7021 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
7022 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
7023 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
7024 ctx
->block_vars
= cond_ptr
;
7025 c2
= build_omp_clause (UNKNOWN_LOCATION
,
7026 OMP_CLAUSE__CONDTEMP_
);
7027 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
7028 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
7031 iter_var
= create_tmp_var_raw (iter_type
);
7032 DECL_CONTEXT (iter_var
) = current_function_decl
;
7033 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
7034 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
7035 ctx
->block_vars
= iter_var
;
7037 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
7038 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
7039 OMP_CLAUSE_DECL (c3
) = iter_var
;
7040 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
7041 OMP_CLAUSE_CHAIN (c2
) = c3
;
7042 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
7044 tree v
= create_tmp_var_raw (iter_type
);
7045 DECL_CONTEXT (v
) = current_function_decl
;
7046 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
7047 DECL_CHAIN (v
) = ctx
->block_vars
;
7048 ctx
->block_vars
= v
;
7049 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7050 ctx
->lastprivate_conditional_map
->put (o
, v
);
7055 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7056 both parallel and workshare constructs. PREDICATE may be NULL if it's
7057 always true. BODY_P is the sequence to insert early initialization
7058 if needed, STMT_LIST is where the non-conditional lastprivate handling
7059 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7063 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
7064 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
7067 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
7068 bool par_clauses
= false;
7069 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
7070 unsigned HOST_WIDE_INT conditional_off
= 0;
7071 gimple_seq post_stmt_list
= NULL
;
7073 /* Early exit if there are no lastprivate or linear clauses. */
7074 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
7075 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
7076 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
7077 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
7079 if (clauses
== NULL
)
7081 /* If this was a workshare clause, see if it had been combined
7082 with its parallel. In that case, look for the clauses on the
7083 parallel statement itself. */
7084 if (is_parallel_ctx (ctx
))
7088 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
7091 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7092 OMP_CLAUSE_LASTPRIVATE
);
7093 if (clauses
== NULL
)
7098 bool maybe_simt
= false;
7099 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7100 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7102 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
7103 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
7105 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
7111 tree label_true
, arm1
, arm2
;
7112 enum tree_code pred_code
= TREE_CODE (predicate
);
7114 label
= create_artificial_label (UNKNOWN_LOCATION
);
7115 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
7116 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
7118 arm1
= TREE_OPERAND (predicate
, 0);
7119 arm2
= TREE_OPERAND (predicate
, 1);
7120 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7121 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7126 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7127 arm2
= boolean_false_node
;
7128 pred_code
= NE_EXPR
;
7132 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
7133 c
= fold_convert (integer_type_node
, c
);
7134 simtcond
= create_tmp_var (integer_type_node
);
7135 gimplify_assign (simtcond
, c
, stmt_list
);
7136 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
7138 c
= create_tmp_var (integer_type_node
);
7139 gimple_call_set_lhs (g
, c
);
7140 gimple_seq_add_stmt (stmt_list
, g
);
7141 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
7145 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
7146 gimple_seq_add_stmt (stmt_list
, stmt
);
7147 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
7150 tree cond_ptr
= NULL_TREE
;
7151 for (c
= clauses
; c
;)
7154 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7155 gimple_seq
*this_stmt_list
= stmt_list
;
7156 tree lab2
= NULL_TREE
;
7158 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7159 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
7160 && ctx
->lastprivate_conditional_map
7161 && !ctx
->combined_into_simd_safelen1
)
7163 gcc_assert (body_p
);
7166 if (cond_ptr
== NULL_TREE
)
7168 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
7169 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
7171 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
7172 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7173 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
7174 gimplify_assign (v
, build_zero_cst (type
), body_p
);
7175 this_stmt_list
= cstmt_list
;
7177 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
7179 mem
= build2 (MEM_REF
, type
, cond_ptr
,
7180 build_int_cst (TREE_TYPE (cond_ptr
),
7182 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
7185 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
7186 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
7187 tree mem2
= copy_node (mem
);
7188 gimple_seq seq
= NULL
;
7189 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
7190 gimple_seq_add_seq (this_stmt_list
, seq
);
7191 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
7192 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
7193 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
7194 gimple_seq_add_stmt (this_stmt_list
, g
);
7195 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
7196 gimplify_assign (mem2
, v
, this_stmt_list
);
7199 && ctx
->combined_into_simd_safelen1
7200 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7201 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
7202 && ctx
->lastprivate_conditional_map
)
7203 this_stmt_list
= &post_stmt_list
;
7205 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7206 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7207 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
7209 var
= OMP_CLAUSE_DECL (c
);
7210 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7211 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
7212 && is_taskloop_ctx (ctx
))
7214 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
7215 new_var
= lookup_decl (var
, ctx
->outer
);
7219 new_var
= lookup_decl (var
, ctx
);
7220 /* Avoid uninitialized warnings for lastprivate and
7221 for linear iterators. */
7223 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7224 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
7225 suppress_warning (new_var
, OPT_Wuninitialized
);
7228 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
7230 tree val
= DECL_VALUE_EXPR (new_var
);
7231 if (TREE_CODE (val
) == ARRAY_REF
7232 && VAR_P (TREE_OPERAND (val
, 0))
7233 && lookup_attribute ("omp simd array",
7234 DECL_ATTRIBUTES (TREE_OPERAND (val
,
7237 if (lastlane
== NULL
)
7239 lastlane
= create_tmp_var (unsigned_type_node
);
7241 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
7243 TREE_OPERAND (val
, 1));
7244 gimple_call_set_lhs (g
, lastlane
);
7245 gimple_seq_add_stmt (this_stmt_list
, g
);
7247 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
7248 TREE_OPERAND (val
, 0), lastlane
,
7249 NULL_TREE
, NULL_TREE
);
7250 TREE_THIS_NOTRAP (new_var
) = 1;
7253 else if (maybe_simt
)
7255 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
7256 ? DECL_VALUE_EXPR (new_var
)
7258 if (simtlast
== NULL
)
7260 simtlast
= create_tmp_var (unsigned_type_node
);
7261 gcall
*g
= gimple_build_call_internal
7262 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
7263 gimple_call_set_lhs (g
, simtlast
);
7264 gimple_seq_add_stmt (this_stmt_list
, g
);
7266 x
= build_call_expr_internal_loc
7267 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
7268 TREE_TYPE (val
), 2, val
, simtlast
);
7269 new_var
= unshare_expr (new_var
);
7270 gimplify_assign (new_var
, x
, this_stmt_list
);
7271 new_var
= unshare_expr (new_var
);
7274 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7275 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
7277 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
7278 gimple_seq_add_seq (this_stmt_list
,
7279 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
7280 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
7282 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7283 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
7285 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
7286 gimple_seq_add_seq (this_stmt_list
,
7287 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
7288 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
7292 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7293 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)
7294 && is_taskloop_ctx (ctx
))
7296 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
7298 if (is_global_var (ovar
))
7302 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
7303 if (omp_privatize_by_reference (var
))
7304 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7305 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
7306 gimplify_and_add (x
, this_stmt_list
);
7309 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
7313 c
= OMP_CLAUSE_CHAIN (c
);
7314 if (c
== NULL
&& !par_clauses
)
7316 /* If this was a workshare clause, see if it had been combined
7317 with its parallel. In that case, continue looking for the
7318 clauses also on the parallel statement itself. */
7319 if (is_parallel_ctx (ctx
))
7323 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
7326 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7327 OMP_CLAUSE_LASTPRIVATE
);
7333 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
7334 gimple_seq_add_seq (stmt_list
, post_stmt_list
);
7337 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7338 (which might be a placeholder). INNER is true if this is an inner
7339 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7340 join markers. Generate the before-loop forking sequence in
7341 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7342 general form of these sequences is
7344 GOACC_REDUCTION_SETUP
7346 GOACC_REDUCTION_INIT
7348 GOACC_REDUCTION_FINI
7350 GOACC_REDUCTION_TEARDOWN. */
7353 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
7354 gcall
*fork
, gcall
*private_marker
, gcall
*join
,
7355 gimple_seq
*fork_seq
, gimple_seq
*join_seq
,
7358 gimple_seq before_fork
= NULL
;
7359 gimple_seq after_fork
= NULL
;
7360 gimple_seq before_join
= NULL
;
7361 gimple_seq after_join
= NULL
;
7362 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
7363 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
7364 unsigned offset
= 0;
7366 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7367 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
7369 /* No 'reduction' clauses on OpenACC 'kernels'. */
7370 gcc_checking_assert (!is_oacc_kernels (ctx
));
7371 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7372 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
7374 tree orig
= OMP_CLAUSE_DECL (c
);
7375 tree var
= maybe_lookup_decl (orig
, ctx
);
7376 tree ref_to_res
= NULL_TREE
;
7377 tree incoming
, outgoing
, v1
, v2
, v3
;
7378 bool is_private
= false;
7380 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
7381 if (rcode
== MINUS_EXPR
)
7383 else if (rcode
== TRUTH_ANDIF_EXPR
)
7384 rcode
= BIT_AND_EXPR
;
7385 else if (rcode
== TRUTH_ORIF_EXPR
)
7386 rcode
= BIT_IOR_EXPR
;
7387 tree op
= build_int_cst (unsigned_type_node
, rcode
);
7392 incoming
= outgoing
= var
;
7396 /* See if an outer construct also reduces this variable. */
7397 omp_context
*outer
= ctx
;
7399 while (omp_context
*probe
= outer
->outer
)
7401 enum gimple_code type
= gimple_code (probe
->stmt
);
7406 case GIMPLE_OMP_FOR
:
7407 cls
= gimple_omp_for_clauses (probe
->stmt
);
7410 case GIMPLE_OMP_TARGET
:
7411 /* No 'reduction' clauses inside OpenACC 'kernels'
7413 gcc_checking_assert (!is_oacc_kernels (probe
));
7415 if (!is_gimple_omp_offloaded (probe
->stmt
))
7418 cls
= gimple_omp_target_clauses (probe
->stmt
);
7426 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
7427 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
7428 && orig
== OMP_CLAUSE_DECL (cls
))
7430 incoming
= outgoing
= lookup_decl (orig
, probe
);
7431 goto has_outer_reduction
;
7433 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
7434 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
7435 && orig
== OMP_CLAUSE_DECL (cls
))
7443 /* This is the outermost construct with this reduction,
7444 see if there's a mapping for it. */
7445 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
7446 && maybe_lookup_field (orig
, outer
) && !is_private
)
7448 ref_to_res
= build_receiver_ref (orig
, false, outer
);
7449 if (omp_privatize_by_reference (orig
))
7450 ref_to_res
= build_simple_mem_ref (ref_to_res
);
7452 tree type
= TREE_TYPE (var
);
7453 if (POINTER_TYPE_P (type
))
7454 type
= TREE_TYPE (type
);
7457 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
7461 /* Try to look at enclosing contexts for reduction var,
7462 use original if no mapping found. */
7464 omp_context
*c
= ctx
->outer
;
7467 t
= maybe_lookup_decl (orig
, c
);
7470 incoming
= outgoing
= (t
? t
: orig
);
7473 has_outer_reduction
:;
7477 ref_to_res
= integer_zero_node
;
7479 if (omp_privatize_by_reference (orig
))
7481 tree type
= TREE_TYPE (var
);
7482 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
7486 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
7487 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
7490 v1
= create_tmp_var (type
, id
);
7491 v2
= create_tmp_var (type
, id
);
7492 v3
= create_tmp_var (type
, id
);
7494 gimplify_assign (v1
, var
, fork_seq
);
7495 gimplify_assign (v2
, var
, fork_seq
);
7496 gimplify_assign (v3
, var
, fork_seq
);
7498 var
= build_simple_mem_ref (var
);
7499 v1
= build_simple_mem_ref (v1
);
7500 v2
= build_simple_mem_ref (v2
);
7501 v3
= build_simple_mem_ref (v3
);
7502 outgoing
= build_simple_mem_ref (outgoing
);
7504 if (!TREE_CONSTANT (incoming
))
7505 incoming
= build_simple_mem_ref (incoming
);
7510 /* Determine position in reduction buffer, which may be used
7511 by target. The parser has ensured that this is not a
7512 variable-sized type. */
7513 fixed_size_mode mode
7514 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
7515 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
7516 offset
= (offset
+ align
- 1) & ~(align
- 1);
7517 tree off
= build_int_cst (sizetype
, offset
);
7518 offset
+= GET_MODE_SIZE (mode
);
7522 init_code
= build_int_cst (integer_type_node
,
7523 IFN_GOACC_REDUCTION_INIT
);
7524 fini_code
= build_int_cst (integer_type_node
,
7525 IFN_GOACC_REDUCTION_FINI
);
7526 setup_code
= build_int_cst (integer_type_node
,
7527 IFN_GOACC_REDUCTION_SETUP
);
7528 teardown_code
= build_int_cst (integer_type_node
,
7529 IFN_GOACC_REDUCTION_TEARDOWN
);
7533 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7534 TREE_TYPE (var
), 6, setup_code
,
7535 unshare_expr (ref_to_res
),
7536 incoming
, level
, op
, off
);
7538 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7539 TREE_TYPE (var
), 6, init_code
,
7540 unshare_expr (ref_to_res
),
7541 v1
, level
, op
, off
);
7543 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7544 TREE_TYPE (var
), 6, fini_code
,
7545 unshare_expr (ref_to_res
),
7546 v2
, level
, op
, off
);
7548 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7549 TREE_TYPE (var
), 6, teardown_code
,
7550 ref_to_res
, v3
, level
, op
, off
);
7552 gimplify_assign (v1
, setup_call
, &before_fork
);
7553 gimplify_assign (v2
, init_call
, &after_fork
);
7554 gimplify_assign (v3
, fini_call
, &before_join
);
7555 gimplify_assign (outgoing
, teardown_call
, &after_join
);
7558 /* Now stitch things together. */
7559 gimple_seq_add_seq (fork_seq
, before_fork
);
7561 gimple_seq_add_stmt (fork_seq
, private_marker
);
7563 gimple_seq_add_stmt (fork_seq
, fork
);
7564 gimple_seq_add_seq (fork_seq
, after_fork
);
7566 gimple_seq_add_seq (join_seq
, before_join
);
7568 gimple_seq_add_stmt (join_seq
, join
);
7569 gimple_seq_add_seq (join_seq
, after_join
);
7572 /* Generate code to implement the REDUCTION clauses, append it
7573 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7574 that should be emitted also inside of the critical section,
7575 in that case clear *CLIST afterwards, otherwise leave it as is
7576 and let the caller emit it itself. */
7579 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
7580 gimple_seq
*clist
, omp_context
*ctx
)
7582 gimple_seq sub_seq
= NULL
;
7587 /* OpenACC loop reductions are handled elsewhere. */
7588 if (is_gimple_omp_oacc (ctx
->stmt
))
7591 /* SIMD reductions are handled in lower_rec_input_clauses. */
7592 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7593 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7596 /* inscan reductions are handled elsewhere. */
7597 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
7600 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7601 update in that case, otherwise use a lock. */
7602 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
7603 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7604 && !OMP_CLAUSE_REDUCTION_TASK (c
))
7606 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
7607 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7609 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7619 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7621 tree var
, ref
, new_var
, orig_var
;
7622 enum tree_code code
;
7623 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7625 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7626 || OMP_CLAUSE_REDUCTION_TASK (c
))
7629 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
7630 orig_var
= var
= OMP_CLAUSE_DECL (c
);
7631 if (TREE_CODE (var
) == MEM_REF
)
7633 var
= TREE_OPERAND (var
, 0);
7634 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
7635 var
= TREE_OPERAND (var
, 0);
7636 if (TREE_CODE (var
) == ADDR_EXPR
)
7637 var
= TREE_OPERAND (var
, 0);
7640 /* If this is a pointer or referenced based array
7641 section, the var could be private in the outer
7642 context e.g. on orphaned loop construct. Pretend this
7643 is private variable's outer reference. */
7644 ccode
= OMP_CLAUSE_PRIVATE
;
7645 if (TREE_CODE (var
) == INDIRECT_REF
)
7646 var
= TREE_OPERAND (var
, 0);
7649 if (is_variable_sized (var
))
7651 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
7652 var
= DECL_VALUE_EXPR (var
);
7653 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
7654 var
= TREE_OPERAND (var
, 0);
7655 gcc_assert (DECL_P (var
));
7658 new_var
= lookup_decl (var
, ctx
);
7659 if (var
== OMP_CLAUSE_DECL (c
)
7660 && omp_privatize_by_reference (var
))
7661 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7662 ref
= build_outer_var_ref (var
, ctx
, ccode
);
7663 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
7665 /* reduction(-:var) sums up the partial results, so it acts
7666 identically to reduction(+:var). */
7667 if (code
== MINUS_EXPR
)
7670 bool is_truth_op
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
);
7673 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
7675 addr
= save_expr (addr
);
7676 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
7677 tree new_var2
= new_var
;
7681 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7682 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7683 boolean_type_node
, new_var
, zero
);
7684 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, boolean_type_node
,
7687 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (new_var2
), ref2
,
7690 x
= fold_convert (TREE_TYPE (new_var
), x
);
7691 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
7692 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
7693 gimplify_and_add (x
, stmt_seqp
);
7696 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7698 tree d
= OMP_CLAUSE_DECL (c
);
7699 tree type
= TREE_TYPE (d
);
7700 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7701 tree i
= create_tmp_var (TREE_TYPE (v
));
7702 tree ptype
= build_pointer_type (TREE_TYPE (type
));
7703 tree bias
= TREE_OPERAND (d
, 1);
7704 d
= TREE_OPERAND (d
, 0);
7705 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
7707 tree b
= TREE_OPERAND (d
, 1);
7708 b
= maybe_lookup_decl (b
, ctx
);
7711 b
= TREE_OPERAND (d
, 1);
7712 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
7714 if (integer_zerop (bias
))
7718 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
7719 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
7720 TREE_TYPE (b
), b
, bias
);
7722 d
= TREE_OPERAND (d
, 0);
7724 /* For ref build_outer_var_ref already performs this, so
7725 only new_var needs a dereference. */
7726 if (TREE_CODE (d
) == INDIRECT_REF
)
7728 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7729 gcc_assert (omp_privatize_by_reference (var
)
7730 && var
== orig_var
);
7732 else if (TREE_CODE (d
) == ADDR_EXPR
)
7734 if (orig_var
== var
)
7736 new_var
= build_fold_addr_expr (new_var
);
7737 ref
= build_fold_addr_expr (ref
);
7742 gcc_assert (orig_var
== var
);
7743 if (omp_privatize_by_reference (var
))
7744 ref
= build_fold_addr_expr (ref
);
7748 tree t
= maybe_lookup_decl (v
, ctx
);
7752 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
7753 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
7755 if (!integer_zerop (bias
))
7757 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
7758 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7759 TREE_TYPE (new_var
), new_var
,
7760 unshare_expr (bias
));
7761 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7762 TREE_TYPE (ref
), ref
, bias
);
7764 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
7765 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
7766 tree m
= create_tmp_var (ptype
);
7767 gimplify_assign (m
, new_var
, stmt_seqp
);
7769 m
= create_tmp_var (ptype
);
7770 gimplify_assign (m
, ref
, stmt_seqp
);
7772 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
7773 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7774 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7775 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
7776 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7777 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
7778 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7780 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7781 tree decl_placeholder
7782 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
7783 SET_DECL_VALUE_EXPR (placeholder
, out
);
7784 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7785 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
7786 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
7787 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7788 gimple_seq_add_seq (&sub_seq
,
7789 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7790 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7791 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7792 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
7800 tree zero
= build_zero_cst (TREE_TYPE (out
));
7801 out2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7802 boolean_type_node
, out
, zero
);
7803 priv2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7804 boolean_type_node
, priv
, zero
);
7806 x
= build2 (code
, TREE_TYPE (out2
), out2
, priv2
);
7808 x
= fold_convert (TREE_TYPE (out
), x
);
7809 out
= unshare_expr (out
);
7810 gimplify_assign (out
, x
, &sub_seq
);
7812 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
7813 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7814 gimple_seq_add_stmt (&sub_seq
, g
);
7815 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
7816 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7817 gimple_seq_add_stmt (&sub_seq
, g
);
7818 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
7819 build_int_cst (TREE_TYPE (i
), 1));
7820 gimple_seq_add_stmt (&sub_seq
, g
);
7821 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
7822 gimple_seq_add_stmt (&sub_seq
, g
);
7823 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
7825 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7827 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7829 if (omp_privatize_by_reference (var
)
7830 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7832 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
7833 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7834 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7835 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7836 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7837 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7838 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7842 tree new_var2
= new_var
;
7846 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7847 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7848 boolean_type_node
, new_var
, zero
);
7849 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, boolean_type_node
,
7852 x
= build2 (code
, TREE_TYPE (ref
), ref2
, new_var2
);
7854 x
= fold_convert (TREE_TYPE (new_var
), x
);
7855 ref
= build_outer_var_ref (var
, ctx
);
7856 gimplify_assign (ref
, x
, &sub_seq
);
7860 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
7862 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7864 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
7868 gimple_seq_add_seq (stmt_seqp
, *clist
);
7872 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
7874 gimple_seq_add_stmt (stmt_seqp
, stmt
);
/* NOTE(review): extraction-garbled fragment of lower_copyprivate_clauses.
   Statements are split across lines, the original file's line numbers
   (7878..7922) are fused into the text, and several original lines
   (closing braces, part of the signature) are missing.  Restore from the
   upstream GCC omp-low.c before editing.  Purpose (from the visible code):
   for each OMP_CLAUSE_COPYPRIVATE clause, store the variable's address (or
   value) into the sender record on SLIST and load it back through the
   receiver record on RLIST via the langhook assignment op.  */
7878 /* Generate code to implement the COPYPRIVATE clauses. */
7881 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
7886 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7888 tree var
, new_var
, ref
, x
;
7890 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
/* Skip every clause that is not COPYPRIVATE.  */
7892 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
7895 var
= OMP_CLAUSE_DECL (c
);
7896 by_ref
= use_pointer_for_field (var
, NULL
);
/* Sender side: place the (possibly address-taken) outer variable into
   the marshalling record.  */
7898 ref
= build_sender_ref (var
, ctx
);
7899 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
7902 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
7903 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
7905 gimplify_assign (ref
, x
, slist
);
/* Receiver side: read the value back out of the receiver record,
   dereferencing where the field holds a pointer.  */
7907 ref
= build_receiver_ref (var
, false, ctx
);
7910 ref
= fold_convert_loc (clause_loc
,
7911 build_pointer_type (TREE_TYPE (new_var
)),
7913 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
7915 if (omp_privatize_by_reference (var
))
7917 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
7918 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
7919 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
/* Let the language frontend build the actual copy-assignment.  */
7921 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
7922 gimplify_and_add (x
, rlist
);
/* NOTE(review): extraction-garbled fragment of lower_send_clauses.
   Statements are shredded across lines with the original file's line
   numbers (7927..8112) embedded, and many original lines (braces,
   `continue`/`break` statements, parts of conditions) are missing.  Do not
   edit in place; restore from upstream GCC omp-low.c.  Purpose (from the
   visible code): for FIRSTPRIVATE, COPYIN, LASTPRIVATE, REDUCTION and
   related clauses, emit the copies into the sender record (ILIST) before
   the region and back out of it (OLIST) after the region.  */
7927 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7928 and REDUCTION from the sender (aka parent) side. */
7931 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
7935 int ignored_looptemp
= 0;
7936 bool is_taskloop
= false;
7938 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7939 by GOMP_taskloop. */
7940 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
7942 ignored_looptemp
= 2;
7946 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7948 tree val
, ref
, x
, var
;
7949 bool by_ref
, do_in
= false, do_out
= false;
7950 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
/* First switch: decide which clause kinds participate at all.  */
7952 switch (OMP_CLAUSE_CODE (c
))
7954 case OMP_CLAUSE_PRIVATE
:
7955 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
7958 case OMP_CLAUSE_FIRSTPRIVATE
:
7959 case OMP_CLAUSE_COPYIN
:
7960 case OMP_CLAUSE_LASTPRIVATE
:
7961 case OMP_CLAUSE_IN_REDUCTION
:
7962 case OMP_CLAUSE__REDUCTEMP_
:
7964 case OMP_CLAUSE_REDUCTION
:
7965 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
7968 case OMP_CLAUSE_SHARED
:
7969 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7972 case OMP_CLAUSE__LOOPTEMP_
:
7973 if (ignored_looptemp
)
/* Strip MEM_REF/ADDR_EXPR wrappers off array-section reduction decls
   to find the underlying variable.  */
7983 val
= OMP_CLAUSE_DECL (c
);
7984 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7985 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
7986 && TREE_CODE (val
) == MEM_REF
)
7988 val
= TREE_OPERAND (val
, 0);
7989 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
7990 val
= TREE_OPERAND (val
, 0);
7991 if (TREE_CODE (val
) == INDIRECT_REF
7992 || TREE_CODE (val
) == ADDR_EXPR
)
7993 val
= TREE_OPERAND (val
, 0);
7994 if (is_variable_sized (val
))
7998 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7999 outer taskloop region. */
8000 omp_context
*ctx_for_o
= ctx
;
8002 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
8003 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
8004 ctx_for_o
= ctx
->outer
;
8006 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
8008 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
8009 && is_global_var (var
)
8010 && (val
== OMP_CLAUSE_DECL (c
)
8011 || !is_task_ctx (ctx
)
8012 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
8013 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
8014 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
8015 != POINTER_TYPE
)))))
8018 t
= omp_member_access_dummy_var (var
);
8021 var
= DECL_VALUE_EXPR (var
);
8022 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
8024 var
= unshare_and_remap (var
, t
, o
);
8026 var
= unshare_expr (var
);
8029 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
8031 /* Handle taskloop firstprivate/lastprivate, where the
8032 lastprivate on GIMPLE_OMP_TASK is represented as
8033 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8034 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
8035 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
8036 if (use_pointer_for_field (val
, ctx
))
8037 var
= build_fold_addr_expr (var
);
8038 gimplify_assign (x
, var
, ilist
);
8039 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
8043 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
8044 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
8045 || val
== OMP_CLAUSE_DECL (c
))
8046 && is_variable_sized (val
))
8048 by_ref
= use_pointer_for_field (val
, NULL
);
/* Second switch: per-clause-kind decision of copy-in/copy-out
   directions (do_in/do_out).  */
8050 switch (OMP_CLAUSE_CODE (c
))
8052 case OMP_CLAUSE_FIRSTPRIVATE
:
8053 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
8055 && is_task_ctx (ctx
))
8056 suppress_warning (var
);
8060 case OMP_CLAUSE_PRIVATE
:
8061 case OMP_CLAUSE_COPYIN
:
8062 case OMP_CLAUSE__LOOPTEMP_
:
8063 case OMP_CLAUSE__REDUCTEMP_
:
8067 case OMP_CLAUSE_LASTPRIVATE
:
8068 if (by_ref
|| omp_privatize_by_reference (val
))
8070 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
8077 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
8082 case OMP_CLAUSE_REDUCTION
:
8083 case OMP_CLAUSE_IN_REDUCTION
:
8085 if (val
== OMP_CLAUSE_DECL (c
))
8087 if (is_task_ctx (ctx
))
8088 by_ref
= use_pointer_for_field (val
, ctx
);
8090 do_out
= !(by_ref
|| omp_privatize_by_reference (val
));
8093 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
/* Emit the actual copy into the sender record (ILIST) and, when
   needed, the copy back out after the region (OLIST).  */
8102 ref
= build_sender_ref (val
, ctx
);
8103 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
8104 gimplify_assign (ref
, x
, ilist
);
8105 if (is_task_ctx (ctx
))
8106 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
8111 ref
= build_sender_ref (val
, ctx
);
8112 gimplify_assign (var
, ref
, olist
);
/* NOTE(review): extraction-garbled fragment of lower_send_shared_vars.
   Statements are split across lines with original line numbers (8117..8189)
   fused in; some lines (braces, `continue`s) are missing.  Restore from
   upstream GCC omp-low.c before editing.  Purpose (from the visible code):
   walk the fields of the context's (s)record_type and emit assignments
   marshalling each automatically-shared variable into the sender record
   (ILIST) and, when writable, back out of it (OLIST).  */
8117 /* Generate code to implement SHARED from the sender (aka parent)
8118 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8119 list things that got automatically shared. */
8122 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
8124 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
8126 if (ctx
->record_type
== NULL
)
8129 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
8130 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
8132 ovar
= DECL_ABSTRACT_ORIGIN (f
);
8133 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
8136 nvar
= maybe_lookup_decl (ovar
, ctx
);
8138 || !DECL_HAS_VALUE_EXPR_P (nvar
)
8139 || (ctx
->allocate_map
8140 && ctx
->allocate_map
->get (ovar
)))
8143 /* If CTX is a nested parallel directive. Find the immediately
8144 enclosing parallel or workshare construct that contains a
8145 mapping for OVAR. */
8146 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
8148 t
= omp_member_access_dummy_var (var
);
8151 var
= DECL_VALUE_EXPR (var
);
8152 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
8154 var
= unshare_and_remap (var
, t
, o
);
8156 var
= unshare_expr (var
);
/* Pointer-marshalled case: store the variable's address (or, for the
   artificial _condtemp_ array, an empty constructor) into the field.  */
8159 if (use_pointer_for_field (ovar
, ctx
))
8161 x
= build_sender_ref (ovar
, ctx
);
8162 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
8163 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
8165 gcc_assert (is_parallel_ctx (ctx
)
8166 && DECL_ARTIFICIAL (ovar
));
8167 /* _condtemp_ clause. */
8168 var
= build_constructor (TREE_TYPE (x
), NULL
);
8171 var
= build_fold_addr_expr (var
);
8172 gimplify_assign (x
, var
, ilist
);
/* By-value case: copy in, and copy back out unless read-only or a
   by-reference RESULT/PARM decl.  */
8176 x
= build_sender_ref (ovar
, ctx
);
8177 gimplify_assign (x
, var
, ilist
);
8179 if (!TREE_READONLY (var
)
8180 /* We don't need to receive a new reference to a result
8181 or parm decl. In fact we may not store to it as we will
8182 invalidate any pending RSO and generate wrong gimple
8184 && !((TREE_CODE (var
) == RESULT_DECL
8185 || TREE_CODE (var
) == PARM_DECL
)
8186 && DECL_BY_REFERENCE (var
)))
8188 x
= build_sender_ref (ovar
, ctx
);
8189 gimplify_assign (var
, x
, olist
);
/* NOTE(review): extraction-garbled fragment of lower_oacc_head_mark.
   Statements are split across lines with original line numbers (8195..8310)
   fused in; several lines (case `break`s, the tiling `levels = 3`, some
   `levels++` updates) are missing.  Restore from upstream GCC omp-low.c
   before editing.  Purpose (from the visible code): encode the loop's
   gang/worker/vector/seq/auto/independent partitioning clauses into an
   OLF_* tag and emit an IFN_UNIQUE (OACC_HEAD_MARK) internal call into
   SEQ; returns the number of partitioning levels.  */
8195 /* Emit an OpenACC head marker call, encapulating the partitioning and
8196 other information that must be processed by the target compiler.
8197 Return the maximum number of dimensions the associated loop might
8198 be partitioned over. */
8201 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
8202 gimple_seq
*seq
, omp_context
*ctx
)
8204 unsigned levels
= 0;
8206 tree gang_static
= NULL_TREE
;
8207 auto_vec
<tree
, 5> args
;
8209 args
.quick_push (build_int_cst
8210 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
8211 args
.quick_push (ddvar
);
/* Accumulate OLF_* bits from each partitioning clause.  */
8212 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8214 switch (OMP_CLAUSE_CODE (c
))
8216 case OMP_CLAUSE_GANG
:
8217 tag
|= OLF_DIM_GANG
;
8218 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
8219 /* static:* is represented by -1, and we can ignore it, as
8220 scheduling is always static. */
8221 if (gang_static
&& integer_minus_onep (gang_static
))
8222 gang_static
= NULL_TREE
;
8226 case OMP_CLAUSE_WORKER
:
8227 tag
|= OLF_DIM_WORKER
;
8231 case OMP_CLAUSE_VECTOR
:
8232 tag
|= OLF_DIM_VECTOR
;
8236 case OMP_CLAUSE_SEQ
:
8240 case OMP_CLAUSE_AUTO
:
8244 case OMP_CLAUSE_INDEPENDENT
:
8245 tag
|= OLF_INDEPENDENT
;
8248 case OMP_CLAUSE_TILE
:
8259 if (DECL_P (gang_static
))
8260 gang_static
= build_outer_var_ref (gang_static
, ctx
);
8261 tag
|= OLF_GANG_STATIC
;
/* Adjust the tag according to the kind of enclosing offload target
   region (parallel/serial vs. kernels vs. decomposed kernels part).  */
8264 omp_context
*tgt
= enclosing_target_ctx (ctx
);
8265 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
8267 else if (is_oacc_kernels (tgt
))
8268 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8270 else if (is_oacc_kernels_decomposed_part (tgt
))
8275 /* In a parallel region, loops are implicitly INDEPENDENT. */
8276 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
8277 tag
|= OLF_INDEPENDENT
;
8279 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8280 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8281 if (tgt
&& is_oacc_kernels_decomposed_part (tgt
))
8283 gcc_assert (tag
& (OLF_SEQ
| OLF_INDEPENDENT
));
8284 gcc_assert (!(tag
& OLF_AUTO
));
8288 /* Tiling could use all 3 levels. */
8292 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8293 Ensure at least one level, or 2 for possible auto
8295 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
8296 << OLF_DIM_BASE
) | OLF_SEQ
));
8298 if (levels
< 1u + maybe_auto
)
8299 levels
= 1u + maybe_auto
;
/* Finish the argument vector and emit the IFN_UNIQUE marker call,
   with DDVAR as its lhs to carry the data dependency.  */
8302 args
.quick_push (build_int_cst (integer_type_node
, levels
));
8303 args
.quick_push (build_int_cst (integer_type_node
, tag
));
8305 args
.quick_push (gang_static
);
8307 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
8308 gimple_set_location (call
, loc
);
8309 gimple_set_lhs (call
, ddvar
);
8310 gimple_seq_add_stmt (seq
, call
);
8315 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
8316 partitioning level of the enclosed region. */
8319 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
8320 tree tofollow
, gimple_seq
*seq
)
8322 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
8323 : IFN_UNIQUE_OACC_TAIL_MARK
);
8324 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
8325 int nargs
= 2 + (tofollow
!= NULL_TREE
);
8326 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
8327 marker
, ddvar
, tofollow
);
8328 gimple_set_location (call
, loc
);
8329 gimple_set_lhs (call
, ddvar
);
8330 gimple_seq_add_stmt (seq
, call
);
/* NOTE(review): extraction-garbled fragment of lower_oacc_head_tail.
   Statements are split across lines with original line numbers (8333..8398)
   fused in; some lines (braces, the `inner` computation, early-return when
   count == 0) are missing.  Restore from upstream GCC omp-low.c before
   editing.  Purpose (from the visible code): build matched fork/join
   IFN_UNIQUE call pairs, one per partitioning level reported by
   lower_oacc_head_mark, appending each level's fork sequence to HEAD and
   prepending its join sequence to TAIL, with reduction setup/teardown via
   lower_oacc_reductions.  */
8333 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8334 the loop clauses, from which we extract reductions. Initialize
8338 lower_oacc_head_tail (location_t loc
, tree clauses
, gcall
*private_marker
,
8339 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
8342 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
8343 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
8345 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
/* Wire the private marker (if any) into the ddvar dependency chain.  */
8349 gimple_set_location (private_marker
, loc
);
8350 gimple_call_set_lhs (private_marker
, ddvar
);
8351 gimple_call_set_arg (private_marker
, 1, ddvar
);
8354 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
8355 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
/* One fork/join pair per remaining partitioning level.  */
8358 for (unsigned done
= 1; count
; count
--, done
++)
8360 gimple_seq fork_seq
= NULL
;
8361 gimple_seq join_seq
= NULL
;
8363 tree place
= build_int_cst (integer_type_node
, -1);
8364 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
8365 fork_kind
, ddvar
, place
);
8366 gimple_set_location (fork
, loc
);
8367 gimple_set_lhs (fork
, ddvar
);
8369 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
8370 join_kind
, ddvar
, place
);
8371 gimple_set_location (join
, loc
);
8372 gimple_set_lhs (join
, ddvar
);
8374 /* Mark the beginning of this level sequence. */
8376 lower_oacc_loop_marker (loc
, ddvar
, true,
8377 build_int_cst (integer_type_node
, count
),
8379 lower_oacc_loop_marker (loc
, ddvar
, false,
8380 build_int_cst (integer_type_node
, done
),
/* The private marker is only attached at the innermost level
   (count == 1).  */
8383 lower_oacc_reductions (loc
, clauses
, place
, inner
,
8384 fork
, (count
== 1) ? private_marker
: NULL
,
8385 join
, &fork_seq
, &join_seq
, ctx
);
8387 /* Append this level to head. */
8388 gimple_seq_add_seq (head
, fork_seq
);
8389 /* Prepend it to tail. */
8390 gimple_seq_add_seq (&join_seq
, *tail
);
8396 /* Mark the end of the sequence. */
8397 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
8398 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
8401 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8402 catch handler and return it. This prevents programs from violating the
8403 structured block semantics with throws. */
8406 maybe_catch_exception (gimple_seq body
)
8411 if (!flag_exceptions
)
8414 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
8415 decl
= lang_hooks
.eh_protect_cleanup_actions ();
8417 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
8419 g
= gimple_build_eh_must_not_throw (decl
);
8420 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
8423 return gimple_seq_alloc_with_stmt (g
);
/* NOTE(review): extraction-garbled fragment of
   maybe_add_implicit_barrier_cancel.  Statements are split across lines
   with original line numbers (8427..8458) fused in; the signature's BODY
   parameter line and the loop's `break` are missing.  Restore from
   upstream GCC omp-low.c before editing.  Purpose (from the visible code):
   when the worksharing region's implicit barrier sits inside a cancellable
   parallel, give the GIMPLE_OMP_RETURN an lhs and branch to the parallel's
   cancel_label when GOMP_cancel reported cancellation.  */
8427 /* Routines to lower OMP directives into OMP-GIMPLE. */
8429 /* If ctx is a worksharing context inside of a cancellable parallel
8430 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8431 and conditional branch to parallel's cancel_label to handle
8432 cancellation in the implicit barrier. */
8435 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
8438 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
/* A nowait region has no implicit barrier, so nothing to do.  */
8439 if (gimple_omp_return_nowait_p (omp_return
))
8441 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
8442 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
8443 && outer
->cancellable
)
8445 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
8446 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
8447 tree lhs
= create_tmp_var (c_bool_type
);
8448 gimple_omp_return_set_lhs (omp_return
, lhs
);
8449 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
/* Branch to the enclosing parallel's cancel label when the barrier
   observed a cancellation request.  */
8450 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
8451 fold_convert (c_bool_type
,
8452 boolean_false_node
),
8453 outer
->cancel_label
, fallthru_label
);
8454 gimple_seq_add_stmt (body
, g
);
8455 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
8457 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
8458 && gimple_code (outer
->stmt
) != GIMPLE_OMP_SCOPE
)
8462 /* Find the first task_reduction or reduction clause or return NULL
8463 if there are none. */
8466 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
8467 enum omp_clause_code ccode
)
8471 clauses
= omp_find_clause (clauses
, ccode
);
8472 if (clauses
== NULL_TREE
)
8474 if (ccode
!= OMP_CLAUSE_REDUCTION
8475 || code
== OMP_TASKLOOP
8476 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
8478 clauses
= OMP_CLAUSE_CHAIN (clauses
);
8482 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
8483 gimple_seq
*, gimple_seq
*);
/* NOTE(review): extraction-garbled fragment of lower_omp_sections.
   Statements are split across lines with original line numbers (8485..8617)
   fused in; many lines (braces, the `rclauses` declaration, several
   conditions) are missing.  Restore from upstream GCC omp-low.c before
   editing.  Purpose (from the visible code): lower a GIMPLE_OMP_SECTIONS
   region — set up task reductions, data-sharing input/output sequences,
   lower each section body, add the sections switch, continue and return
   statements, and wrap in a new GIMPLE_BIND.  */
8485 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8486 CTX is the enclosing OMP context for the current statement. */
8489 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8491 tree block
, control
;
8492 gimple_stmt_iterator tgsi
;
8493 gomp_sections
*stmt
;
8495 gbind
*new_stmt
, *bind
;
8496 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
8498 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
8500 push_gimplify_context ();
/* Task-reduction setup: inject a _reductemp_ clause and lower the
   registration/teardown sequences.  */
8506 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
8507 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
8508 tree rtmp
= NULL_TREE
;
8511 tree type
= build_pointer_type (pointer_sized_int_node
);
8512 tree temp
= create_tmp_var (type
);
8513 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8514 OMP_CLAUSE_DECL (c
) = temp
;
8515 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
8516 gimple_omp_sections_set_clauses (stmt
, c
);
8517 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
8518 gimple_omp_sections_clauses (stmt
),
8519 &ilist
, &tred_dlist
);
8521 rtmp
= make_ssa_name (type
);
8522 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
8525 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
8526 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
8528 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
8529 &ilist
, &dlist
, ctx
, NULL
);
8531 control
= create_tmp_var (unsigned_type_node
, ".section");
8532 gimple_omp_sections_set_control (stmt
, control
);
/* Lower each GIMPLE_OMP_SECTION body in place; the last section also
   gets the lastprivate handling.  */
8534 new_body
= gimple_omp_body (stmt
);
8535 gimple_omp_set_body (stmt
, NULL
);
8536 tgsi
= gsi_start (new_body
);
8537 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
8542 sec_start
= gsi_stmt (tgsi
);
8543 sctx
= maybe_lookup_ctx (sec_start
);
8546 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
8547 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
8548 GSI_CONTINUE_LINKING
);
8549 gimple_omp_set_body (sec_start
, NULL
);
8551 if (gsi_one_before_end_p (tgsi
))
8553 gimple_seq l
= NULL
;
8554 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
8555 &ilist
, &l
, &clist
, ctx
);
8556 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
8557 gimple_omp_section_set_last (sec_start
);
8560 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
8561 GSI_CONTINUE_LINKING
);
8564 block
= make_node (BLOCK
);
8565 bind
= gimple_build_bind (NULL
, new_body
, block
);
/* Reductions; when conditional lastprivates exist (clist) wrap them in
   GOMP_atomic_start/end.  */
8568 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
8572 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8573 gcall
*g
= gimple_build_call (fndecl
, 0);
8574 gimple_seq_add_stmt (&olist
, g
);
8575 gimple_seq_add_seq (&olist
, clist
);
8576 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8577 g
= gimple_build_call (fndecl
, 0);
8578 gimple_seq_add_stmt (&olist
, g
);
8581 block
= make_node (BLOCK
);
8582 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
8583 gsi_replace (gsi_p
, new_stmt
, true);
8585 pop_gimplify_context (new_stmt
);
8586 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
8587 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
8588 if (BLOCK_VARS (block
))
8589 TREE_USED (block
) = 1;
/* Assemble the final body: input clauses, the sections stmt + switch,
   the bind with all sections, continue, output clauses, destructors,
   and the closing OMP return with barrier-cancel handling.  */
8592 gimple_seq_add_seq (&new_body
, ilist
);
8593 gimple_seq_add_stmt (&new_body
, stmt
);
8594 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
8595 gimple_seq_add_stmt (&new_body
, bind
);
8597 t
= gimple_build_omp_continue (control
, control
);
8598 gimple_seq_add_stmt (&new_body
, t
);
8600 gimple_seq_add_seq (&new_body
, olist
);
8601 if (ctx
->cancellable
)
8602 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
8603 gimple_seq_add_seq (&new_body
, dlist
);
8605 new_body
= maybe_catch_exception (new_body
);
8607 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
8608 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8609 t
= gimple_build_omp_return (nowait
);
8610 gimple_seq_add_stmt (&new_body
, t
);
8611 gimple_seq_add_seq (&new_body
, tred_dlist
);
8612 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
8615 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
8617 gimple_bind_set_body (new_stmt
, new_body
);
8621 /* A subroutine of lower_omp_single. Expand the simple form of
8622 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8624 if (GOMP_single_start ())
8626 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8628 FIXME. It may be better to delay expanding the logic of this until
8629 pass_expand_omp. The expanded logic may make the job more difficult
8630 to a synchronization analysis pass. */
8633 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
8635 location_t loc
= gimple_location (single_stmt
);
8636 tree tlabel
= create_artificial_label (loc
);
8637 tree flabel
= create_artificial_label (loc
);
8638 gimple
*call
, *cond
;
8641 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
8642 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
8643 call
= gimple_build_call (decl
, 0);
8644 gimple_call_set_lhs (call
, lhs
);
8645 gimple_seq_add_stmt (pre_p
, call
);
8647 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
8648 fold_convert_loc (loc
, TREE_TYPE (lhs
),
8651 gimple_seq_add_stmt (pre_p
, cond
);
8652 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
8653 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8654 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
/* NOTE(review): extraction-garbled fragment of lower_omp_single_copy.
   Statements are split across lines with original line numbers (8658..8735)
   fused in; parts of the illustrating comment and some statements are
   missing.  Restore from upstream GCC omp-low.c before editing.  Purpose
   (from the visible code): expand `single copyprivate` — the thread that
   executes the body fills a sender record and calls GOMP_single_copy_end;
   the others receive a pointer from GOMP_single_copy_start and copy the
   data in via the copyin sequence.  */
8658 /* A subroutine of lower_omp_single. Expand the simple form of
8659 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8661 #pragma omp single copyprivate (a, b, c)
8663 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8666 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8672 GOMP_single_copy_end (&copyout);
8683 FIXME. It may be better to delay expanding the logic of this until
8684 pass_expand_omp. The expanded logic may make the job more difficult
8685 to a synchronization analysis pass. */
8688 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
8691 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
8692 gimple_seq copyin_seq
;
8693 location_t loc
= gimple_location (single_stmt
);
8695 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
8697 ptr_type
= build_pointer_type (ctx
->record_type
);
8698 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
8700 l0
= create_artificial_label (loc
);
8701 l1
= create_artificial_label (loc
);
8702 l2
= create_artificial_label (loc
);
/* receiver = GOMP_single_copy_start (); NULL means "I run the body".  */
8704 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
8705 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
8706 t
= fold_convert_loc (loc
, ptr_type
, t
);
8707 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
8709 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
8710 build_int_cst (ptr_type
, 0));
8711 t
= build3 (COND_EXPR
, void_type_node
, t
,
8712 build_and_jump (&l0
), build_and_jump (&l1
));
8713 gimplify_and_add (t
, pre_p
);
/* l0: the executing thread — run the body, publish the copyprivate
   values, then announce them via GOMP_single_copy_end.  */
8715 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
8717 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8720 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
8723 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
8724 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
8725 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
8726 gimplify_and_add (t
, pre_p
);
8728 t
= build_and_jump (&l2
);
8729 gimplify_and_add (t
, pre_p
);
/* l1: the other threads — copy the published values in; l2: join.  */
8731 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
8733 gimple_seq_add_seq (pre_p
, copyin_seq
);
8735 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
/* NOTE(review): extraction-garbled fragment of lower_omp_single.
   Statements are split across lines with original line numbers (8739..8793)
   fused in; some lines (braces, `tree block;`/`gbind *bind;` decls) are
   missing.  Restore from upstream GCC omp-low.c before editing.  Purpose
   (from the visible code): lower GIMPLE_OMP_SINGLE — data-sharing clauses,
   then either the copyprivate or the simple expansion, destructors, and
   the closing OMP return with barrier-cancel handling, wrapped in a new
   GIMPLE_BIND that replaces the statement at GSI_P.  */
8739 /* Expand code for an OpenMP single directive. */
8742 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8745 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
8747 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8749 push_gimplify_context ();
8751 block
= make_node (BLOCK
);
8752 bind
= gimple_build_bind (NULL
, NULL
, block
);
8753 gsi_replace (gsi_p
, bind
, true);
8756 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
8757 &bind_body
, &dlist
, ctx
, NULL
);
8758 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
8760 gimple_seq_add_stmt (&bind_body
, single_stmt
);
/* A non-NULL record_type means copyprivate marshalling is needed.  */
8762 if (ctx
->record_type
)
8763 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
8765 lower_omp_single_simple (single_stmt
, &bind_body
);
8767 gimple_omp_set_body (single_stmt
, NULL
);
8769 gimple_seq_add_seq (&bind_body
, dlist
);
8771 bind_body
= maybe_catch_exception (bind_body
);
8773 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
8774 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8775 gimple
*g
= gimple_build_omp_return (nowait
);
8776 gimple_seq_add_stmt (&bind_body_tail
, g
);
8777 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
/* Clobber the sender record after the barrier so its stack slot can be
   reused.  */
8778 if (ctx
->record_type
)
8780 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
8781 tree clobber
= build_clobber (ctx
->record_type
);
8782 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
8783 clobber
), GSI_SAME_STMT
);
8785 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
8786 gimple_bind_set_body (bind
, bind_body
);
8788 pop_gimplify_context (bind
);
8790 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8791 BLOCK_VARS (block
) = ctx
->block_vars
;
8792 if (BLOCK_VARS (block
))
8793 TREE_USED (block
) = 1;
/* NOTE(review): extraction-garbled fragment of lower_omp_scope.
   Statements are split across lines with original line numbers (8797..8886)
   fused in; some lines (braces, local declarations) are missing.  Restore
   from upstream GCC omp-low.c before editing.  Purpose (from the visible
   code): lower GIMPLE_OMP_SCOPE — task-reduction registration via
   GOMP_scope_start, data-sharing clauses, reductions (atomics-wrapped when
   conditional lastprivates exist), destructors, and the closing OMP return
   with barrier-cancel handling, wrapped in a new GIMPLE_BIND.  */
8797 /* Lower code for an OMP scope directive. */
8800 lower_omp_scope (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8803 gimple
*scope_stmt
= gsi_stmt (*gsi_p
);
8805 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8806 gimple_seq tred_dlist
= NULL
;
8808 push_gimplify_context ();
8810 block
= make_node (BLOCK
);
8811 bind
= gimple_build_bind (NULL
, NULL
, block
);
8812 gsi_replace (gsi_p
, bind
, true);
/* Task-reduction setup: inject a _reductemp_ clause, lower the
   registration/teardown sequences and call GOMP_scope_start.  */
8817 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt
),
8818 OMP_SCOPE
, OMP_CLAUSE_REDUCTION
);
8821 tree type
= build_pointer_type (pointer_sized_int_node
);
8822 tree temp
= create_tmp_var (type
);
8823 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8824 OMP_CLAUSE_DECL (c
) = temp
;
8825 OMP_CLAUSE_CHAIN (c
) = gimple_omp_scope_clauses (scope_stmt
);
8826 gimple_omp_scope_set_clauses (scope_stmt
, c
);
8827 lower_omp_task_reductions (ctx
, OMP_SCOPE
,
8828 gimple_omp_scope_clauses (scope_stmt
),
8829 &bind_body
, &tred_dlist
);
8831 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START
);
8832 gimple
*stmt
= gimple_build_call (fndecl
, 1, temp
);
8833 gimple_seq_add_stmt (&bind_body
, stmt
);
8836 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt
),
8837 &bind_body
, &dlist
, ctx
, NULL
);
8838 lower_omp (gimple_omp_body_ptr (scope_stmt
), ctx
);
8840 gimple_seq_add_stmt (&bind_body
, scope_stmt
);
8842 gimple_seq_add_seq (&bind_body
, gimple_omp_body (scope_stmt
));
8844 gimple_omp_set_body (scope_stmt
, NULL
);
/* Reductions; clist (conditional lastprivates) is wrapped in
   GOMP_atomic_start/end when non-empty.  */
8846 gimple_seq clist
= NULL
;
8847 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt
),
8848 &bind_body
, &clist
, ctx
);
8851 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8852 gcall
*g
= gimple_build_call (fndecl
, 0);
8853 gimple_seq_add_stmt (&bind_body
, g
);
8854 gimple_seq_add_seq (&bind_body
, clist
);
8855 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8856 g
= gimple_build_call (fndecl
, 0);
8857 gimple_seq_add_stmt (&bind_body
, g
);
8860 gimple_seq_add_seq (&bind_body
, dlist
);
8862 bind_body
= maybe_catch_exception (bind_body
);
8864 bool nowait
= omp_find_clause (gimple_omp_scope_clauses (scope_stmt
),
8865 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8866 gimple
*g
= gimple_build_omp_return (nowait
);
8867 gimple_seq_add_stmt (&bind_body_tail
, g
);
8868 gimple_seq_add_seq (&bind_body_tail
, tred_dlist
);
8869 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
8870 if (ctx
->record_type
)
8872 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
8873 tree clobber
= build_clobber (ctx
->record_type
);
8874 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
8875 clobber
), GSI_SAME_STMT
);
8877 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
8879 gimple_bind_set_body (bind
, bind_body
);
8881 pop_gimplify_context (bind
);
8883 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8884 BLOCK_VARS (block
) = ctx
->block_vars
;
8885 if (BLOCK_VARS (block
))
8886 TREE_USED (block
) = 1;
/* NOTE(review): extraction-garbled fragment of lower_omp_master.
   Statements are split across lines with original line numbers (8888..8937)
   fused in; some lines (braces, `gbind *bind;`, the FILTER clause lookup
   argument) are missing.  Restore from upstream GCC omp-low.c before
   editing.  Purpose (from the visible code): lower GIMPLE_OMP_MASTER /
   GIMPLE_OMP_MASKED — guard the body with
   `if (omp_get_thread_num () != filter) goto lab;` where filter is 0 for
   master or the masked FILTER expression.  */
8888 /* Expand code for an OpenMP master or masked directive. */
8891 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8893 tree block
, lab
= NULL
, x
, bfn_decl
;
8894 gimple
*stmt
= gsi_stmt (*gsi_p
);
8896 location_t loc
= gimple_location (stmt
);
8898 tree filter
= integer_zero_node
;
8900 push_gimplify_context ();
/* For `masked', use the FILTER clause expression; default back to
   thread 0 when absent.  */
8902 if (gimple_code (stmt
) == GIMPLE_OMP_MASKED
)
8904 filter
= omp_find_clause (gimple_omp_masked_clauses (stmt
),
8907 filter
= fold_convert (integer_type_node
,
8908 OMP_CLAUSE_FILTER_EXPR (filter
));
8910 filter
= integer_zero_node
;
8912 block
= make_node (BLOCK
);
8913 bind
= gimple_build_bind (NULL
, NULL
, block
);
8914 gsi_replace (gsi_p
, bind
, true);
8915 gimple_bind_add_stmt (bind
, stmt
);
/* Guard: only the filtered thread falls through into the body.  */
8917 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8918 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
8919 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, filter
);
8920 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
8922 gimplify_and_add (x
, &tseq
);
8923 gimple_bind_add_seq (bind
, tseq
);
8925 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8926 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
8927 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8928 gimple_omp_set_body (stmt
, NULL
);
8930 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
/* master/masked has no implicit barrier: nowait return.  */
8932 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8934 pop_gimplify_context (bind
);
8936 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8937 BLOCK_VARS (block
) = ctx
->block_vars
;
8940 /* Helper function for lower_omp_task_reductions. For a specific PASS
8941 find out the current clause it should be processed, or return false
8942 if all have been processed already. */
8945 omp_task_reduction_iterate (int pass
, enum tree_code code
,
8946 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
8947 tree
*type
, tree
*next
)
8949 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
8951 if (ccode
== OMP_CLAUSE_REDUCTION
8952 && code
!= OMP_TASKLOOP
8953 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
8955 *decl
= OMP_CLAUSE_DECL (*c
);
8956 *type
= TREE_TYPE (*decl
);
8957 if (TREE_CODE (*decl
) == MEM_REF
)
8964 if (omp_privatize_by_reference (*decl
))
8965 *type
= TREE_TYPE (*type
);
8966 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
8969 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
8978 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8979 OMP_TASKGROUP only with task modifier). Register mapping of those in
8980 START sequence and reducing them and unregister them in the END sequence. */
8983 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
8984 gimple_seq
*start
, gimple_seq
*end
)
8986 enum omp_clause_code ccode
8987 = (code
== OMP_TASKGROUP
8988 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
8989 tree cancellable
= NULL_TREE
;
8990 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
8991 if (clauses
== NULL_TREE
)
8993 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
8995 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
8996 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
8997 && outer
->cancellable
)
8999 cancellable
= error_mark_node
;
9002 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
9003 && gimple_code (outer
->stmt
) != GIMPLE_OMP_SCOPE
)
9006 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
9007 tree
*last
= &TYPE_FIELDS (record_type
);
9011 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
9013 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
9016 DECL_CHAIN (field
) = ifield
;
9017 last
= &DECL_CHAIN (ifield
);
9018 DECL_CONTEXT (field
) = record_type
;
9019 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
9020 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
9021 DECL_CONTEXT (ifield
) = record_type
;
9022 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
9023 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
9025 for (int pass
= 0; pass
< 2; pass
++)
9027 tree decl
, type
, next
;
9028 for (tree c
= clauses
;
9029 omp_task_reduction_iterate (pass
, code
, ccode
,
9030 &c
, &decl
, &type
, &next
); c
= next
)
9033 tree new_type
= type
;
9035 new_type
= remap_type (type
, &ctx
->outer
->cb
);
9037 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
9038 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
9040 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
9042 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
9043 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
9044 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
9047 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
9048 DECL_CONTEXT (field
) = record_type
;
9049 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
9050 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
9052 last
= &DECL_CHAIN (field
);
9054 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
9056 DECL_CONTEXT (bfield
) = record_type
;
9057 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
9058 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
9060 last
= &DECL_CHAIN (bfield
);
9064 layout_type (record_type
);
9066 /* Build up an array which registers with the runtime all the reductions
9067 and deregisters them at the end. Format documented in libgomp/task.c. */
9068 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
9069 tree avar
= create_tmp_var_raw (atype
);
9070 gimple_add_tmp_var (avar
);
9071 TREE_ADDRESSABLE (avar
) = 1;
9072 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
9073 NULL_TREE
, NULL_TREE
);
9074 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
9075 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9076 gimple_seq seq
= NULL
;
9077 tree sz
= fold_convert (pointer_sized_int_node
,
9078 TYPE_SIZE_UNIT (record_type
));
9080 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
9081 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
9082 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
9083 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
9084 ctx
->task_reductions
.create (1 + cnt
);
9085 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
9086 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
9088 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
9089 gimple_seq_add_seq (start
, seq
);
9090 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
9091 NULL_TREE
, NULL_TREE
);
9092 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
9093 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
9094 NULL_TREE
, NULL_TREE
);
9095 t
= build_int_cst (pointer_sized_int_node
,
9096 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
9097 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9098 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
9099 NULL_TREE
, NULL_TREE
);
9100 t
= build_int_cst (pointer_sized_int_node
, -1);
9101 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9102 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
9103 NULL_TREE
, NULL_TREE
);
9104 t
= build_int_cst (pointer_sized_int_node
, 0);
9105 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9107 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9108 and for each task reduction checks a bool right after the private variable
9109 within that thread's chunk; if the bool is clear, it hasn't been
9110 initialized and thus isn't going to be reduced nor destructed, otherwise
9111 reduce and destruct it. */
9112 tree idx
= create_tmp_var (size_type_node
);
9113 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
9114 tree num_thr_sz
= create_tmp_var (size_type_node
);
9115 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
9116 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
9117 tree lab3
= NULL_TREE
, lab7
= NULL_TREE
;
9119 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9121 /* For worksharing constructs or scope, only perform it in the master
9122 thread, with the exception of cancelled implicit barriers - then only
9123 handle the current thread. */
9124 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
9125 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9126 tree thr_num
= create_tmp_var (integer_type_node
);
9127 g
= gimple_build_call (t
, 0);
9128 gimple_call_set_lhs (g
, thr_num
);
9129 gimple_seq_add_stmt (end
, g
);
9133 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9134 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9135 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9136 if (code
== OMP_FOR
)
9137 c
= gimple_omp_for_clauses (ctx
->stmt
);
9138 else if (code
== OMP_SECTIONS
)
9139 c
= gimple_omp_sections_clauses (ctx
->stmt
);
9140 else /* if (code == OMP_SCOPE) */
9141 c
= gimple_omp_scope_clauses (ctx
->stmt
);
9142 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
9144 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
9146 gimple_seq_add_stmt (end
, g
);
9147 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9148 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
9149 gimple_seq_add_stmt (end
, g
);
9150 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
9151 build_one_cst (TREE_TYPE (idx
)));
9152 gimple_seq_add_stmt (end
, g
);
9153 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
9154 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9156 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
9157 gimple_seq_add_stmt (end
, g
);
9158 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
9160 if (code
!= OMP_PARALLEL
)
9162 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
9163 tree num_thr
= create_tmp_var (integer_type_node
);
9164 g
= gimple_build_call (t
, 0);
9165 gimple_call_set_lhs (g
, num_thr
);
9166 gimple_seq_add_stmt (end
, g
);
9167 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
9168 gimple_seq_add_stmt (end
, g
);
9170 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
9174 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
9175 OMP_CLAUSE__REDUCTEMP_
);
9176 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
9177 t
= fold_convert (size_type_node
, t
);
9178 gimplify_assign (num_thr_sz
, t
, end
);
9180 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
9181 NULL_TREE
, NULL_TREE
);
9182 tree data
= create_tmp_var (pointer_sized_int_node
);
9183 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
9184 if (code
== OMP_TASKLOOP
)
9186 lab7
= create_artificial_label (UNKNOWN_LOCATION
);
9187 g
= gimple_build_cond (NE_EXPR
, data
,
9188 build_zero_cst (pointer_sized_int_node
),
9190 gimple_seq_add_stmt (end
, g
);
9192 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
9194 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
9195 ptr
= create_tmp_var (build_pointer_type (record_type
));
9197 ptr
= create_tmp_var (ptr_type_node
);
9198 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
9200 tree field
= TYPE_FIELDS (record_type
);
9203 field
= DECL_CHAIN (DECL_CHAIN (field
));
9204 for (int pass
= 0; pass
< 2; pass
++)
9206 tree decl
, type
, next
;
9207 for (tree c
= clauses
;
9208 omp_task_reduction_iterate (pass
, code
, ccode
,
9209 &c
, &decl
, &type
, &next
); c
= next
)
9211 tree var
= decl
, ref
;
9212 if (TREE_CODE (decl
) == MEM_REF
)
9214 var
= TREE_OPERAND (var
, 0);
9215 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
9216 var
= TREE_OPERAND (var
, 0);
9218 if (TREE_CODE (var
) == ADDR_EXPR
)
9219 var
= TREE_OPERAND (var
, 0);
9220 else if (TREE_CODE (var
) == INDIRECT_REF
)
9221 var
= TREE_OPERAND (var
, 0);
9222 tree orig_var
= var
;
9223 if (is_variable_sized (var
))
9225 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
9226 var
= DECL_VALUE_EXPR (var
);
9227 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
9228 var
= TREE_OPERAND (var
, 0);
9229 gcc_assert (DECL_P (var
));
9231 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
9232 if (orig_var
!= var
)
9233 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
9234 else if (TREE_CODE (v
) == ADDR_EXPR
)
9235 t
= build_fold_addr_expr (t
);
9236 else if (TREE_CODE (v
) == INDIRECT_REF
)
9237 t
= build_fold_indirect_ref (t
);
9238 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
9240 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
9241 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
9242 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
9244 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
9245 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
9246 fold_convert (size_type_node
,
9247 TREE_OPERAND (decl
, 1)));
9251 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
9252 if (!omp_privatize_by_reference (decl
))
9253 t
= build_fold_addr_expr (t
);
9255 t
= fold_convert (pointer_sized_int_node
, t
);
9257 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
9258 gimple_seq_add_seq (start
, seq
);
9259 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9260 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
9261 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9262 t
= unshare_expr (byte_position (field
));
9263 t
= fold_convert (pointer_sized_int_node
, t
);
9264 ctx
->task_reduction_map
->put (c
, cnt
);
9265 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
9268 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
9269 gimple_seq_add_seq (start
, seq
);
9270 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9271 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
9272 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9274 tree bfield
= DECL_CHAIN (field
);
9276 if (code
== OMP_PARALLEL
9278 || code
== OMP_SECTIONS
9279 || code
== OMP_SCOPE
)
9280 /* In parallel, worksharing or scope all threads unconditionally
9281 initialize all their task reduction private variables. */
9282 cond
= boolean_true_node
;
9283 else if (TREE_TYPE (ptr
) == ptr_type_node
)
9285 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
9286 unshare_expr (byte_position (bfield
)));
9288 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
9289 gimple_seq_add_seq (end
, seq
);
9290 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
9291 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
9292 build_int_cst (pbool
, 0));
9295 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
9296 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
9297 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9298 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
9299 tree condv
= create_tmp_var (boolean_type_node
);
9300 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
9301 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
9303 gimple_seq_add_stmt (end
, g
);
9304 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
9305 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
9307 /* If this reduction doesn't need destruction and parallel
9308 has been cancelled, there is nothing to do for this
9309 reduction, so jump around the merge operation. */
9310 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9311 g
= gimple_build_cond (NE_EXPR
, cancellable
,
9312 build_zero_cst (TREE_TYPE (cancellable
)),
9314 gimple_seq_add_stmt (end
, g
);
9315 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9319 if (TREE_TYPE (ptr
) == ptr_type_node
)
9321 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
9322 unshare_expr (byte_position (field
)));
9324 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
9325 gimple_seq_add_seq (end
, seq
);
9326 tree pbool
= build_pointer_type (TREE_TYPE (field
));
9327 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
9328 build_int_cst (pbool
, 0));
9331 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
9332 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
9334 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
9335 if (TREE_CODE (decl
) != MEM_REF
9336 && omp_privatize_by_reference (decl
))
9337 ref
= build_simple_mem_ref (ref
);
9338 /* reduction(-:var) sums up the partial results, so it acts
9339 identically to reduction(+:var). */
9340 if (rcode
== MINUS_EXPR
)
9342 if (TREE_CODE (decl
) == MEM_REF
)
9344 tree type
= TREE_TYPE (new_var
);
9345 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
9346 tree i
= create_tmp_var (TREE_TYPE (v
));
9347 tree ptype
= build_pointer_type (TREE_TYPE (type
));
9350 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
9351 tree vv
= create_tmp_var (TREE_TYPE (v
));
9352 gimplify_assign (vv
, v
, start
);
9355 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9356 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
9357 new_var
= build_fold_addr_expr (new_var
);
9358 new_var
= fold_convert (ptype
, new_var
);
9359 ref
= fold_convert (ptype
, ref
);
9360 tree m
= create_tmp_var (ptype
);
9361 gimplify_assign (m
, new_var
, end
);
9363 m
= create_tmp_var (ptype
);
9364 gimplify_assign (m
, ref
, end
);
9366 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
9367 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
9368 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
9369 gimple_seq_add_stmt (end
, gimple_build_label (body
));
9370 tree priv
= build_simple_mem_ref (new_var
);
9371 tree out
= build_simple_mem_ref (ref
);
9372 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9374 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9375 tree decl_placeholder
9376 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
9377 tree lab6
= NULL_TREE
;
9380 /* If this reduction needs destruction and parallel
9381 has been cancelled, jump around the merge operation
9382 to the destruction. */
9383 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9384 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9385 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
9386 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
9388 gimple_seq_add_stmt (end
, g
);
9389 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9391 SET_DECL_VALUE_EXPR (placeholder
, out
);
9392 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9393 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
9394 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
9395 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
9396 gimple_seq_add_seq (end
,
9397 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9398 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9399 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9401 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
9402 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
9405 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9406 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
9409 gimple_seq tseq
= NULL
;
9410 gimplify_stmt (&x
, &tseq
);
9411 gimple_seq_add_seq (end
, tseq
);
9416 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
9417 out
= unshare_expr (out
);
9418 gimplify_assign (out
, x
, end
);
9421 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
9422 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9423 gimple_seq_add_stmt (end
, g
);
9424 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
9425 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9426 gimple_seq_add_stmt (end
, g
);
9427 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
9428 build_int_cst (TREE_TYPE (i
), 1));
9429 gimple_seq_add_stmt (end
, g
);
9430 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
9431 gimple_seq_add_stmt (end
, g
);
9432 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
9434 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9436 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9437 tree oldv
= NULL_TREE
;
9438 tree lab6
= NULL_TREE
;
9441 /* If this reduction needs destruction and parallel
9442 has been cancelled, jump around the merge operation
9443 to the destruction. */
9444 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9445 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9446 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
9447 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
9449 gimple_seq_add_stmt (end
, g
);
9450 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9452 if (omp_privatize_by_reference (decl
)
9453 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
9455 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
9456 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
9457 tree refv
= create_tmp_var (TREE_TYPE (ref
));
9458 gimplify_assign (refv
, ref
, end
);
9459 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
9460 SET_DECL_VALUE_EXPR (placeholder
, ref
);
9461 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9462 tree d
= maybe_lookup_decl (decl
, ctx
);
9464 if (DECL_HAS_VALUE_EXPR_P (d
))
9465 oldv
= DECL_VALUE_EXPR (d
);
9466 if (omp_privatize_by_reference (var
))
9468 tree v
= fold_convert (TREE_TYPE (d
),
9469 build_fold_addr_expr (new_var
));
9470 SET_DECL_VALUE_EXPR (d
, v
);
9473 SET_DECL_VALUE_EXPR (d
, new_var
);
9474 DECL_HAS_VALUE_EXPR_P (d
) = 1;
9475 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
9477 SET_DECL_VALUE_EXPR (d
, oldv
);
9480 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
9481 DECL_HAS_VALUE_EXPR_P (d
) = 0;
9483 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9484 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9485 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9486 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
9488 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9489 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
9492 gimple_seq tseq
= NULL
;
9493 gimplify_stmt (&x
, &tseq
);
9494 gimple_seq_add_seq (end
, tseq
);
9499 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
9500 ref
= unshare_expr (ref
);
9501 gimplify_assign (ref
, x
, end
);
9503 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
9505 field
= DECL_CHAIN (bfield
);
9509 if (code
== OMP_TASKGROUP
)
9511 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
9512 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
9513 gimple_seq_add_stmt (start
, g
);
9518 if (code
== OMP_FOR
)
9519 c
= gimple_omp_for_clauses (ctx
->stmt
);
9520 else if (code
== OMP_SECTIONS
)
9521 c
= gimple_omp_sections_clauses (ctx
->stmt
);
9522 else if (code
== OMP_SCOPE
)
9523 c
= gimple_omp_scope_clauses (ctx
->stmt
);
9525 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
9526 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
9527 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
9528 build_fold_addr_expr (avar
));
9529 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
9532 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
9533 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
9535 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
9536 gimple_seq_add_stmt (end
, g
);
9537 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
9538 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9540 enum built_in_function bfn
9541 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
9542 t
= builtin_decl_explicit (bfn
);
9543 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
9547 arg
= create_tmp_var (c_bool_type
);
9548 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
9552 arg
= build_int_cst (c_bool_type
, 0);
9553 g
= gimple_build_call (t
, 1, arg
);
9557 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
9558 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
9560 gimple_seq_add_stmt (end
, g
);
9562 gimple_seq_add_stmt (end
, gimple_build_label (lab7
));
9563 t
= build_constructor (atype
, NULL
);
9564 TREE_THIS_VOLATILE (t
) = 1;
9565 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
9568 /* Expand code for an OpenMP taskgroup directive. */
9571 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9573 gimple
*stmt
= gsi_stmt (*gsi_p
);
9576 gimple_seq dseq
= NULL
;
9577 tree block
= make_node (BLOCK
);
9579 bind
= gimple_build_bind (NULL
, NULL
, block
);
9580 gsi_replace (gsi_p
, bind
, true);
9581 gimple_bind_add_stmt (bind
, stmt
);
9583 push_gimplify_context ();
9585 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
9587 gimple_bind_add_stmt (bind
, x
);
9589 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
9590 gimple_omp_taskgroup_clauses (stmt
),
9591 gimple_bind_body_ptr (bind
), &dseq
);
9593 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9594 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9595 gimple_omp_set_body (stmt
, NULL
);
9597 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9598 gimple_bind_add_seq (bind
, dseq
);
9600 pop_gimplify_context (bind
);
9602 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9603 BLOCK_VARS (block
) = ctx
->block_vars
;
9607 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9610 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
9613 struct omp_for_data fd
;
9614 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
9617 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
9618 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
9619 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
9623 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9624 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
9625 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
9626 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
9628 /* Merge depend clauses from multiple adjacent
9629 #pragma omp ordered depend(sink:...) constructs
9630 into one #pragma omp ordered depend(sink:...), so that
9631 we can optimize them together. */
9632 gimple_stmt_iterator gsi
= *gsi_p
;
9634 while (!gsi_end_p (gsi
))
9636 gimple
*stmt
= gsi_stmt (gsi
);
9637 if (is_gimple_debug (stmt
)
9638 || gimple_code (stmt
) == GIMPLE_NOP
)
9643 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
9645 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
9646 c
= gimple_omp_ordered_clauses (ord_stmt2
);
9648 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
9649 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
9652 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
9654 gsi_remove (&gsi
, true);
9658 /* Canonicalize sink dependence clauses into one folded clause if
9661 The basic algorithm is to create a sink vector whose first
9662 element is the GCD of all the first elements, and whose remaining
9663 elements are the minimum of the subsequent columns.
9665 We ignore dependence vectors whose first element is zero because
9666 such dependencies are known to be executed by the same thread.
9668 We take into account the direction of the loop, so a minimum
9669 becomes a maximum if the loop is iterating forwards. We also
9670 ignore sink clauses where the loop direction is unknown, or where
9671 the offsets are clearly invalid because they are not a multiple
9672 of the loop increment.
9676 #pragma omp for ordered(2)
9677 for (i=0; i < N; ++i)
9678 for (j=0; j < M; ++j)
9680 #pragma omp ordered \
9681 depend(sink:i-8,j-2) \
9682 depend(sink:i,j-1) \ // Completely ignored because i+0.
9683 depend(sink:i-4,j-3) \
9684 depend(sink:i-6,j-4)
9685 #pragma omp ordered depend(source)
9690 depend(sink:-gcd(8,4,6),-min(2,3,4))
9695 /* FIXME: Computing GCD's where the first element is zero is
9696 non-trivial in the presence of collapsed loops. Do this later. */
9697 if (fd
.collapse
> 1)
9700 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
9702 /* wide_int is not a POD so it must be default-constructed. */
9703 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
9704 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
9706 tree folded_dep
= NULL_TREE
;
9707 /* TRUE if the first dimension's offset is negative. */
9708 bool neg_offset_p
= false;
9710 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9712 while ((c
= *list_p
) != NULL
)
9714 bool remove
= false;
9716 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
9717 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
9718 goto next_ordered_clause
;
9721 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
9722 vec
&& TREE_CODE (vec
) == TREE_LIST
;
9723 vec
= TREE_CHAIN (vec
), ++i
)
9725 gcc_assert (i
< len
);
9727 /* omp_extract_for_data has canonicalized the condition. */
9728 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
9729 || fd
.loops
[i
].cond_code
== GT_EXPR
);
9730 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
9731 bool maybe_lexically_later
= true;
9733 /* While the committee makes up its mind, bail if we have any
9734 non-constant steps. */
9735 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
9736 goto lower_omp_ordered_ret
;
9738 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
9739 if (POINTER_TYPE_P (itype
))
9741 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
9742 TYPE_PRECISION (itype
),
9745 /* Ignore invalid offsets that are not multiples of the step. */
9746 if (!wi::multiple_of_p (wi::abs (offset
),
9747 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
9750 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
9751 "ignoring sink clause with offset that is not "
9752 "a multiple of the loop step");
9754 goto next_ordered_clause
;
9757 /* Calculate the first dimension. The first dimension of
9758 the folded dependency vector is the GCD of the first
9759 elements, while ignoring any first elements whose offset
9763 /* Ignore dependence vectors whose first dimension is 0. */
9767 goto next_ordered_clause
;
9771 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
9773 error_at (OMP_CLAUSE_LOCATION (c
),
9774 "first offset must be in opposite direction "
9775 "of loop iterations");
9776 goto lower_omp_ordered_ret
;
9780 neg_offset_p
= forward
;
9781 /* Initialize the first time around. */
9782 if (folded_dep
== NULL_TREE
)
9785 folded_deps
[0] = offset
;
9788 folded_deps
[0] = wi::gcd (folded_deps
[0],
9792 /* Calculate minimum for the remaining dimensions. */
9795 folded_deps
[len
+ i
- 1] = offset
;
9796 if (folded_dep
== c
)
9797 folded_deps
[i
] = offset
;
9798 else if (maybe_lexically_later
9799 && !wi::eq_p (folded_deps
[i
], offset
))
9801 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
9805 for (j
= 1; j
<= i
; j
++)
9806 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
9809 maybe_lexically_later
= false;
9813 gcc_assert (i
== len
);
9817 next_ordered_clause
:
9819 *list_p
= OMP_CLAUSE_CHAIN (c
);
9821 list_p
= &OMP_CLAUSE_CHAIN (c
);
9827 folded_deps
[0] = -folded_deps
[0];
9829 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
9830 if (POINTER_TYPE_P (itype
))
9833 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
9834 = wide_int_to_tree (itype
, folded_deps
[0]);
9835 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
9836 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
9839 lower_omp_ordered_ret
:
9841 /* Ordered without clauses is #pragma omp threads, while we want
9842 a nop instead if we remove all clauses. */
9843 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
9844 gsi_replace (gsi_p
, gimple_build_nop (), true);
9848 /* Expand code for an OpenMP ordered directive. */
9851 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9854 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
9855 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
9858 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9860 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9863 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
9864 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9865 OMP_CLAUSE_THREADS
);
9867 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9870 /* FIXME: This is needs to be moved to the expansion to verify various
9871 conditions only testable on cfg with dominators computed, and also
9872 all the depend clauses to be merged still might need to be available
9873 for the runtime checks. */
9875 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
9879 push_gimplify_context ();
9881 block
= make_node (BLOCK
);
9882 bind
= gimple_build_bind (NULL
, NULL
, block
);
9883 gsi_replace (gsi_p
, bind
, true);
9884 gimple_bind_add_stmt (bind
, stmt
);
9888 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
9889 build_int_cst (NULL_TREE
, threads
));
9890 cfun
->has_simduid_loops
= true;
9893 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
9895 gimple_bind_add_stmt (bind
, x
);
9897 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
9900 counter
= create_tmp_var (integer_type_node
);
9901 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
9902 gimple_call_set_lhs (g
, counter
);
9903 gimple_bind_add_stmt (bind
, g
);
9905 body
= create_artificial_label (UNKNOWN_LOCATION
);
9906 test
= create_artificial_label (UNKNOWN_LOCATION
);
9907 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
9909 tree simt_pred
= create_tmp_var (integer_type_node
);
9910 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
9911 gimple_call_set_lhs (g
, simt_pred
);
9912 gimple_bind_add_stmt (bind
, g
);
9914 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
9915 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
9916 gimple_bind_add_stmt (bind
, g
);
9918 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
9920 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9921 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9922 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9923 gimple_omp_set_body (stmt
, NULL
);
9927 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
9928 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
9929 gimple_bind_add_stmt (bind
, g
);
9931 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
9932 tree nonneg
= create_tmp_var (integer_type_node
);
9933 gimple_seq tseq
= NULL
;
9934 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
9935 gimple_bind_add_seq (bind
, tseq
);
9937 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
9938 gimple_call_set_lhs (g
, nonneg
);
9939 gimple_bind_add_stmt (bind
, g
);
9941 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
9942 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
9943 gimple_bind_add_stmt (bind
, g
);
9945 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
9948 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
9949 build_int_cst (NULL_TREE
, threads
));
9951 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
9953 gimple_bind_add_stmt (bind
, x
);
9955 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9957 pop_gimplify_context (bind
);
9959 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9960 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
/* NOTE(review): this span is a garbled extraction of lower_omp_scan — statements
   are split across lines, original line numbers are fused into the text, and many
   source lines are missing (gaps in the embedded numbering).  Restore the function
   from the upstream file before making any edit here; the fragments below are kept
   byte-identical.  */
9964 /* Expand code for an OpenMP scan directive and the structured block
9965 before the scan directive. */
9968 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9970 gimple
*stmt
= gsi_stmt (*gsi_p
);
9972 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
9973 tree lane
= NULL_TREE
;
9974 gimple_seq before
= NULL
;
9975 omp_context
*octx
= ctx
->outer
;
9977 if (octx
->scan_exclusive
&& !has_clauses
)
9979 gimple_stmt_iterator gsi2
= *gsi_p
;
9981 gimple
*stmt2
= gsi_stmt (gsi2
);
9982 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9983 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9984 the one with exclusive clause(s), comes first. */
9986 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
9987 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
9989 gsi_remove (gsi_p
, false);
9990 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
9991 ctx
= maybe_lookup_ctx (stmt2
);
9993 lower_omp_scan (gsi_p
, ctx
);
/* NOTE(review): below classifies the enclosing construct (simd vs. worksharing
   for) to decide how the inscan reduction is lowered — fragmentary; confirm
   against the upstream file.  */
9998 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
9999 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
10000 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
10001 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
10002 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
10003 && !gimple_omp_for_combined_p (octx
->stmt
));
10004 bool is_for_simd
= is_simd
&& gimple_omp_for_combined_into_p (octx
->stmt
);
10005 if (is_for_simd
&& octx
->for_simd_scan_phase
)
10008 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
10009 OMP_CLAUSE__SIMDUID_
))
10011 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
10012 lane
= create_tmp_var (unsigned_type_node
);
10013 tree t
= build_int_cst (integer_type_node
,
10015 : octx
->scan_inclusive
? 2 : 3);
10017 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
10018 gimple_call_set_lhs (g
, lane
);
10019 gimple_seq_add_stmt (&before
, g
);
10022 if (is_simd
|| is_for
)
10024 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
10025 c
; c
= OMP_CLAUSE_CHAIN (c
))
10026 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10027 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
10029 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10030 tree var
= OMP_CLAUSE_DECL (c
);
10031 tree new_var
= lookup_decl (var
, octx
);
10032 tree val
= new_var
;
10033 tree var2
= NULL_TREE
;
10034 tree var3
= NULL_TREE
;
10035 tree var4
= NULL_TREE
;
10036 tree lane0
= NULL_TREE
;
10037 tree new_vard
= new_var
;
10038 if (omp_privatize_by_reference (var
))
10040 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
10043 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
10045 val
= DECL_VALUE_EXPR (new_vard
);
10046 if (new_vard
!= new_var
)
10048 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
10049 val
= TREE_OPERAND (val
, 0);
10051 if (TREE_CODE (val
) == ARRAY_REF
10052 && VAR_P (TREE_OPERAND (val
, 0)))
10054 tree v
= TREE_OPERAND (val
, 0);
10055 if (lookup_attribute ("omp simd array",
10056 DECL_ATTRIBUTES (v
)))
10058 val
= unshare_expr (val
);
10059 lane0
= TREE_OPERAND (val
, 1);
10060 TREE_OPERAND (val
, 1) = lane
;
10061 var2
= lookup_decl (v
, octx
);
10062 if (octx
->scan_exclusive
)
10063 var4
= lookup_decl (var2
, octx
);
10065 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10066 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
10069 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
10070 var2
, lane
, NULL_TREE
, NULL_TREE
);
10071 TREE_THIS_NOTRAP (var2
) = 1;
10072 if (octx
->scan_exclusive
)
10074 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
10075 var4
, lane
, NULL_TREE
,
10077 TREE_THIS_NOTRAP (var4
) = 1;
10088 var2
= build_outer_var_ref (var
, octx
);
10089 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10091 var3
= maybe_lookup_decl (new_vard
, octx
);
10092 if (var3
== new_vard
|| var3
== NULL_TREE
)
10094 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
10096 var4
= maybe_lookup_decl (var3
, octx
);
10097 if (var4
== var3
|| var4
== NULL_TREE
)
10099 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
10110 && octx
->scan_exclusive
10112 && var4
== NULL_TREE
)
10113 var4
= create_tmp_var (TREE_TYPE (val
));
10115 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10117 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
10122 /* If we've added a separate identity element
10123 variable, copy it over into val. */
10124 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10126 gimplify_and_add (x
, &before
);
10128 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
10130 /* Otherwise, assign to it the identity element. */
10131 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
10133 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10134 tree ref
= build_outer_var_ref (var
, octx
);
10135 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10136 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10139 if (new_vard
!= new_var
)
10140 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10141 SET_DECL_VALUE_EXPR (new_vard
, val
);
10143 SET_DECL_VALUE_EXPR (placeholder
, ref
);
10144 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10145 lower_omp (&tseq
, octx
);
10147 SET_DECL_VALUE_EXPR (new_vard
, x
);
10148 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10149 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10150 gimple_seq_add_seq (&before
, tseq
);
10152 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
10158 if (octx
->scan_exclusive
)
10160 tree v4
= unshare_expr (var4
);
10161 tree v2
= unshare_expr (var2
);
10162 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
10163 gimplify_and_add (x
, &before
);
10165 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10166 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10167 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10169 if (x
&& new_vard
!= new_var
)
10170 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
10172 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
10173 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10174 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10175 lower_omp (&tseq
, octx
);
10176 gimple_seq_add_seq (&before
, tseq
);
10177 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
10179 SET_DECL_VALUE_EXPR (new_vard
, x
);
10180 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10181 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10182 if (octx
->scan_inclusive
)
10184 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10186 gimplify_and_add (x
, &before
);
10188 else if (lane0
== NULL_TREE
)
10190 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10192 gimplify_and_add (x
, &before
);
10200 /* input phase. Set val to initializer before
10202 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
10203 gimplify_assign (val
, x
, &before
);
10208 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
10209 if (code
== MINUS_EXPR
)
10212 tree x
= build2 (code
, TREE_TYPE (var2
),
10213 unshare_expr (var2
), unshare_expr (val
));
10214 if (octx
->scan_inclusive
)
10216 gimplify_assign (unshare_expr (var2
), x
, &before
);
10217 gimplify_assign (val
, var2
, &before
);
10221 gimplify_assign (unshare_expr (var4
),
10222 unshare_expr (var2
), &before
);
10223 gimplify_assign (var2
, x
, &before
);
10224 if (lane0
== NULL_TREE
)
10225 gimplify_assign (val
, var4
, &before
);
10229 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
10231 tree vexpr
= unshare_expr (var4
);
10232 TREE_OPERAND (vexpr
, 1) = lane0
;
10233 if (new_vard
!= new_var
)
10234 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
10235 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
10239 if (is_simd
&& !is_for_simd
)
10241 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
10242 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
10243 gsi_replace (gsi_p
, gimple_build_nop (), true);
10246 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
10249 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (stmt
));
10250 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
10255 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10256 substitution of a couple of function calls. But in the NAMED case,
10257 requires that languages coordinate a symbol name. It is therefore
10258 best put here in common code. */
10260 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
10263 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10266 tree name
, lock
, unlock
;
10267 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
10269 location_t loc
= gimple_location (stmt
);
10272 name
= gimple_omp_critical_name (stmt
);
10277 if (!critical_name_mutexes
)
10278 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
10280 tree
*n
= critical_name_mutexes
->get (name
);
10285 decl
= create_tmp_var_raw (ptr_type_node
);
10287 new_str
= ACONCAT ((".gomp_critical_user_",
10288 IDENTIFIER_POINTER (name
), NULL
));
10289 DECL_NAME (decl
) = get_identifier (new_str
);
10290 TREE_PUBLIC (decl
) = 1;
10291 TREE_STATIC (decl
) = 1;
10292 DECL_COMMON (decl
) = 1;
10293 DECL_ARTIFICIAL (decl
) = 1;
10294 DECL_IGNORED_P (decl
) = 1;
10296 varpool_node::finalize_decl (decl
);
10298 critical_name_mutexes
->put (name
, decl
);
10303 /* If '#pragma omp critical' is inside offloaded region or
10304 inside function marked as offloadable, the symbol must be
10305 marked as offloadable too. */
10307 if (cgraph_node::get (current_function_decl
)->offloadable
)
10308 varpool_node::get_create (decl
)->offloadable
= 1;
10310 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
10311 if (is_gimple_omp_offloaded (octx
->stmt
))
10313 varpool_node::get_create (decl
)->offloadable
= 1;
10317 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
10318 lock
= build_call_expr_loc (loc
, lock
, 1,
10319 build_fold_addr_expr_loc (loc
, decl
));
10321 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
10322 unlock
= build_call_expr_loc (loc
, unlock
, 1,
10323 build_fold_addr_expr_loc (loc
, decl
));
10327 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
10328 lock
= build_call_expr_loc (loc
, lock
, 0);
10330 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
10331 unlock
= build_call_expr_loc (loc
, unlock
, 0);
10334 push_gimplify_context ();
10336 block
= make_node (BLOCK
);
10337 bind
= gimple_build_bind (NULL
, NULL
, block
);
10338 gsi_replace (gsi_p
, bind
, true);
10339 gimple_bind_add_stmt (bind
, stmt
);
10341 tbody
= gimple_bind_body (bind
);
10342 gimplify_and_add (lock
, &tbody
);
10343 gimple_bind_set_body (bind
, tbody
);
10345 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10346 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
10347 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
10348 gimple_omp_set_body (stmt
, NULL
);
10350 tbody
= gimple_bind_body (bind
);
10351 gimplify_and_add (unlock
, &tbody
);
10352 gimple_bind_set_body (bind
, tbody
);
10354 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
10356 pop_gimplify_context (bind
);
10357 gimple_bind_append_vars (bind
, ctx
->block_vars
);
10358 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
10361 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10362 for a lastprivate clause. Given a loop control predicate of (V
10363 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10364 is appended to *DLIST, iterator initialization is appended to
10365 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10366 to be emitted in a critical section. */
10369 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
10370 gimple_seq
*dlist
, gimple_seq
*clist
,
10371 struct omp_context
*ctx
)
10373 tree clauses
, cond
, vinit
;
10374 enum tree_code cond_code
;
10377 cond_code
= fd
->loop
.cond_code
;
10378 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
10380 /* When possible, use a strict equality expression. This can let VRP
10381 type optimizations deduce the value and remove a copy. */
10382 if (tree_fits_shwi_p (fd
->loop
.step
))
10384 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
10385 if (step
== 1 || step
== -1)
10386 cond_code
= EQ_EXPR
;
10389 tree n2
= fd
->loop
.n2
;
10390 if (fd
->collapse
> 1
10391 && TREE_CODE (n2
) != INTEGER_CST
10392 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
10394 struct omp_context
*taskreg_ctx
= NULL
;
10395 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
10397 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
10398 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
10399 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
10401 if (gimple_omp_for_combined_into_p (gfor
))
10403 gcc_assert (ctx
->outer
->outer
10404 && is_parallel_ctx (ctx
->outer
->outer
));
10405 taskreg_ctx
= ctx
->outer
->outer
;
10409 struct omp_for_data outer_fd
;
10410 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
10411 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
10414 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
10415 taskreg_ctx
= ctx
->outer
->outer
;
10417 else if (is_taskreg_ctx (ctx
->outer
))
10418 taskreg_ctx
= ctx
->outer
;
10422 tree taskreg_clauses
10423 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
10424 tree innerc
= omp_find_clause (taskreg_clauses
,
10425 OMP_CLAUSE__LOOPTEMP_
);
10426 gcc_assert (innerc
);
10427 int count
= fd
->collapse
;
10429 && fd
->last_nonrect
== fd
->first_nonrect
+ 1)
10430 if (tree v
= gimple_omp_for_index (fd
->for_stmt
, fd
->last_nonrect
))
10431 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
10433 for (i
= 0; i
< count
; i
++)
10435 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10436 OMP_CLAUSE__LOOPTEMP_
);
10437 gcc_assert (innerc
);
10439 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10440 OMP_CLAUSE__LOOPTEMP_
);
10442 n2
= fold_convert (TREE_TYPE (n2
),
10443 lookup_decl (OMP_CLAUSE_DECL (innerc
),
10447 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
10449 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
10451 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
10452 if (!gimple_seq_empty_p (stmts
))
10454 gimple_seq_add_seq (&stmts
, *dlist
);
10457 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10458 vinit
= fd
->loop
.n1
;
10459 if (cond_code
== EQ_EXPR
10460 && tree_fits_shwi_p (fd
->loop
.n2
)
10461 && ! integer_zerop (fd
->loop
.n2
))
10462 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
10464 vinit
= unshare_expr (vinit
);
10466 /* Initialize the iterator variable, so that threads that don't execute
10467 any iterations don't execute the lastprivate clauses by accident. */
10468 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
10472 /* OpenACC privatization.
10474 Or, in other words, *sharing* at the respective OpenACC level of
10477 From a correctness perspective, a non-addressable variable can't be accessed
10478 outside the current thread, so it can go in a (faster than shared memory)
10479 register -- though that register may need to be broadcast in some
10480 circumstances. A variable can only meaningfully be "shared" across workers
10481 or vector lanes if its address is taken, e.g. by a call to an atomic
10484 From an optimisation perspective, the answer might be fuzzier: maybe
10485 sometimes, using shared memory directly would be faster than
10489 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags
,
10490 const location_t loc
, const tree c
,
10493 const dump_user_location_t d_u_loc
10494 = dump_user_location_t::from_location_t (loc
);
10495 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10497 # pragma GCC diagnostic push
10498 # pragma GCC diagnostic ignored "-Wformat"
10500 dump_printf_loc (l_dump_flags
, d_u_loc
,
10501 "variable %<%T%> ", decl
);
10503 # pragma GCC diagnostic pop
10506 dump_printf (l_dump_flags
,
10508 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
10510 dump_printf (l_dump_flags
,
10511 "declared in block ");
10515 oacc_privatization_candidate_p (const location_t loc
, const tree c
,
10518 dump_flags_t l_dump_flags
= get_openacc_privatization_dump_flags ();
10520 /* There is some differentiation depending on block vs. clause. */
10525 if (res
&& !VAR_P (decl
))
10529 if (dump_enabled_p ())
10531 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10532 dump_printf (l_dump_flags
,
10533 "potentially has improper OpenACC privatization level: %qs\n",
10534 get_tree_code_name (TREE_CODE (decl
)));
10538 if (res
&& block
&& TREE_STATIC (decl
))
10542 if (dump_enabled_p ())
10544 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10545 dump_printf (l_dump_flags
,
10546 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10551 if (res
&& block
&& DECL_EXTERNAL (decl
))
10555 if (dump_enabled_p ())
10557 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10558 dump_printf (l_dump_flags
,
10559 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10564 if (res
&& !TREE_ADDRESSABLE (decl
))
10568 if (dump_enabled_p ())
10570 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10571 dump_printf (l_dump_flags
,
10572 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10573 "not addressable");
10579 if (dump_enabled_p ())
10581 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10582 dump_printf (l_dump_flags
,
10583 "is candidate for adjusting OpenACC privatization level\n");
10587 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10589 print_generic_decl (dump_file
, decl
, dump_flags
);
10590 fprintf (dump_file
, "\n");
10596 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10600 oacc_privatization_scan_clause_chain (omp_context
*ctx
, tree clauses
)
10602 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10603 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
)
10605 tree decl
= OMP_CLAUSE_DECL (c
);
10607 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c
), c
, decl
))
10610 gcc_checking_assert (!ctx
->oacc_privatization_candidates
.contains (decl
));
10611 ctx
->oacc_privatization_candidates
.safe_push (decl
);
10615 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10619 oacc_privatization_scan_decl_chain (omp_context
*ctx
, tree decls
)
10621 for (tree decl
= decls
; decl
; decl
= DECL_CHAIN (decl
))
10623 if (!oacc_privatization_candidate_p (gimple_location (ctx
->stmt
), NULL
, decl
))
10626 gcc_checking_assert (!ctx
->oacc_privatization_candidates
.contains (decl
));
10627 ctx
->oacc_privatization_candidates
.safe_push (decl
);
10631 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10634 omp_find_scan (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
10635 struct walk_stmt_info
*wi
)
10637 gimple
*stmt
= gsi_stmt (*gsi_p
);
10639 *handled_ops_p
= true;
10640 switch (gimple_code (stmt
))
10644 case GIMPLE_OMP_FOR
:
10645 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
10646 && gimple_omp_for_combined_into_p (stmt
))
10647 *handled_ops_p
= false;
10650 case GIMPLE_OMP_SCAN
:
10651 *(gimple_stmt_iterator
*) (wi
->info
) = *gsi_p
;
10652 return integer_zero_node
;
10659 /* Helper function for lower_omp_for, add transformations for a worksharing
10660 loop with scan directives inside of it.
10661 For worksharing loop not combined with simd, transform:
10662 #pragma omp for reduction(inscan,+:r) private(i)
10663 for (i = 0; i < n; i = i + 1)
10668 #pragma omp scan inclusive(r)
10674 into two worksharing loops + code to merge results:
10676 num_threads = omp_get_num_threads ();
10677 thread_num = omp_get_thread_num ();
10678 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10683 // For UDRs this is UDR init, or if ctors are needed, copy from
10684 // var3 that has been constructed to contain the neutral element.
10688 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10689 // a shared array with num_threads elements and rprivb to a local array
10690 // number of elements equal to the number of (contiguous) iterations the
10691 // current thread will perform. controlb and controlp variables are
10692 // temporaries to handle deallocation of rprivb at the end of second
10694 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10695 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10696 for (i = 0; i < n; i = i + 1)
10699 // For UDRs this is UDR init or copy from var3.
10701 // This is the input phase from user code.
10705 // For UDRs this is UDR merge.
10707 // Rather than handing it over to the user, save to local thread's
10709 rprivb[ivar] = var2;
10710 // For exclusive scan, the above two statements are swapped.
10714 // And remember the final value from this thread's into the shared
10716 rpriva[(sizetype) thread_num] = var2;
10717 // If more than one thread, compute using Work-Efficient prefix sum
10718 // the inclusive parallel scan of the rpriva array.
10719 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10724 num_threadsu = (unsigned int) num_threads;
10725 thread_numup1 = (unsigned int) thread_num + 1;
10728 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10732 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10737 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10738 mul = REALPART_EXPR <cplx>;
10739 ovf = IMAGPART_EXPR <cplx>;
10740 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10743 andvm1 = andv + 4294967295;
10745 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10747 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10748 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10749 rpriva[l] = rpriva[l - k] + rpriva[l];
10751 if (down == 0) goto <D.2121>; else goto <D.2122>;
10759 if (k != 0) goto <D.2108>; else goto <D.2103>;
10761 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10763 // For UDRs this is UDR init or copy from var3.
10767 var2 = rpriva[thread_num - 1];
10770 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10771 reduction(inscan,+:r) private(i)
10772 for (i = 0; i < n; i = i + 1)
10775 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10776 r = var2 + rprivb[ivar];
10779 // This is the scan phase from user code.
10781 // Plus a bump of the iterator.
10787 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
10788 struct omp_for_data
*fd
, omp_context
*ctx
)
10790 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
10791 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
10793 gimple_seq body
= gimple_omp_body (stmt
);
10794 gimple_stmt_iterator input1_gsi
= gsi_none ();
10795 struct walk_stmt_info wi
;
10796 memset (&wi
, 0, sizeof (wi
));
10797 wi
.val_only
= true;
10798 wi
.info
= (void *) &input1_gsi
;
10799 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
10800 gcc_assert (!gsi_end_p (input1_gsi
));
10802 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
10803 gimple_stmt_iterator gsi
= input1_gsi
;
10805 gimple_stmt_iterator scan1_gsi
= gsi
;
10806 gimple
*scan_stmt1
= gsi_stmt (gsi
);
10807 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
10809 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
10810 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
10811 gimple_omp_set_body (input_stmt1
, NULL
);
10812 gimple_omp_set_body (scan_stmt1
, NULL
);
10813 gimple_omp_set_body (stmt
, NULL
);
10815 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
10816 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
10817 gimple_omp_set_body (stmt
, body
);
10818 gimple_omp_set_body (input_stmt1
, input_body
);
10820 gimple_stmt_iterator input2_gsi
= gsi_none ();
10821 memset (&wi
, 0, sizeof (wi
));
10822 wi
.val_only
= true;
10823 wi
.info
= (void *) &input2_gsi
;
10824 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
10825 gcc_assert (!gsi_end_p (input2_gsi
));
10827 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
10830 gimple_stmt_iterator scan2_gsi
= gsi
;
10831 gimple
*scan_stmt2
= gsi_stmt (gsi
);
10832 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
10833 gimple_omp_set_body (scan_stmt2
, scan_body
);
10835 gimple_stmt_iterator input3_gsi
= gsi_none ();
10836 gimple_stmt_iterator scan3_gsi
= gsi_none ();
10837 gimple_stmt_iterator input4_gsi
= gsi_none ();
10838 gimple_stmt_iterator scan4_gsi
= gsi_none ();
10839 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
10840 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
10841 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
10844 memset (&wi
, 0, sizeof (wi
));
10845 wi
.val_only
= true;
10846 wi
.info
= (void *) &input3_gsi
;
10847 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
10848 gcc_assert (!gsi_end_p (input3_gsi
));
10850 input_stmt3
= gsi_stmt (input3_gsi
);
10854 scan_stmt3
= gsi_stmt (gsi
);
10855 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
10857 memset (&wi
, 0, sizeof (wi
));
10858 wi
.val_only
= true;
10859 wi
.info
= (void *) &input4_gsi
;
10860 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
10861 gcc_assert (!gsi_end_p (input4_gsi
));
10863 input_stmt4
= gsi_stmt (input4_gsi
);
10867 scan_stmt4
= gsi_stmt (gsi
);
10868 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
10870 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
10871 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
10874 tree num_threads
= create_tmp_var (integer_type_node
);
10875 tree thread_num
= create_tmp_var (integer_type_node
);
10876 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
10877 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
10878 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
10879 gimple_call_set_lhs (g
, num_threads
);
10880 gimple_seq_add_stmt (body_p
, g
);
10881 g
= gimple_build_call (threadnum_decl
, 0);
10882 gimple_call_set_lhs (g
, thread_num
);
10883 gimple_seq_add_stmt (body_p
, g
);
10885 tree ivar
= create_tmp_var (sizetype
);
10886 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
10887 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
10888 tree k
= create_tmp_var (unsigned_type_node
);
10889 tree l
= create_tmp_var (unsigned_type_node
);
10891 gimple_seq clist
= NULL
, mdlist
= NULL
;
10892 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
10893 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
10894 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
10895 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
10896 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10897 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10898 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
10900 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10901 tree var
= OMP_CLAUSE_DECL (c
);
10902 tree new_var
= lookup_decl (var
, ctx
);
10903 tree var3
= NULL_TREE
;
10904 tree new_vard
= new_var
;
10905 if (omp_privatize_by_reference (var
))
10906 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
10907 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10909 var3
= maybe_lookup_decl (new_vard
, ctx
);
10910 if (var3
== new_vard
)
10914 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
10915 tree rpriva
= create_tmp_var (ptype
);
10916 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
10917 OMP_CLAUSE_DECL (nc
) = rpriva
;
10919 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10921 tree rprivb
= create_tmp_var (ptype
);
10922 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
10923 OMP_CLAUSE_DECL (nc
) = rprivb
;
10924 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
10926 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10928 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
10929 if (new_vard
!= new_var
)
10930 TREE_ADDRESSABLE (var2
) = 1;
10931 gimple_add_tmp_var (var2
);
10933 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
10934 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10935 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10936 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10937 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10939 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
10940 thread_num
, integer_minus_one_node
);
10941 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
10942 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10943 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10944 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10945 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10947 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
10948 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10949 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10950 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10951 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10953 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
10954 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
10955 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10956 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10957 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10958 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10960 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
10961 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10962 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
10963 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10965 tree var4
= is_for_simd
? new_var
: var2
;
10966 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
10969 var5
= lookup_decl (var
, input_simd_ctx
);
10970 var6
= lookup_decl (var
, scan_simd_ctx
);
10971 if (new_vard
!= new_var
)
10973 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
10974 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
10977 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10979 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
10982 x
= lang_hooks
.decls
.omp_clause_default_ctor
10983 (c
, var2
, build_outer_var_ref (var
, ctx
));
10985 gimplify_and_add (x
, &clist
);
10987 x
= build_outer_var_ref (var
, ctx
);
10988 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
10990 gimplify_and_add (x
, &thr01_list
);
10992 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10993 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10996 x
= unshare_expr (var4
);
10997 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
10998 gimplify_and_add (x
, &thrn1_list
);
10999 x
= unshare_expr (var4
);
11000 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11001 gimplify_and_add (x
, &thr02_list
);
11003 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
11005 /* Otherwise, assign to it the identity element. */
11006 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11007 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11010 if (new_vard
!= new_var
)
11011 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11012 SET_DECL_VALUE_EXPR (new_vard
, val
);
11013 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11015 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
11016 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11017 lower_omp (&tseq
, ctx
);
11018 gimple_seq_add_seq (&thrn1_list
, tseq
);
11019 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11020 lower_omp (&tseq
, ctx
);
11021 gimple_seq_add_seq (&thr02_list
, tseq
);
11022 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11023 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11024 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
11026 SET_DECL_VALUE_EXPR (new_vard
, y
);
11029 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11030 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11034 x
= unshare_expr (var4
);
11035 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
11036 gimplify_and_add (x
, &thrn2_list
);
11040 x
= unshare_expr (rprivb_ref
);
11041 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
11042 gimplify_and_add (x
, &scan1_list
);
11046 if (ctx
->scan_exclusive
)
11048 x
= unshare_expr (rprivb_ref
);
11049 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11050 gimplify_and_add (x
, &scan1_list
);
11053 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11054 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11055 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11056 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11057 lower_omp (&tseq
, ctx
);
11058 gimple_seq_add_seq (&scan1_list
, tseq
);
11060 if (ctx
->scan_inclusive
)
11062 x
= unshare_expr (rprivb_ref
);
11063 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11064 gimplify_and_add (x
, &scan1_list
);
11068 x
= unshare_expr (rpriva_ref
);
11069 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
11070 unshare_expr (var4
));
11071 gimplify_and_add (x
, &mdlist
);
11073 x
= unshare_expr (is_for_simd
? var6
: new_var
);
11074 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
11075 gimplify_and_add (x
, &input2_list
);
11078 if (new_vard
!= new_var
)
11079 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11081 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11082 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11083 SET_DECL_VALUE_EXPR (new_vard
, val
);
11084 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11087 SET_DECL_VALUE_EXPR (placeholder
, var6
);
11088 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11091 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11092 lower_omp (&tseq
, ctx
);
11094 SET_DECL_VALUE_EXPR (new_vard
, y
);
11097 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11098 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11102 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
11103 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11104 lower_omp (&tseq
, ctx
);
11106 gimple_seq_add_seq (&input2_list
, tseq
);
11108 x
= build_outer_var_ref (var
, ctx
);
11109 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
11110 gimplify_and_add (x
, &last_list
);
11112 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
11113 gimplify_and_add (x
, &reduc_list
);
11114 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11115 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11117 if (new_vard
!= new_var
)
11118 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11119 SET_DECL_VALUE_EXPR (new_vard
, val
);
11120 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11121 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11122 lower_omp (&tseq
, ctx
);
11123 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
11124 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11125 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11127 SET_DECL_VALUE_EXPR (new_vard
, y
);
11130 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11131 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11133 gimple_seq_add_seq (&reduc_list
, tseq
);
11134 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
11135 gimplify_and_add (x
, &reduc_list
);
11137 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
11139 gimplify_and_add (x
, dlist
);
11143 x
= build_outer_var_ref (var
, ctx
);
11144 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
11146 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
11147 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
11149 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
11151 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
11153 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
11154 if (code
== MINUS_EXPR
)
11158 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
11161 if (ctx
->scan_exclusive
)
11162 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11164 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
11165 gimplify_assign (var2
, x
, &scan1_list
);
11166 if (ctx
->scan_inclusive
)
11167 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11171 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
11174 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
11175 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
11177 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
11180 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
11181 unshare_expr (rprival_ref
));
11182 gimplify_assign (rprival_ref
, x
, &reduc_list
);
11186 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11187 gimple_seq_add_stmt (&scan1_list
, g
);
11188 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11189 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11190 ? scan_stmt4
: scan_stmt2
), g
);
11192 tree controlb
= create_tmp_var (boolean_type_node
);
11193 tree controlp
= create_tmp_var (ptr_type_node
);
11194 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11195 OMP_CLAUSE_DECL (nc
) = controlb
;
11196 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11198 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11199 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11200 OMP_CLAUSE_DECL (nc
) = controlp
;
11201 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11203 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11204 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11205 OMP_CLAUSE_DECL (nc
) = controlb
;
11206 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11208 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11209 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11210 OMP_CLAUSE_DECL (nc
) = controlp
;
11211 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11213 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11215 *cp1
= gimple_omp_for_clauses (stmt
);
11216 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
11217 *cp2
= gimple_omp_for_clauses (new_stmt
);
11218 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
11222 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
11223 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
11225 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
11227 gsi_remove (&input3_gsi
, true);
11228 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
11230 gsi_remove (&scan3_gsi
, true);
11231 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
11233 gsi_remove (&input4_gsi
, true);
11234 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
11236 gsi_remove (&scan4_gsi
, true);
11240 gimple_omp_set_body (scan_stmt1
, scan1_list
);
11241 gimple_omp_set_body (input_stmt2
, input2_list
);
11244 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
11246 gsi_remove (&input1_gsi
, true);
11247 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
11249 gsi_remove (&scan1_gsi
, true);
11250 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
11252 gsi_remove (&input2_gsi
, true);
11253 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
11255 gsi_remove (&scan2_gsi
, true);
11257 gimple_seq_add_seq (body_p
, clist
);
11259 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11260 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11261 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11262 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11263 gimple_seq_add_stmt (body_p
, g
);
11264 g
= gimple_build_label (lab1
);
11265 gimple_seq_add_stmt (body_p
, g
);
11266 gimple_seq_add_seq (body_p
, thr01_list
);
11267 g
= gimple_build_goto (lab3
);
11268 gimple_seq_add_stmt (body_p
, g
);
11269 g
= gimple_build_label (lab2
);
11270 gimple_seq_add_stmt (body_p
, g
);
11271 gimple_seq_add_seq (body_p
, thrn1_list
);
11272 g
= gimple_build_label (lab3
);
11273 gimple_seq_add_stmt (body_p
, g
);
11275 g
= gimple_build_assign (ivar
, size_zero_node
);
11276 gimple_seq_add_stmt (body_p
, g
);
11278 gimple_seq_add_stmt (body_p
, stmt
);
11279 gimple_seq_add_seq (body_p
, body
);
11280 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
11283 g
= gimple_build_omp_return (true);
11284 gimple_seq_add_stmt (body_p
, g
);
11285 gimple_seq_add_seq (body_p
, mdlist
);
11287 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11288 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11289 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
11290 gimple_seq_add_stmt (body_p
, g
);
11291 g
= gimple_build_label (lab1
);
11292 gimple_seq_add_stmt (body_p
, g
);
11294 g
= omp_build_barrier (NULL
);
11295 gimple_seq_add_stmt (body_p
, g
);
11297 tree down
= create_tmp_var (unsigned_type_node
);
11298 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
11299 gimple_seq_add_stmt (body_p
, g
);
11301 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
11302 gimple_seq_add_stmt (body_p
, g
);
11304 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
11305 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
11306 gimple_seq_add_stmt (body_p
, g
);
11308 tree thread_numu
= create_tmp_var (unsigned_type_node
);
11309 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
11310 gimple_seq_add_stmt (body_p
, g
);
11312 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
11313 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
11314 build_int_cst (unsigned_type_node
, 1));
11315 gimple_seq_add_stmt (body_p
, g
);
11317 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11318 g
= gimple_build_label (lab3
);
11319 gimple_seq_add_stmt (body_p
, g
);
11321 tree twok
= create_tmp_var (unsigned_type_node
);
11322 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11323 gimple_seq_add_stmt (body_p
, g
);
11325 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
11326 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
11327 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
11328 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
11329 gimple_seq_add_stmt (body_p
, g
);
11330 g
= gimple_build_label (lab4
);
11331 gimple_seq_add_stmt (body_p
, g
);
11332 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
11333 gimple_seq_add_stmt (body_p
, g
);
11334 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11335 gimple_seq_add_stmt (body_p
, g
);
11337 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
11338 gimple_seq_add_stmt (body_p
, g
);
11339 g
= gimple_build_label (lab6
);
11340 gimple_seq_add_stmt (body_p
, g
);
11342 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11343 gimple_seq_add_stmt (body_p
, g
);
11345 g
= gimple_build_label (lab5
);
11346 gimple_seq_add_stmt (body_p
, g
);
11348 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11349 gimple_seq_add_stmt (body_p
, g
);
11351 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
11352 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
11353 gimple_call_set_lhs (g
, cplx
);
11354 gimple_seq_add_stmt (body_p
, g
);
11355 tree mul
= create_tmp_var (unsigned_type_node
);
11356 g
= gimple_build_assign (mul
, REALPART_EXPR
,
11357 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
11358 gimple_seq_add_stmt (body_p
, g
);
11359 tree ovf
= create_tmp_var (unsigned_type_node
);
11360 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
11361 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
11362 gimple_seq_add_stmt (body_p
, g
);
11364 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
11365 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
11366 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
11368 gimple_seq_add_stmt (body_p
, g
);
11369 g
= gimple_build_label (lab7
);
11370 gimple_seq_add_stmt (body_p
, g
);
11372 tree andv
= create_tmp_var (unsigned_type_node
);
11373 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
11374 gimple_seq_add_stmt (body_p
, g
);
11375 tree andvm1
= create_tmp_var (unsigned_type_node
);
11376 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
11377 build_minus_one_cst (unsigned_type_node
));
11378 gimple_seq_add_stmt (body_p
, g
);
11380 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
11381 gimple_seq_add_stmt (body_p
, g
);
11383 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
11384 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
11385 gimple_seq_add_stmt (body_p
, g
);
11386 g
= gimple_build_label (lab9
);
11387 gimple_seq_add_stmt (body_p
, g
);
11388 gimple_seq_add_seq (body_p
, reduc_list
);
11389 g
= gimple_build_label (lab8
);
11390 gimple_seq_add_stmt (body_p
, g
);
11392 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
11393 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
11394 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
11395 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
11397 gimple_seq_add_stmt (body_p
, g
);
11398 g
= gimple_build_label (lab10
);
11399 gimple_seq_add_stmt (body_p
, g
);
11400 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
11401 gimple_seq_add_stmt (body_p
, g
);
11402 g
= gimple_build_goto (lab12
);
11403 gimple_seq_add_stmt (body_p
, g
);
11404 g
= gimple_build_label (lab11
);
11405 gimple_seq_add_stmt (body_p
, g
);
11406 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11407 gimple_seq_add_stmt (body_p
, g
);
11408 g
= gimple_build_label (lab12
);
11409 gimple_seq_add_stmt (body_p
, g
);
11411 g
= omp_build_barrier (NULL
);
11412 gimple_seq_add_stmt (body_p
, g
);
11414 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
11416 gimple_seq_add_stmt (body_p
, g
);
11418 g
= gimple_build_label (lab2
);
11419 gimple_seq_add_stmt (body_p
, g
);
11421 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11422 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11423 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11424 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11425 gimple_seq_add_stmt (body_p
, g
);
11426 g
= gimple_build_label (lab1
);
11427 gimple_seq_add_stmt (body_p
, g
);
11428 gimple_seq_add_seq (body_p
, thr02_list
);
11429 g
= gimple_build_goto (lab3
);
11430 gimple_seq_add_stmt (body_p
, g
);
11431 g
= gimple_build_label (lab2
);
11432 gimple_seq_add_stmt (body_p
, g
);
11433 gimple_seq_add_seq (body_p
, thrn2_list
);
11434 g
= gimple_build_label (lab3
);
11435 gimple_seq_add_stmt (body_p
, g
);
11437 g
= gimple_build_assign (ivar
, size_zero_node
);
11438 gimple_seq_add_stmt (body_p
, g
);
11439 gimple_seq_add_stmt (body_p
, new_stmt
);
11440 gimple_seq_add_seq (body_p
, new_body
);
11442 gimple_seq new_dlist
= NULL
;
11443 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11444 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11445 tree num_threadsm1
= create_tmp_var (integer_type_node
);
11446 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
11447 integer_minus_one_node
);
11448 gimple_seq_add_stmt (&new_dlist
, g
);
11449 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
11450 gimple_seq_add_stmt (&new_dlist
, g
);
11451 g
= gimple_build_label (lab1
);
11452 gimple_seq_add_stmt (&new_dlist
, g
);
11453 gimple_seq_add_seq (&new_dlist
, last_list
);
11454 g
= gimple_build_label (lab2
);
11455 gimple_seq_add_stmt (&new_dlist
, g
);
11456 gimple_seq_add_seq (&new_dlist
, *dlist
);
11457 *dlist
= new_dlist
;
11460 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11461 the addresses of variables to be made private at the surrounding
11462 parallelism level. Such functions appear in the gimple code stream in two
11463 forms, e.g. for a partitioned loop:
11465 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11466 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11467 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11468 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11470 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11471 not as part of a HEAD_MARK sequence:
11473 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11475 For such stand-alone appearances, the 3rd argument is always 0, denoting
11476 gang partitioning. */
11479 lower_oacc_private_marker (omp_context
*ctx
)
11481 if (ctx
->oacc_privatization_candidates
.length () == 0)
11484 auto_vec
<tree
, 5> args
;
11486 args
.quick_push (build_int_cst (integer_type_node
, IFN_UNIQUE_OACC_PRIVATE
));
11487 args
.quick_push (integer_zero_node
);
11488 args
.quick_push (integer_minus_one_node
);
11492 FOR_EACH_VEC_ELT (ctx
->oacc_privatization_candidates
, i
, decl
)
11494 for (omp_context
*thisctx
= ctx
; thisctx
; thisctx
= thisctx
->outer
)
11496 tree inner_decl
= maybe_lookup_decl (decl
, thisctx
);
11503 gcc_checking_assert (decl
);
11505 tree addr
= build_fold_addr_expr (decl
);
11506 args
.safe_push (addr
);
11509 return gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
11512 /* Lower code for an OMP loop directive. */
11515 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11517 tree
*rhs_p
, block
;
11518 struct omp_for_data fd
, *fdp
= NULL
;
11519 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
11521 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
11522 gimple_seq cnt_list
= NULL
, clist
= NULL
;
11523 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
11526 push_gimplify_context ();
11528 if (is_gimple_omp_oacc (ctx
->stmt
))
11529 oacc_privatization_scan_clause_chain (ctx
, gimple_omp_for_clauses (stmt
));
11531 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
11533 block
= make_node (BLOCK
);
11534 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
11535 /* Replace at gsi right away, so that 'stmt' is no member
11536 of a sequence anymore as we're going to add to a different
11538 gsi_replace (gsi_p
, new_stmt
, true);
11540 /* Move declaration of temporaries in the loop body before we make
11542 omp_for_body
= gimple_omp_body (stmt
);
11543 if (!gimple_seq_empty_p (omp_for_body
)
11544 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
11547 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
11548 tree vars
= gimple_bind_vars (inner_bind
);
11549 if (is_gimple_omp_oacc (ctx
->stmt
))
11550 oacc_privatization_scan_decl_chain (ctx
, vars
);
11551 gimple_bind_append_vars (new_stmt
, vars
);
11552 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11553 keep them on the inner_bind and it's block. */
11554 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
11555 if (gimple_bind_block (inner_bind
))
11556 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
11559 if (gimple_omp_for_combined_into_p (stmt
))
11561 omp_extract_for_data (stmt
, &fd
, NULL
);
11564 /* We need two temporaries with fd.loop.v type (istart/iend)
11565 and then (fd.collapse - 1) temporaries with the same
11566 type for count2 ... countN-1 vars if not constant. */
11568 tree type
= fd
.iter_type
;
11569 if (fd
.collapse
> 1
11570 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
11571 count
+= fd
.collapse
- 1;
11573 tree type2
= NULL_TREE
;
11575 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
11576 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
11577 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
11579 tree clauses
= *pc
;
11580 if (fd
.collapse
> 1
11582 && fd
.last_nonrect
== fd
.first_nonrect
+ 1
11583 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
11584 if (tree v
= gimple_omp_for_index (stmt
, fd
.last_nonrect
))
11585 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
11587 v
= gimple_omp_for_index (stmt
, fd
.first_nonrect
);
11588 type2
= TREE_TYPE (v
);
11594 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
11595 OMP_CLAUSE__LOOPTEMP_
);
11596 if (ctx
->simt_stmt
)
11597 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
11598 OMP_CLAUSE__LOOPTEMP_
);
11599 for (i
= 0; i
< count
+ count2
; i
++)
11604 gcc_assert (outerc
);
11605 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
11606 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
11607 OMP_CLAUSE__LOOPTEMP_
);
11611 /* If there are 2 adjacent SIMD stmts, one with _simt_
11612 clause, another without, make sure they have the same
11613 decls in _looptemp_ clauses, because the outer stmt
11614 they are combined into will look up just one inner_stmt. */
11615 if (ctx
->simt_stmt
)
11616 temp
= OMP_CLAUSE_DECL (simtc
);
11618 temp
= create_tmp_var (i
>= count
? type2
: type
);
11619 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
11621 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
11622 OMP_CLAUSE_DECL (*pc
) = temp
;
11623 pc
= &OMP_CLAUSE_CHAIN (*pc
);
11624 if (ctx
->simt_stmt
)
11625 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
11626 OMP_CLAUSE__LOOPTEMP_
);
11631 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11635 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
11636 OMP_CLAUSE_REDUCTION
);
11637 tree rtmp
= NULL_TREE
;
11640 tree type
= build_pointer_type (pointer_sized_int_node
);
11641 tree temp
= create_tmp_var (type
);
11642 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
11643 OMP_CLAUSE_DECL (c
) = temp
;
11644 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
11645 gimple_omp_for_set_clauses (stmt
, c
);
11646 lower_omp_task_reductions (ctx
, OMP_FOR
,
11647 gimple_omp_for_clauses (stmt
),
11648 &tred_ilist
, &tred_dlist
);
11650 rtmp
= make_ssa_name (type
);
11651 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
11654 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
11657 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
11659 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
11660 gimple_omp_for_pre_body (stmt
));
11662 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
11664 gcall
*private_marker
= NULL
;
11665 if (is_gimple_omp_oacc (ctx
->stmt
)
11666 && !gimple_seq_empty_p (omp_for_body
))
11667 private_marker
= lower_oacc_private_marker (ctx
);
11669 /* Lower the header expressions. At this point, we can assume that
11670 the header is of the form:
11672 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11674 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11675 using the .omp_data_s mapping, if needed. */
11676 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
11678 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
11679 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
11681 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
11682 TREE_VEC_ELT (*rhs_p
, 1)
11683 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
11684 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
11685 TREE_VEC_ELT (*rhs_p
, 2)
11686 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
11688 else if (!is_gimple_min_invariant (*rhs_p
))
11689 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11690 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
11691 recompute_tree_invariant_for_addr_expr (*rhs_p
);
11693 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
11694 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
11696 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
11697 TREE_VEC_ELT (*rhs_p
, 1)
11698 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
11699 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
11700 TREE_VEC_ELT (*rhs_p
, 2)
11701 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
11703 else if (!is_gimple_min_invariant (*rhs_p
))
11704 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11705 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
11706 recompute_tree_invariant_for_addr_expr (*rhs_p
);
11708 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
11709 if (!is_gimple_min_invariant (*rhs_p
))
11710 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11713 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
11715 gimple_seq_add_seq (&body
, cnt_list
);
11717 /* Once lowered, extract the bounds and clauses. */
11718 omp_extract_for_data (stmt
, &fd
, NULL
);
11720 if (is_gimple_omp_oacc (ctx
->stmt
)
11721 && !ctx_in_oacc_kernels_region (ctx
))
11722 lower_oacc_head_tail (gimple_location (stmt
),
11723 gimple_omp_for_clauses (stmt
), private_marker
,
11724 &oacc_head
, &oacc_tail
, ctx
);
11726 /* Add OpenACC partitioning and reduction markers just before the loop. */
11728 gimple_seq_add_seq (&body
, oacc_head
);
11730 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
11732 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
11733 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11734 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11735 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
11737 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
11738 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
11739 OMP_CLAUSE_LINEAR_STEP (c
)
11740 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
11744 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
11745 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
11746 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
11749 gimple_seq_add_stmt (&body
, stmt
);
11750 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
11753 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
11756 /* After the loop, add exit clauses. */
11757 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
11761 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
11762 gcall
*g
= gimple_build_call (fndecl
, 0);
11763 gimple_seq_add_stmt (&body
, g
);
11764 gimple_seq_add_seq (&body
, clist
);
11765 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
11766 g
= gimple_build_call (fndecl
, 0);
11767 gimple_seq_add_stmt (&body
, g
);
11770 if (ctx
->cancellable
)
11771 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
11773 gimple_seq_add_seq (&body
, dlist
);
11777 gimple_seq_add_seq (&tred_ilist
, body
);
11781 body
= maybe_catch_exception (body
);
11783 /* Region exit marker goes at the end of the loop body. */
11784 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
11785 gimple_seq_add_stmt (&body
, g
);
11787 gimple_seq_add_seq (&body
, tred_dlist
);
11789 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
11792 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
11794 /* Add OpenACC joining and reduction markers just after the loop. */
11796 gimple_seq_add_seq (&body
, oacc_tail
);
11798 pop_gimplify_context (new_stmt
);
11800 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
11801 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
11802 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
11803 if (BLOCK_VARS (block
))
11804 TREE_USED (block
) = 1;
11806 gimple_bind_set_body (new_stmt
, body
);
11807 gimple_omp_set_body (stmt
, NULL
);
11808 gimple_omp_for_set_pre_body (stmt
, NULL
);
11811 /* Callback for walk_stmts. Check if the current statement only contains
11812 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11815 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
11816 bool *handled_ops_p
,
11817 struct walk_stmt_info
*wi
)
11819 int *info
= (int *) wi
->info
;
11820 gimple
*stmt
= gsi_stmt (*gsi_p
);
11822 *handled_ops_p
= true;
11823 switch (gimple_code (stmt
))
11829 case GIMPLE_OMP_FOR
:
11830 case GIMPLE_OMP_SECTIONS
:
11831 *info
= *info
== 0 ? 1 : -1;
11840 struct omp_taskcopy_context
11842 /* This field must be at the beginning, as we do "inheritance": Some
11843 callback functions for tree-inline.c (e.g., omp_copy_decl)
11844 receive a copy_body_data pointer that is up-casted to an
11845 omp_context pointer. */
11851 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
11853 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
11855 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
11856 return create_tmp_var (TREE_TYPE (var
));
11862 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
11864 tree name
, new_fields
= NULL
, type
, f
;
11866 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
11867 name
= DECL_NAME (TYPE_NAME (orig_type
));
11868 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
11869 TYPE_DECL
, name
, type
);
11870 TYPE_NAME (type
) = name
;
11872 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
11874 tree new_f
= copy_node (f
);
11875 DECL_CONTEXT (new_f
) = type
;
11876 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
11877 TREE_CHAIN (new_f
) = new_fields
;
11878 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
11879 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
11880 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
11882 new_fields
= new_f
;
11883 tcctx
->cb
.decl_map
->put (f
, new_f
);
11885 TYPE_FIELDS (type
) = nreverse (new_fields
);
11886 layout_type (type
);
11890 /* Create task copyfn. */
/* NOTE(review): this chunk is an extraction-garbled copy of GCC's
   omp-low.c.  Each original source line is split across several text
   lines and the original line numbers (e.g. "11893") are embedded in
   the content.  Gaps in those embedded numbers show that some original
   lines were dropped (the function's return type, closing braces,
   'break;' statements, and the declarations of N, P, IND, DF and PTR
   used below).  Restore from the upstream file rather than hand-editing
   this text.
   Purpose (grounded in the visible code): populate CHILD_FN — obtained
   from gimple_omp_task_copy_fn (TASK_STMT) — with statements that copy
   task data from the sender record (reached through SARG) into the
   task's own record (reached through ARG), handling firstprivate,
   shared, reduction, _looptemp_, _reductemp_ and private clauses; the
   accumulated statement LIST becomes the body of BIND.  */
11893 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
11895 struct function
*child_cfun
;
11896 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
11897 tree record_type
, srecord_type
, bind
, list
;
11898 bool record_needs_remap
= false, srecord_needs_remap
= false;
11900 struct omp_taskcopy_context tcctx
;
11901 location_t loc
= gimple_location (task_stmt
);
11902 size_t looptempno
= 0;
11904 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
11905 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
/* The copy function must not have been expanded to CFG form yet.  */
11906 gcc_assert (child_cfun
->cfg
== NULL
);
11907 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
11909 /* Reset DECL_CONTEXT on function arguments. */
11910 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
11911 DECL_CONTEXT (t
) = child_fn
;
11913 /* Populate the function. */
11914 push_gimplify_context ();
11915 push_cfun (child_cfun
);
11917 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
11918 TREE_SIDE_EFFECTS (bind
) = 1;
11920 DECL_SAVED_TREE (child_fn
) = bind
;
11921 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
11923 /* Remap src and dst argument types if needed. */
11924 record_type
= ctx
->record_type
;
11925 srecord_type
= ctx
->srecord_type
;
/* A field whose type is variably modified in the source function needs
   its type remapped into the copy function's context.  */
11926 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
11927 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
11929 record_needs_remap
= true;
11932 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
11933 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
11935 srecord_needs_remap
= true;
/* Set up a copy_body callback context (TCCTX) mapping decls from the
   source function into CHILD_FN, then remap whichever record types
   need it.  */
11939 if (record_needs_remap
|| srecord_needs_remap
)
11941 memset (&tcctx
, '\0', sizeof (tcctx
));
11942 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
11943 tcctx
.cb
.dst_fn
= child_fn
;
11944 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
11945 gcc_checking_assert (tcctx
.cb
.src_node
);
11946 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
11947 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
11948 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
11949 tcctx
.cb
.eh_lp_nr
= 0;
11950 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
11951 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
11954 if (record_needs_remap
)
11955 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
11956 if (srecord_needs_remap
)
11957 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
/* A null decl_map below means "no remapping was necessary".  */
11960 tcctx
.cb
.decl_map
= NULL
;
/* ARG points at the task's own record, SARG at the sender record.  */
11962 arg
= DECL_ARGUMENTS (child_fn
);
11963 TREE_TYPE (arg
) = build_pointer_type (record_type
);
11964 sarg
= DECL_CHAIN (arg
);
11965 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
11967 /* First pass: initialize temporaries used in record_type and srecord_type
11968 sizes and field offsets. */
11969 if (tcctx
.cb
.decl_map
)
11970 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11971 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11975 decl
= OMP_CLAUSE_DECL (c
);
11976 p
= tcctx
.cb
.decl_map
->get (decl
);
11979 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
11980 sf
= (tree
) n
->value
;
11981 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11982 src
= build_simple_mem_ref_loc (loc
, sarg
);
11983 src
= omp_build_component_ref (src
, sf
);
11984 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
11985 append_to_statement_list (t
, &list
);
11988 /* Second pass: copy shared var pointers and copy construct non-VLA
11989 firstprivate vars. */
11990 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11991 switch (OMP_CLAUSE_CODE (c
))
11993 splay_tree_key key
;
11994 case OMP_CLAUSE_SHARED
:
11995 decl
= OMP_CLAUSE_DECL (c
);
/* Shared-firstprivate vars are keyed by &DECL_UID rather than the
   decl itself (mirrors how the fields were registered).  */
11996 key
= (splay_tree_key
) decl
;
11997 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
11998 key
= (splay_tree_key
) &DECL_UID (decl
);
11999 n
= splay_tree_lookup (ctx
->field_map
, key
);
12002 f
= (tree
) n
->value
;
12003 if (tcctx
.cb
.decl_map
)
12004 f
= *tcctx
.cb
.decl_map
->get (f
);
12005 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
12006 sf
= (tree
) n
->value
;
12007 if (tcctx
.cb
.decl_map
)
12008 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12009 src
= build_simple_mem_ref_loc (loc
, sarg
);
12010 src
= omp_build_component_ref (src
, sf
);
12011 dst
= build_simple_mem_ref_loc (loc
, arg
);
12012 dst
= omp_build_component_ref (dst
, f
);
12013 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12014 append_to_statement_list (t
, &list
);
12016 case OMP_CLAUSE_REDUCTION
:
12017 case OMP_CLAUSE_IN_REDUCTION
:
/* Strip MEM_REF / POINTER_PLUS_EXPR / INDIRECT_REF / ADDR_EXPR
   wrappers to reach the underlying decl used as the map key.  */
12018 decl
= OMP_CLAUSE_DECL (c
);
12019 if (TREE_CODE (decl
) == MEM_REF
)
12021 decl
= TREE_OPERAND (decl
, 0);
12022 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
12023 decl
= TREE_OPERAND (decl
, 0);
12024 if (TREE_CODE (decl
) == INDIRECT_REF
12025 || TREE_CODE (decl
) == ADDR_EXPR
)
12026 decl
= TREE_OPERAND (decl
, 0);
12028 key
= (splay_tree_key
) decl
;
12029 n
= splay_tree_lookup (ctx
->field_map
, key
);
12032 f
= (tree
) n
->value
;
12033 if (tcctx
.cb
.decl_map
)
12034 f
= *tcctx
.cb
.decl_map
->get (f
);
12035 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
12036 sf
= (tree
) n
->value
;
12037 if (tcctx
.cb
.decl_map
)
12038 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12039 src
= build_simple_mem_ref_loc (loc
, sarg
);
12040 src
= omp_build_component_ref (src
, sf
);
12041 if (decl
!= OMP_CLAUSE_DECL (c
)
12042 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
12043 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
12044 src
= build_simple_mem_ref_loc (loc
, src
);
12045 dst
= build_simple_mem_ref_loc (loc
, arg
);
12046 dst
= omp_build_component_ref (dst
, f
);
12047 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12048 append_to_statement_list (t
, &list
);
12050 case OMP_CLAUSE__LOOPTEMP_
:
12051 /* Fields for first two _looptemp_ clauses are initialized by
12052 GOMP_taskloop*, the rest are handled like firstprivate. */
/* NOTE(review): the looptempno increment and the skip for the first
   two clauses are among the lines dropped by the extraction (gap
   between embedded lines 12053 and 12059); remaining _looptemp_
   clauses presumably fall through to the firstprivate handling below
   — confirm against upstream.  */
12053 if (looptempno
< 2)
12059 case OMP_CLAUSE__REDUCTEMP_
:
12060 case OMP_CLAUSE_FIRSTPRIVATE
:
12061 decl
= OMP_CLAUSE_DECL (c
);
/* Variable-sized firstprivates are handled in the last pass.  */
12062 if (is_variable_sized (decl
))
12064 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12067 f
= (tree
) n
->value
;
12068 if (tcctx
.cb
.decl_map
)
12069 f
= *tcctx
.cb
.decl_map
->get (f
);
12070 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12073 sf
= (tree
) n
->value
;
12074 if (tcctx
.cb
.decl_map
)
12075 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12076 src
= build_simple_mem_ref_loc (loc
, sarg
);
12077 src
= omp_build_component_ref (src
, sf
);
12078 if (use_pointer_for_field (decl
, NULL
)
12079 || omp_privatize_by_reference (decl
))
12080 src
= build_simple_mem_ref_loc (loc
, src
);
12084 dst
= build_simple_mem_ref_loc (loc
, arg
);
12085 dst
= omp_build_component_ref (dst
, f
);
12086 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
12087 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
/* If the decl has an allocate clause, obtain its storage via
   GOMP_alloc with the requested allocator and alignment instead of
   using the record field directly.  */
12090 if (ctx
->allocate_map
)
12091 if (tree
*allocatorp
= ctx
->allocate_map
->get (decl
))
12093 tree allocator
= *allocatorp
;
12094 HOST_WIDE_INT ialign
= 0;
/* A TREE_LIST packs (alignment, allocator) into value/purpose.  */
12095 if (TREE_CODE (allocator
) == TREE_LIST
)
12097 ialign
= tree_to_uhwi (TREE_VALUE (allocator
));
12098 allocator
= TREE_PURPOSE (allocator
);
12100 if (TREE_CODE (allocator
) != INTEGER_CST
)
12102 n
= splay_tree_lookup (ctx
->sfield_map
,
12103 (splay_tree_key
) allocator
);
12104 allocator
= (tree
) n
->value
;
12105 if (tcctx
.cb
.decl_map
)
12106 allocator
= *tcctx
.cb
.decl_map
->get (allocator
);
12107 tree a
= build_simple_mem_ref_loc (loc
, sarg
);
12108 allocator
= omp_build_component_ref (a
, allocator
);
12110 allocator
= fold_convert (pointer_sized_int_node
, allocator
);
12111 tree a
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
/* NOTE(review): the embedded line 12113 (part of the align
   computation) is missing from this extraction.  */
12112 tree align
= build_int_cst (size_type_node
,
12114 DECL_ALIGN_UNIT (decl
)));
12115 tree sz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst
)));
12116 tree ptr
= build_call_expr_loc (loc
, a
, 3, align
, sz
,
12118 ptr
= fold_convert (TREE_TYPE (dst
), ptr
);
12119 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, ptr
);
12120 append_to_statement_list (t
, &list
);
12121 dst
= build_simple_mem_ref_loc (loc
, dst
);
/* Firstprivate copy-construction goes through the language hook so
   C++ copy constructors etc. are honored.  */
12123 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
12125 append_to_statement_list (t
, &list
);
12127 case OMP_CLAUSE_PRIVATE
:
12128 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
12130 decl
= OMP_CLAUSE_DECL (c
);
12131 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12132 f
= (tree
) n
->value
;
12133 if (tcctx
.cb
.decl_map
)
12134 f
= *tcctx
.cb
.decl_map
->get (f
);
12135 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12138 sf
= (tree
) n
->value
;
12139 if (tcctx
.cb
.decl_map
)
12140 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12141 src
= build_simple_mem_ref_loc (loc
, sarg
);
12142 src
= omp_build_component_ref (src
, sf
);
12143 if (use_pointer_for_field (decl
, NULL
))
12144 src
= build_simple_mem_ref_loc (loc
, src
);
12148 dst
= build_simple_mem_ref_loc (loc
, arg
);
12149 dst
= omp_build_component_ref (dst
, f
);
12150 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12151 append_to_statement_list (t
, &list
);
12157 /* Last pass: handle VLA firstprivates. */
12158 if (tcctx
.cb
.decl_map
)
12159 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12160 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12164 decl
= OMP_CLAUSE_DECL (c
);
12165 if (!is_variable_sized (decl
))
12167 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12170 f
= (tree
) n
->value
;
12171 f
= *tcctx
.cb
.decl_map
->get (f
);
/* A VLA firstprivate is represented as *IND; the pointed-to decl
   (TREE_OPERAND (ind, 0)) carries the record fields.  */
12172 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
12173 ind
= DECL_VALUE_EXPR (decl
);
12174 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
12175 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
12176 n
= splay_tree_lookup (ctx
->sfield_map
,
12177 (splay_tree_key
) TREE_OPERAND (ind
, 0));
12178 sf
= (tree
) n
->value
;
12179 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12180 src
= build_simple_mem_ref_loc (loc
, sarg
);
12181 src
= omp_build_component_ref (src
, sf
);
12182 src
= build_simple_mem_ref_loc (loc
, src
);
12183 dst
= build_simple_mem_ref_loc (loc
, arg
);
12184 dst
= omp_build_component_ref (dst
, f
);
12185 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
12186 append_to_statement_list (t
, &list
);
/* Also store the address of the copied data into the pointer field
   DF so the task body's *IND value expression finds it.  */
12187 n
= splay_tree_lookup (ctx
->field_map
,
12188 (splay_tree_key
) TREE_OPERAND (ind
, 0));
12189 df
= (tree
) n
->value
;
12190 df
= *tcctx
.cb
.decl_map
->get (df
);
12191 ptr
= build_simple_mem_ref_loc (loc
, arg
);
12192 ptr
= omp_build_component_ref (ptr
, df
);
12193 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
12194 build_fold_addr_expr_loc (loc
, dst
));
12195 append_to_statement_list (t
, &list
);
/* Finish the copy function: explicit return, free the decl map, and
   install LIST as the body of BIND.  */
12198 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
12199 append_to_statement_list (t
, &list
);
12201 if (tcctx
.cb
.decl_map
)
12202 delete tcctx
.cb
.decl_map
;
12203 pop_gimplify_context (NULL
);
12204 BIND_EXPR_BODY (bind
) = list
;
/* NOTE(review): extraction-garbled text — the function's return type,
   the declarations of CLAUSES, C, G and R, the closing braces and the
   per-case bodies ('break;'s and cnt[] increments) are missing (gaps in
   the embedded upstream line numbers).  Verify against upstream
   omp-low.c before editing.
   Purpose (grounded in the visible code): lower the OMP_CLAUSE_DEPEND
   clauses found in *PCLAUSES into a temporary pointer ARRAY handed to
   the runtime.  CNT[] tallies clauses per depend kind; the array's
   leading slots are initialized in *ISEQ with counts, the remaining
   slots with the gimplified addresses of the dependence variables.  The
   original clauses are then replaced by a single OMP_CLAUSE_DEPEND_LAST
   clause whose decl is &ARRAY, and ARRAY is clobbered in *OSEQ once the
   construct is done with it.  */
12209 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
12213 size_t cnt
[4] = { 0, 0, 0, 0 }, idx
= 2, i
;
12215 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
12216 gcc_assert (clauses
);
/* Counting pass: classify each depend clause by kind.  The increments
   themselves are among the lines lost in this extraction.  */
12217 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12218 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
12219 switch (OMP_CLAUSE_DEPEND_KIND (c
))
12221 case OMP_CLAUSE_DEPEND_LAST
:
12222 /* Lowering already done at gimplification. */
12224 case OMP_CLAUSE_DEPEND_IN
:
12227 case OMP_CLAUSE_DEPEND_OUT
:
12228 case OMP_CLAUSE_DEPEND_INOUT
:
12231 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
12234 case OMP_CLAUSE_DEPEND_DEPOBJ
:
12237 case OMP_CLAUSE_DEPEND_SOURCE
:
12238 case OMP_CLAUSE_DEPEND_SINK
:
12241 gcc_unreachable ();
12243 if (cnt
[1] || cnt
[3])
12245 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3];
/* The array holds IDX header slots followed by TOTAL addresses.  */
12246 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
);
12247 tree array
= create_tmp_var (type
);
12248 TREE_ADDRESSABLE (array
) = 1;
/* array[0] = 0 and array[1] = total; further header slots carry the
   per-kind counts written by the loop below.  */
12249 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
12253 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
12254 gimple_seq_add_stmt (iseq
, g
);
12255 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
12258 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
12259 gimple_seq_add_stmt (iseq
, g
);
12260 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
12262 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
12263 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
12264 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
12265 gimple_seq_add_stmt (iseq
, g
);
/* Filling pass: revisit the clauses once per kind bucket, gimplify
   each dependence address and store it at array[idx++].  */
12267 for (i
= 0; i
< 4; i
++)
12271 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12272 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
12276 switch (OMP_CLAUSE_DEPEND_KIND (c
))
12278 case OMP_CLAUSE_DEPEND_IN
:
12282 case OMP_CLAUSE_DEPEND_OUT
:
12283 case OMP_CLAUSE_DEPEND_INOUT
:
12287 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
12291 case OMP_CLAUSE_DEPEND_DEPOBJ
:
12296 gcc_unreachable ();
12298 tree t
= OMP_CLAUSE_DECL (c
);
12299 t
= fold_convert (ptr_type_node
, t
);
12300 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
12301 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12302 NULL_TREE
, NULL_TREE
);
12303 g
= gimple_build_assign (r
, t
);
12304 gimple_seq_add_stmt (iseq
, g
);
/* Replace the depend clauses with one DEPEND_LAST clause carrying the
   address of the lowered array.  */
12307 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
12308 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
12309 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
12310 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
/* The array is dead after the construct; clobber it in *OSEQ so later
   passes can reuse its stack slot.  */
12312 tree clobber
= build_clobber (type
);
12313 g
= gimple_build_assign (array
, clobber
);
12314 gimple_seq_add_stmt (oseq
, g
);
12317 /* Lower the OpenMP parallel or task directive in the current statement
12318 in GSI_P. CTX holds context information for the directive. */
/* NOTE(review): extraction-garbled text — the function's return type,
   the declarations of CLAUSES and CHILD_FN, several braces and early
   returns are missing (gaps in the embedded upstream line numbers).
   Verify against upstream omp-low.c before editing.  */
12321 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12325 gimple
*stmt
= gsi_stmt (*gsi_p
);
12326 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
12327 gimple_seq par_body
;
12328 location_t loc
= gimple_location (stmt
);
12330 clauses
= gimple_omp_taskreg_clauses (stmt
);
/* NOTE(review): the taskwait branch's body (embedded lines 12333-12339)
   is missing from this extraction.  */
12331 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12332 && gimple_omp_task_taskwait_p (stmt
))
12340 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
12341 par_body
= gimple_bind_body (par_bind
);
12343 child_fn
= ctx
->cb
.dst_fn
;
/* For a non-combined parallel, walk the body to detect whether it is in
   fact a combined parallel+workshare and mark it so.  */
12344 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
12345 && !gimple_omp_parallel_combined_p (stmt
))
12347 struct walk_stmt_info wi
;
12350 memset (&wi
, 0, sizeof (wi
));
12352 wi
.val_only
= true;
12353 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
12355 gimple_omp_parallel_set_combined_p (stmt
, true);
/* Lower depend clauses on a task into runtime-visible form; their setup
   and teardown sequences go around the construct via DEP_BIND.  */
12357 gimple_seq dep_ilist
= NULL
;
12358 gimple_seq dep_olist
= NULL
;
12359 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12360 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
12362 push_gimplify_context ();
12363 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12364 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
12365 &dep_ilist
, &dep_olist
);
/* A taskwait-with-depend task needs no body lowering: just wrap the
   statement between the depend setup/teardown and return.  */
12368 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12369 && gimple_omp_task_taskwait_p (stmt
))
12373 gsi_replace (gsi_p
, dep_bind
, true);
12374 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12375 gimple_bind_add_stmt (dep_bind
, stmt
);
12376 gimple_bind_add_seq (dep_bind
, dep_olist
);
12377 pop_gimplify_context (dep_bind
);
/* A task with a sender record type needs an explicit copy function.  */
12382 if (ctx
->srecord_type
)
12383 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
/* Task reductions on taskloop, and _reductemp_ on parallel, are lowered
   into TSKRED_ILIST/TSKRED_OLIST around the construct.  */
12385 gimple_seq tskred_ilist
= NULL
;
12386 gimple_seq tskred_olist
= NULL
;
12387 if ((is_task_ctx (ctx
)
12388 && gimple_omp_task_taskloop_p (ctx
->stmt
)
12389 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
12390 OMP_CLAUSE_REDUCTION
))
12391 || (is_parallel_ctx (ctx
)
12392 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
12393 OMP_CLAUSE__REDUCTEMP_
)))
12395 if (dep_bind
== NULL
)
12397 push_gimplify_context ();
12398 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12400 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
12402 gimple_omp_taskreg_clauses (ctx
->stmt
),
12403 &tskred_ilist
, &tskred_olist
);
12406 push_gimplify_context ();
/* Lower the data-sharing clauses and the construct body itself.  */
12408 gimple_seq par_olist
= NULL
;
12409 gimple_seq par_ilist
= NULL
;
12410 gimple_seq par_rlist
= NULL
;
12411 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
12412 lower_omp (&par_body
, ctx
);
12413 if (gimple_code (stmt
) != GIMPLE_OMP_TASK
)
12414 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
12416 /* Declare all the variables created by mapping and the variables
12417 declared in the scope of the parallel body. */
12418 record_vars_into (ctx
->block_vars
, child_fn
);
12419 maybe_remove_omp_member_access_dummy_vars (par_bind
);
12420 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
/* Materialize the outgoing data block (.omp_data_o) used to marshal the
   shared/firstprivate values to the outlined function.  */
12422 if (ctx
->record_type
)
12425 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
12426 : ctx
->record_type
, ".omp_data_o");
12427 DECL_NAMELESS (ctx
->sender_decl
) = 1;
12428 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
12429 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
12432 gimple_seq olist
= NULL
;
12433 gimple_seq ilist
= NULL
;
12434 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
12435 lower_send_shared_vars (&ilist
, &olist
, ctx
);
/* Clobber the sender record after the construct so its slot is dead.  */
12437 if (ctx
->record_type
)
12439 tree clobber
= build_clobber (TREE_TYPE (ctx
->sender_decl
));
12440 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
12444 /* Once all the expansions are done, sequence all the different
12445 fragments inside gimple_omp_body. */
12447 gimple_seq new_body
= NULL
;
12449 if (ctx
->record_type
)
12451 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
12452 /* fixup_child_record_type might have changed receiver_decl's type. */
12453 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
12454 gimple_seq_add_stmt (&new_body
,
12455 gimple_build_assign (ctx
->receiver_decl
, t
));
12458 gimple_seq_add_seq (&new_body
, par_ilist
);
12459 gimple_seq_add_seq (&new_body
, par_body
);
12460 gimple_seq_add_seq (&new_body
, par_rlist
);
12461 if (ctx
->cancellable
)
12462 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
12463 gimple_seq_add_seq (&new_body
, par_olist
);
12464 new_body
= maybe_catch_exception (new_body
);
12465 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
12466 gimple_seq_add_stmt (&new_body
,
12467 gimple_build_omp_continue (integer_zero_node
,
12468 integer_zero_node
));
12469 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
12470 gimple_omp_set_body (stmt
, new_body
);
12472 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
12473 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12475 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
/* Replace the original statement with BIND (nested in DEP_BIND when
   dependences or task reductions were lowered) and attach the send
   sequences around the construct.  */
12476 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
12477 gimple_bind_add_seq (bind
, ilist
);
12478 gimple_bind_add_stmt (bind
, stmt
);
12479 gimple_bind_add_seq (bind
, olist
);
12481 pop_gimplify_context (NULL
);
12485 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12486 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
12487 gimple_bind_add_stmt (dep_bind
, bind
);
12488 gimple_bind_add_seq (dep_bind
, tskred_olist
);
12489 gimple_bind_add_seq (dep_bind
, dep_olist
);
12490 pop_gimplify_context (dep_bind
);
12494 /* Lower the GIMPLE_OMP_TARGET in the current statement
12495 in GSI_P. CTX holds context information for the directive. */
12498 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12501 tree child_fn
, t
, c
;
12502 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
12503 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
12504 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
12505 location_t loc
= gimple_location (stmt
);
12506 bool offloaded
, data_region
;
12507 unsigned int map_cnt
= 0;
12508 tree in_reduction_clauses
= NULL_TREE
;
12510 offloaded
= is_gimple_omp_offloaded (stmt
);
12511 switch (gimple_omp_target_kind (stmt
))
12513 case GF_OMP_TARGET_KIND_REGION
:
12515 q
= &in_reduction_clauses
;
12516 for (p
= gimple_omp_target_clauses_ptr (stmt
); *p
; )
12517 if (OMP_CLAUSE_CODE (*p
) == OMP_CLAUSE_IN_REDUCTION
)
12520 q
= &OMP_CLAUSE_CHAIN (*q
);
12521 *p
= OMP_CLAUSE_CHAIN (*p
);
12524 p
= &OMP_CLAUSE_CHAIN (*p
);
12526 *p
= in_reduction_clauses
;
12528 case GF_OMP_TARGET_KIND_UPDATE
:
12529 case GF_OMP_TARGET_KIND_ENTER_DATA
:
12530 case GF_OMP_TARGET_KIND_EXIT_DATA
:
12531 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
12532 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
12533 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
12534 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
12535 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
12536 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
12537 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
12538 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
12539 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
12540 data_region
= false;
12542 case GF_OMP_TARGET_KIND_DATA
:
12543 case GF_OMP_TARGET_KIND_OACC_DATA
:
12544 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
12545 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
12546 data_region
= true;
12549 gcc_unreachable ();
12552 clauses
= gimple_omp_target_clauses (stmt
);
12554 gimple_seq dep_ilist
= NULL
;
12555 gimple_seq dep_olist
= NULL
;
12556 bool has_depend
= omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
) != NULL_TREE
;
12557 if (has_depend
|| in_reduction_clauses
)
12559 push_gimplify_context ();
12560 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12562 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
12563 &dep_ilist
, &dep_olist
);
12564 if (in_reduction_clauses
)
12565 lower_rec_input_clauses (in_reduction_clauses
, &dep_ilist
, &dep_olist
,
12573 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
12574 tgt_body
= gimple_bind_body (tgt_bind
);
12576 else if (data_region
)
12577 tgt_body
= gimple_omp_body (stmt
);
12578 child_fn
= ctx
->cb
.dst_fn
;
12580 push_gimplify_context ();
12583 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12584 switch (OMP_CLAUSE_CODE (c
))
12590 case OMP_CLAUSE_MAP
:
12592 /* First check what we're prepared to handle in the following. */
12593 switch (OMP_CLAUSE_MAP_KIND (c
))
12595 case GOMP_MAP_ALLOC
:
12597 case GOMP_MAP_FROM
:
12598 case GOMP_MAP_TOFROM
:
12599 case GOMP_MAP_POINTER
:
12600 case GOMP_MAP_TO_PSET
:
12601 case GOMP_MAP_DELETE
:
12602 case GOMP_MAP_RELEASE
:
12603 case GOMP_MAP_ALWAYS_TO
:
12604 case GOMP_MAP_ALWAYS_FROM
:
12605 case GOMP_MAP_ALWAYS_TOFROM
:
12606 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
12607 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
12608 case GOMP_MAP_STRUCT
:
12609 case GOMP_MAP_ALWAYS_POINTER
:
12610 case GOMP_MAP_ATTACH
:
12611 case GOMP_MAP_DETACH
:
12613 case GOMP_MAP_IF_PRESENT
:
12614 case GOMP_MAP_FORCE_ALLOC
:
12615 case GOMP_MAP_FORCE_TO
:
12616 case GOMP_MAP_FORCE_FROM
:
12617 case GOMP_MAP_FORCE_TOFROM
:
12618 case GOMP_MAP_FORCE_PRESENT
:
12619 case GOMP_MAP_FORCE_DEVICEPTR
:
12620 case GOMP_MAP_DEVICE_RESIDENT
:
12621 case GOMP_MAP_LINK
:
12622 case GOMP_MAP_FORCE_DETACH
:
12623 gcc_assert (is_gimple_omp_oacc (stmt
));
12626 gcc_unreachable ();
12630 case OMP_CLAUSE_TO
:
12631 case OMP_CLAUSE_FROM
:
12633 var
= OMP_CLAUSE_DECL (c
);
12636 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
12637 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12638 && (OMP_CLAUSE_MAP_KIND (c
)
12639 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
12644 if (DECL_SIZE (var
)
12645 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12647 tree var2
= DECL_VALUE_EXPR (var
);
12648 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12649 var2
= TREE_OPERAND (var2
, 0);
12650 gcc_assert (DECL_P (var2
));
12655 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12656 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12657 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
12659 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12661 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
12662 && varpool_node::get_create (var
)->offloadable
)
12665 tree type
= build_pointer_type (TREE_TYPE (var
));
12666 tree new_var
= lookup_decl (var
, ctx
);
12667 x
= create_tmp_var_raw (type
, get_name (new_var
));
12668 gimple_add_tmp_var (x
);
12669 x
= build_simple_mem_ref (x
);
12670 SET_DECL_VALUE_EXPR (new_var
, x
);
12671 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12676 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12677 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12678 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
12679 && is_omp_target (stmt
))
12681 gcc_assert (maybe_lookup_field (c
, ctx
));
12686 if (!maybe_lookup_field (var
, ctx
))
12689 /* Don't remap compute constructs' reduction variables, because the
12690 intermediate result must be local to each gang. */
12691 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12692 && is_gimple_omp_oacc (ctx
->stmt
)
12693 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
12695 x
= build_receiver_ref (var
, true, ctx
);
12696 tree new_var
= lookup_decl (var
, ctx
);
12698 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12699 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
12700 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12701 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12702 x
= build_simple_mem_ref (x
);
12703 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12705 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
12706 if (omp_privatize_by_reference (new_var
)
12707 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
12708 || DECL_BY_REFERENCE (var
)))
12710 /* Create a local object to hold the instance
12712 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
12713 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
12714 tree inst
= create_tmp_var (type
, id
);
12715 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
12716 x
= build_fold_addr_expr (inst
);
12718 gimplify_assign (new_var
, x
, &fplist
);
12720 else if (DECL_P (new_var
))
12722 SET_DECL_VALUE_EXPR (new_var
, x
);
12723 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12726 gcc_unreachable ();
12731 case OMP_CLAUSE_FIRSTPRIVATE
:
12732 gcc_checking_assert (offloaded
);
12733 if (is_gimple_omp_oacc (ctx
->stmt
))
12735 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12736 gcc_checking_assert (!is_oacc_kernels (ctx
));
12737 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12738 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12740 goto oacc_firstprivate
;
12743 var
= OMP_CLAUSE_DECL (c
);
12744 if (!omp_privatize_by_reference (var
)
12745 && !is_gimple_reg_type (TREE_TYPE (var
)))
12747 tree new_var
= lookup_decl (var
, ctx
);
12748 if (is_variable_sized (var
))
12750 tree pvar
= DECL_VALUE_EXPR (var
);
12751 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12752 pvar
= TREE_OPERAND (pvar
, 0);
12753 gcc_assert (DECL_P (pvar
));
12754 tree new_pvar
= lookup_decl (pvar
, ctx
);
12755 x
= build_fold_indirect_ref (new_pvar
);
12756 TREE_THIS_NOTRAP (x
) = 1;
12759 x
= build_receiver_ref (var
, true, ctx
);
12760 SET_DECL_VALUE_EXPR (new_var
, x
);
12761 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12765 case OMP_CLAUSE_PRIVATE
:
12766 gcc_checking_assert (offloaded
);
12767 if (is_gimple_omp_oacc (ctx
->stmt
))
12769 /* No 'private' clauses on OpenACC 'kernels'. */
12770 gcc_checking_assert (!is_oacc_kernels (ctx
));
12771 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12772 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12776 var
= OMP_CLAUSE_DECL (c
);
12777 if (is_variable_sized (var
))
12779 tree new_var
= lookup_decl (var
, ctx
);
12780 tree pvar
= DECL_VALUE_EXPR (var
);
12781 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12782 pvar
= TREE_OPERAND (pvar
, 0);
12783 gcc_assert (DECL_P (pvar
));
12784 tree new_pvar
= lookup_decl (pvar
, ctx
);
12785 x
= build_fold_indirect_ref (new_pvar
);
12786 TREE_THIS_NOTRAP (x
) = 1;
12787 SET_DECL_VALUE_EXPR (new_var
, x
);
12788 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12792 case OMP_CLAUSE_USE_DEVICE_PTR
:
12793 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12794 case OMP_CLAUSE_IS_DEVICE_PTR
:
12795 var
= OMP_CLAUSE_DECL (c
);
12797 if (is_variable_sized (var
))
12799 tree new_var
= lookup_decl (var
, ctx
);
12800 tree pvar
= DECL_VALUE_EXPR (var
);
12801 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12802 pvar
= TREE_OPERAND (pvar
, 0);
12803 gcc_assert (DECL_P (pvar
));
12804 tree new_pvar
= lookup_decl (pvar
, ctx
);
12805 x
= build_fold_indirect_ref (new_pvar
);
12806 TREE_THIS_NOTRAP (x
) = 1;
12807 SET_DECL_VALUE_EXPR (new_var
, x
);
12808 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12810 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12811 && !omp_privatize_by_reference (var
)
12812 && !omp_is_allocatable_or_ptr (var
)
12813 && !lang_hooks
.decls
.omp_array_data (var
, true))
12814 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12816 tree new_var
= lookup_decl (var
, ctx
);
12817 tree type
= build_pointer_type (TREE_TYPE (var
));
12818 x
= create_tmp_var_raw (type
, get_name (new_var
));
12819 gimple_add_tmp_var (x
);
12820 x
= build_simple_mem_ref (x
);
12821 SET_DECL_VALUE_EXPR (new_var
, x
);
12822 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12826 tree new_var
= lookup_decl (var
, ctx
);
12827 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
12828 gimple_add_tmp_var (x
);
12829 SET_DECL_VALUE_EXPR (new_var
, x
);
12830 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12837 target_nesting_level
++;
12838 lower_omp (&tgt_body
, ctx
);
12839 target_nesting_level
--;
12841 else if (data_region
)
12842 lower_omp (&tgt_body
, ctx
);
12846 /* Declare all the variables created by mapping and the variables
12847 declared in the scope of the target body. */
12848 record_vars_into (ctx
->block_vars
, child_fn
);
12849 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
12850 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
12855 if (ctx
->record_type
)
12858 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
12859 DECL_NAMELESS (ctx
->sender_decl
) = 1;
12860 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
12861 t
= make_tree_vec (3);
12862 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
12863 TREE_VEC_ELT (t
, 1)
12864 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
12865 ".omp_data_sizes");
12866 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
12867 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
12868 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
12869 tree tkind_type
= short_unsigned_type_node
;
12870 int talign_shift
= 8;
12871 TREE_VEC_ELT (t
, 2)
12872 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
12873 ".omp_data_kinds");
12874 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
12875 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
12876 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
12877 gimple_omp_target_set_data_arg (stmt
, t
);
12879 vec
<constructor_elt
, va_gc
> *vsize
;
12880 vec
<constructor_elt
, va_gc
> *vkind
;
12881 vec_alloc (vsize
, map_cnt
);
12882 vec_alloc (vkind
, map_cnt
);
12883 unsigned int map_idx
= 0;
12885 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12886 switch (OMP_CLAUSE_CODE (c
))
12888 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
12889 unsigned int talign
;
12894 case OMP_CLAUSE_MAP
:
12895 case OMP_CLAUSE_TO
:
12896 case OMP_CLAUSE_FROM
:
12897 oacc_firstprivate_map
:
12899 ovar
= OMP_CLAUSE_DECL (c
);
12900 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12901 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12902 || (OMP_CLAUSE_MAP_KIND (c
)
12903 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
12905 if (!DECL_P (ovar
))
12907 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12908 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
12910 nc
= OMP_CLAUSE_CHAIN (c
);
12911 gcc_checking_assert (OMP_CLAUSE_DECL (nc
)
12912 == get_base_address (ovar
));
12913 ovar
= OMP_CLAUSE_DECL (nc
);
12917 tree x
= build_sender_ref (ovar
, ctx
);
12919 if (in_reduction_clauses
12920 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12921 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
12923 v
= unshare_expr (v
);
12925 while (handled_component_p (*p
)
12926 || TREE_CODE (*p
) == INDIRECT_REF
12927 || TREE_CODE (*p
) == ADDR_EXPR
12928 || TREE_CODE (*p
) == MEM_REF
12929 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
12930 p
= &TREE_OPERAND (*p
, 0);
12932 if (is_variable_sized (d
))
12934 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
12935 d
= DECL_VALUE_EXPR (d
);
12936 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
12937 d
= TREE_OPERAND (d
, 0);
12938 gcc_assert (DECL_P (d
));
12941 = (splay_tree_key
) &DECL_CONTEXT (d
);
12942 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
12947 *p
= build_fold_indirect_ref (nd
);
12949 v
= build_fold_addr_expr_with_type (v
, ptr_type_node
);
12950 gimplify_assign (x
, v
, &ilist
);
12956 if (DECL_SIZE (ovar
)
12957 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
12959 tree ovar2
= DECL_VALUE_EXPR (ovar
);
12960 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
12961 ovar2
= TREE_OPERAND (ovar2
, 0);
12962 gcc_assert (DECL_P (ovar2
));
12965 if (!maybe_lookup_field (ovar
, ctx
)
12966 && !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12967 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12968 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)))
12972 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
12973 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
12974 talign
= DECL_ALIGN_UNIT (ovar
);
12979 if (in_reduction_clauses
12980 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12981 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
12984 if (is_variable_sized (d
))
12986 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
12987 d
= DECL_VALUE_EXPR (d
);
12988 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
12989 d
= TREE_OPERAND (d
, 0);
12990 gcc_assert (DECL_P (d
));
12993 = (splay_tree_key
) &DECL_CONTEXT (d
);
12994 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
12999 var
= build_fold_indirect_ref (nd
);
13002 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13005 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13006 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13007 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
13008 && is_omp_target (stmt
))
13010 x
= build_sender_ref (c
, ctx
);
13011 gimplify_assign (x
, build_fold_addr_expr (var
), &ilist
);
13015 x
= build_sender_ref (ovar
, ctx
);
13017 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13018 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
13019 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
13020 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
13022 gcc_assert (offloaded
);
13024 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
13025 mark_addressable (avar
);
13026 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
13027 talign
= DECL_ALIGN_UNIT (avar
);
13028 avar
= build_fold_addr_expr (avar
);
13029 gimplify_assign (x
, avar
, &ilist
);
13031 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13033 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
13034 if (!omp_privatize_by_reference (var
))
13036 if (is_gimple_reg (var
)
13037 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13038 suppress_warning (var
);
13039 var
= build_fold_addr_expr (var
);
13042 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13043 gimplify_assign (x
, var
, &ilist
);
13045 else if (is_gimple_reg (var
))
13047 gcc_assert (offloaded
);
13048 tree avar
= create_tmp_var (TREE_TYPE (var
));
13049 mark_addressable (avar
);
13050 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
13051 if (GOMP_MAP_COPY_TO_P (map_kind
)
13052 || map_kind
== GOMP_MAP_POINTER
13053 || map_kind
== GOMP_MAP_TO_PSET
13054 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13056 /* If we need to initialize a temporary
13057 with VAR because it is not addressable, and
13058 the variable hasn't been initialized yet, then
13059 we'll get a warning for the store to avar.
13060 Don't warn in that case, the mapping might
13062 suppress_warning (var
, OPT_Wuninitialized
);
13063 gimplify_assign (avar
, var
, &ilist
);
13065 avar
= build_fold_addr_expr (avar
);
13066 gimplify_assign (x
, avar
, &ilist
);
13067 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
13068 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13069 && !TYPE_READONLY (TREE_TYPE (var
)))
13071 x
= unshare_expr (x
);
13072 x
= build_simple_mem_ref (x
);
13073 gimplify_assign (var
, x
, &olist
);
13078 /* While MAP is handled explicitly by the FE,
13079 for 'target update', only the identified is passed. */
13080 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
13081 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
13082 && (omp_is_allocatable_or_ptr (var
)
13083 && omp_check_optional_argument (var
, false)))
13084 var
= build_fold_indirect_ref (var
);
13085 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
13086 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
13087 || (!omp_is_allocatable_or_ptr (var
)
13088 && !omp_check_optional_argument (var
, false)))
13089 var
= build_fold_addr_expr (var
);
13090 gimplify_assign (x
, var
, &ilist
);
13094 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13096 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13097 s
= TREE_TYPE (ovar
);
13098 if (TREE_CODE (s
) == REFERENCE_TYPE
13099 || omp_check_optional_argument (ovar
, false))
13101 s
= TYPE_SIZE_UNIT (s
);
13104 s
= OMP_CLAUSE_SIZE (c
);
13105 if (s
== NULL_TREE
)
13106 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13107 s
= fold_convert (size_type_node
, s
);
13108 purpose
= size_int (map_idx
++);
13109 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13110 if (TREE_CODE (s
) != INTEGER_CST
)
13111 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13113 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
13114 switch (OMP_CLAUSE_CODE (c
))
13116 case OMP_CLAUSE_MAP
:
13117 tkind
= OMP_CLAUSE_MAP_KIND (c
);
13118 tkind_zero
= tkind
;
13119 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
13122 case GOMP_MAP_ALLOC
:
13123 case GOMP_MAP_IF_PRESENT
:
13125 case GOMP_MAP_FROM
:
13126 case GOMP_MAP_TOFROM
:
13127 case GOMP_MAP_ALWAYS_TO
:
13128 case GOMP_MAP_ALWAYS_FROM
:
13129 case GOMP_MAP_ALWAYS_TOFROM
:
13130 case GOMP_MAP_RELEASE
:
13131 case GOMP_MAP_FORCE_TO
:
13132 case GOMP_MAP_FORCE_FROM
:
13133 case GOMP_MAP_FORCE_TOFROM
:
13134 case GOMP_MAP_FORCE_PRESENT
:
13135 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
13137 case GOMP_MAP_DELETE
:
13138 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
13142 if (tkind_zero
!= tkind
)
13144 if (integer_zerop (s
))
13145 tkind
= tkind_zero
;
13146 else if (integer_nonzerop (s
))
13147 tkind_zero
= tkind
;
13150 case OMP_CLAUSE_FIRSTPRIVATE
:
13151 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13152 tkind
= GOMP_MAP_TO
;
13153 tkind_zero
= tkind
;
13155 case OMP_CLAUSE_TO
:
13156 tkind
= GOMP_MAP_TO
;
13157 tkind_zero
= tkind
;
13159 case OMP_CLAUSE_FROM
:
13160 tkind
= GOMP_MAP_FROM
;
13161 tkind_zero
= tkind
;
13164 gcc_unreachable ();
13166 gcc_checking_assert (tkind
13167 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13168 gcc_checking_assert (tkind_zero
13169 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13170 talign
= ceil_log2 (talign
);
13171 tkind
|= talign
<< talign_shift
;
13172 tkind_zero
|= talign
<< talign_shift
;
13173 gcc_checking_assert (tkind
13174 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13175 gcc_checking_assert (tkind_zero
13176 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13177 if (tkind
== tkind_zero
)
13178 x
= build_int_cstu (tkind_type
, tkind
);
13181 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
13182 x
= build3 (COND_EXPR
, tkind_type
,
13183 fold_build2 (EQ_EXPR
, boolean_type_node
,
13184 unshare_expr (s
), size_zero_node
),
13185 build_int_cstu (tkind_type
, tkind_zero
),
13186 build_int_cstu (tkind_type
, tkind
));
13188 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
13193 case OMP_CLAUSE_FIRSTPRIVATE
:
13194 if (is_gimple_omp_oacc (ctx
->stmt
))
13195 goto oacc_firstprivate_map
;
13196 ovar
= OMP_CLAUSE_DECL (c
);
13197 if (omp_privatize_by_reference (ovar
))
13198 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13200 talign
= DECL_ALIGN_UNIT (ovar
);
13201 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13202 x
= build_sender_ref (ovar
, ctx
);
13203 tkind
= GOMP_MAP_FIRSTPRIVATE
;
13204 type
= TREE_TYPE (ovar
);
13205 if (omp_privatize_by_reference (ovar
))
13206 type
= TREE_TYPE (type
);
13207 if ((INTEGRAL_TYPE_P (type
)
13208 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
13209 || TREE_CODE (type
) == POINTER_TYPE
)
13211 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13213 if (omp_privatize_by_reference (var
))
13214 t
= build_simple_mem_ref (var
);
13215 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13216 suppress_warning (var
);
13217 if (TREE_CODE (type
) != POINTER_TYPE
)
13218 t
= fold_convert (pointer_sized_int_node
, t
);
13219 t
= fold_convert (TREE_TYPE (x
), t
);
13220 gimplify_assign (x
, t
, &ilist
);
13222 else if (omp_privatize_by_reference (var
))
13223 gimplify_assign (x
, var
, &ilist
);
13224 else if (is_gimple_reg (var
))
13226 tree avar
= create_tmp_var (TREE_TYPE (var
));
13227 mark_addressable (avar
);
13228 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13229 suppress_warning (var
);
13230 gimplify_assign (avar
, var
, &ilist
);
13231 avar
= build_fold_addr_expr (avar
);
13232 gimplify_assign (x
, avar
, &ilist
);
13236 var
= build_fold_addr_expr (var
);
13237 gimplify_assign (x
, var
, &ilist
);
13239 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
13241 else if (omp_privatize_by_reference (ovar
))
13242 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13244 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13245 s
= fold_convert (size_type_node
, s
);
13246 purpose
= size_int (map_idx
++);
13247 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13248 if (TREE_CODE (s
) != INTEGER_CST
)
13249 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13251 gcc_checking_assert (tkind
13252 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13253 talign
= ceil_log2 (talign
);
13254 tkind
|= talign
<< talign_shift
;
13255 gcc_checking_assert (tkind
13256 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13257 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13258 build_int_cstu (tkind_type
, tkind
));
13261 case OMP_CLAUSE_USE_DEVICE_PTR
:
13262 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13263 case OMP_CLAUSE_IS_DEVICE_PTR
:
13264 ovar
= OMP_CLAUSE_DECL (c
);
13265 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13267 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13269 tkind
= (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13270 ? GOMP_MAP_USE_DEVICE_PTR
: GOMP_MAP_FIRSTPRIVATE_INT
);
13271 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
13273 else if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
13275 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
13276 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
13280 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13281 x
= build_sender_ref (ovar
, ctx
);
13284 if (is_gimple_omp_oacc (ctx
->stmt
))
13286 gcc_assert (tkind
== GOMP_MAP_USE_DEVICE_PTR
);
13288 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
))
13289 tkind
= GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT
;
13292 type
= TREE_TYPE (ovar
);
13293 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13294 var
= lang_hooks
.decls
.omp_array_data (ovar
, false);
13295 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13296 && !omp_privatize_by_reference (ovar
)
13297 && !omp_is_allocatable_or_ptr (ovar
))
13298 || TREE_CODE (type
) == ARRAY_TYPE
)
13299 var
= build_fold_addr_expr (var
);
13302 if (omp_privatize_by_reference (ovar
)
13303 || omp_check_optional_argument (ovar
, false)
13304 || omp_is_allocatable_or_ptr (ovar
))
13306 type
= TREE_TYPE (type
);
13307 if (POINTER_TYPE_P (type
)
13308 && TREE_CODE (type
) != ARRAY_TYPE
13309 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13310 && !omp_is_allocatable_or_ptr (ovar
))
13311 || (omp_privatize_by_reference (ovar
)
13312 && omp_is_allocatable_or_ptr (ovar
))))
13313 var
= build_simple_mem_ref (var
);
13314 var
= fold_convert (TREE_TYPE (x
), var
);
13318 present
= omp_check_optional_argument (ovar
, true);
13321 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13322 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13323 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13324 tree new_x
= unshare_expr (x
);
13325 gimplify_expr (&present
, &ilist
, NULL
, is_gimple_val
,
13327 gcond
*cond
= gimple_build_cond_from_tree (present
,
13330 gimple_seq_add_stmt (&ilist
, cond
);
13331 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_label
));
13332 gimplify_assign (new_x
, null_pointer_node
, &ilist
);
13333 gimple_seq_add_stmt (&ilist
, gimple_build_goto (opt_arg_label
));
13334 gimple_seq_add_stmt (&ilist
,
13335 gimple_build_label (notnull_label
));
13336 gimplify_assign (x
, var
, &ilist
);
13337 gimple_seq_add_stmt (&ilist
,
13338 gimple_build_label (opt_arg_label
));
13341 gimplify_assign (x
, var
, &ilist
);
13343 purpose
= size_int (map_idx
++);
13344 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13345 gcc_checking_assert (tkind
13346 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13347 gcc_checking_assert (tkind
13348 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13349 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13350 build_int_cstu (tkind_type
, tkind
));
13354 gcc_assert (map_idx
== map_cnt
);
13356 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
13357 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
13358 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
13359 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
13360 for (int i
= 1; i
<= 2; i
++)
13361 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
13363 gimple_seq initlist
= NULL
;
13364 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
13365 TREE_VEC_ELT (t
, i
)),
13366 &initlist
, true, NULL_TREE
);
13367 gimple_seq_add_seq (&ilist
, initlist
);
13369 tree clobber
= build_clobber (TREE_TYPE (TREE_VEC_ELT (t
, i
)));
13370 gimple_seq_add_stmt (&olist
,
13371 gimple_build_assign (TREE_VEC_ELT (t
, i
),
13374 else if (omp_maybe_offloaded_ctx (ctx
->outer
))
13376 tree id
= get_identifier ("omp declare target");
13377 tree decl
= TREE_VEC_ELT (t
, i
);
13378 DECL_ATTRIBUTES (decl
)
13379 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
13380 varpool_node
*node
= varpool_node::get (decl
);
13383 node
->offloadable
= 1;
13384 if (ENABLE_OFFLOADING
)
13386 g
->have_offload
= true;
13387 vec_safe_push (offload_vars
, t
);
13392 tree clobber
= build_clobber (ctx
->record_type
);
13393 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
13397 /* Once all the expansions are done, sequence all the different
13398 fragments inside gimple_omp_body. */
13403 && ctx
->record_type
)
13405 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
13406 /* fixup_child_record_type might have changed receiver_decl's type. */
13407 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
13408 gimple_seq_add_stmt (&new_body
,
13409 gimple_build_assign (ctx
->receiver_decl
, t
));
13411 gimple_seq_add_seq (&new_body
, fplist
);
13413 if (offloaded
|| data_region
)
13415 tree prev
= NULL_TREE
;
13416 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13417 switch (OMP_CLAUSE_CODE (c
))
13422 case OMP_CLAUSE_FIRSTPRIVATE
:
13423 if (is_gimple_omp_oacc (ctx
->stmt
))
13425 var
= OMP_CLAUSE_DECL (c
);
13426 if (omp_privatize_by_reference (var
)
13427 || is_gimple_reg_type (TREE_TYPE (var
)))
13429 tree new_var
= lookup_decl (var
, ctx
);
13431 type
= TREE_TYPE (var
);
13432 if (omp_privatize_by_reference (var
))
13433 type
= TREE_TYPE (type
);
13434 if ((INTEGRAL_TYPE_P (type
)
13435 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
13436 || TREE_CODE (type
) == POINTER_TYPE
)
13438 x
= build_receiver_ref (var
, false, ctx
);
13439 if (TREE_CODE (type
) != POINTER_TYPE
)
13440 x
= fold_convert (pointer_sized_int_node
, x
);
13441 x
= fold_convert (type
, x
);
13442 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13444 if (omp_privatize_by_reference (var
))
13446 tree v
= create_tmp_var_raw (type
, get_name (var
));
13447 gimple_add_tmp_var (v
);
13448 TREE_ADDRESSABLE (v
) = 1;
13449 gimple_seq_add_stmt (&new_body
,
13450 gimple_build_assign (v
, x
));
13451 x
= build_fold_addr_expr (v
);
13453 gimple_seq_add_stmt (&new_body
,
13454 gimple_build_assign (new_var
, x
));
13458 bool by_ref
= !omp_privatize_by_reference (var
);
13459 x
= build_receiver_ref (var
, by_ref
, ctx
);
13460 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13462 gimple_seq_add_stmt (&new_body
,
13463 gimple_build_assign (new_var
, x
));
13466 else if (is_variable_sized (var
))
13468 tree pvar
= DECL_VALUE_EXPR (var
);
13469 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13470 pvar
= TREE_OPERAND (pvar
, 0);
13471 gcc_assert (DECL_P (pvar
));
13472 tree new_var
= lookup_decl (pvar
, ctx
);
13473 x
= build_receiver_ref (var
, false, ctx
);
13474 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13475 gimple_seq_add_stmt (&new_body
,
13476 gimple_build_assign (new_var
, x
));
13479 case OMP_CLAUSE_PRIVATE
:
13480 if (is_gimple_omp_oacc (ctx
->stmt
))
13482 var
= OMP_CLAUSE_DECL (c
);
13483 if (omp_privatize_by_reference (var
))
13485 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13486 tree new_var
= lookup_decl (var
, ctx
);
13487 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
13488 if (TREE_CONSTANT (x
))
13490 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
13492 gimple_add_tmp_var (x
);
13493 TREE_ADDRESSABLE (x
) = 1;
13494 x
= build_fold_addr_expr_loc (clause_loc
, x
);
13499 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
13500 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13501 gimple_seq_add_stmt (&new_body
,
13502 gimple_build_assign (new_var
, x
));
13505 case OMP_CLAUSE_USE_DEVICE_PTR
:
13506 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13507 case OMP_CLAUSE_IS_DEVICE_PTR
:
13509 gimple_seq assign_body
;
13510 bool is_array_data
;
13511 bool do_optional_check
;
13512 assign_body
= NULL
;
13513 do_optional_check
= false;
13514 var
= OMP_CLAUSE_DECL (c
);
13515 is_array_data
= lang_hooks
.decls
.omp_array_data (var
, true) != NULL
;
13517 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
13518 x
= build_sender_ref (is_array_data
13519 ? (splay_tree_key
) &DECL_NAME (var
)
13520 : (splay_tree_key
) &DECL_UID (var
), ctx
);
13522 x
= build_receiver_ref (var
, false, ctx
);
13526 bool is_ref
= omp_privatize_by_reference (var
);
13527 do_optional_check
= true;
13528 /* First, we copy the descriptor data from the host; then
13529 we update its data to point to the target address. */
13530 new_var
= lookup_decl (var
, ctx
);
13531 new_var
= DECL_VALUE_EXPR (new_var
);
13536 var
= build_fold_indirect_ref (var
);
13537 gimplify_expr (&var
, &assign_body
, NULL
, is_gimple_val
,
13539 v
= create_tmp_var_raw (TREE_TYPE (var
), get_name (var
));
13540 gimple_add_tmp_var (v
);
13541 TREE_ADDRESSABLE (v
) = 1;
13542 gimple_seq_add_stmt (&assign_body
,
13543 gimple_build_assign (v
, var
));
13544 tree rhs
= build_fold_addr_expr (v
);
13545 gimple_seq_add_stmt (&assign_body
,
13546 gimple_build_assign (new_var
, rhs
));
13549 gimple_seq_add_stmt (&assign_body
,
13550 gimple_build_assign (new_var
, var
));
13552 tree v2
= lang_hooks
.decls
.omp_array_data (unshare_expr (v
), false);
13554 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13555 gimple_seq_add_stmt (&assign_body
,
13556 gimple_build_assign (v2
, x
));
13558 else if (is_variable_sized (var
))
13560 tree pvar
= DECL_VALUE_EXPR (var
);
13561 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13562 pvar
= TREE_OPERAND (pvar
, 0);
13563 gcc_assert (DECL_P (pvar
));
13564 new_var
= lookup_decl (pvar
, ctx
);
13565 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13566 gimple_seq_add_stmt (&assign_body
,
13567 gimple_build_assign (new_var
, x
));
13569 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13570 && !omp_privatize_by_reference (var
)
13571 && !omp_is_allocatable_or_ptr (var
))
13572 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
13574 new_var
= lookup_decl (var
, ctx
);
13575 new_var
= DECL_VALUE_EXPR (new_var
);
13576 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
13577 new_var
= TREE_OPERAND (new_var
, 0);
13578 gcc_assert (DECL_P (new_var
));
13579 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13580 gimple_seq_add_stmt (&assign_body
,
13581 gimple_build_assign (new_var
, x
));
13585 tree type
= TREE_TYPE (var
);
13586 new_var
= lookup_decl (var
, ctx
);
13587 if (omp_privatize_by_reference (var
))
13589 type
= TREE_TYPE (type
);
13590 if (POINTER_TYPE_P (type
)
13591 && TREE_CODE (type
) != ARRAY_TYPE
13592 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13593 || (omp_privatize_by_reference (var
)
13594 && omp_is_allocatable_or_ptr (var
))))
13596 tree v
= create_tmp_var_raw (type
, get_name (var
));
13597 gimple_add_tmp_var (v
);
13598 TREE_ADDRESSABLE (v
) = 1;
13599 x
= fold_convert (type
, x
);
13600 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
,
13602 gimple_seq_add_stmt (&assign_body
,
13603 gimple_build_assign (v
, x
));
13604 x
= build_fold_addr_expr (v
);
13605 do_optional_check
= true;
13608 new_var
= DECL_VALUE_EXPR (new_var
);
13609 x
= fold_convert (TREE_TYPE (new_var
), x
);
13610 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13611 gimple_seq_add_stmt (&assign_body
,
13612 gimple_build_assign (new_var
, x
));
13615 present
= (do_optional_check
13616 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c
), true)
13620 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13621 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13622 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13623 glabel
*null_glabel
= gimple_build_label (null_label
);
13624 glabel
*notnull_glabel
= gimple_build_label (notnull_label
);
13625 ggoto
*opt_arg_ggoto
= gimple_build_goto (opt_arg_label
);
13626 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13628 gimplify_expr (&present
, &new_body
, NULL
, is_gimple_val
,
13630 gcond
*cond
= gimple_build_cond_from_tree (present
,
13633 gimple_seq_add_stmt (&new_body
, cond
);
13634 gimple_seq_add_stmt (&new_body
, null_glabel
);
13635 gimplify_assign (new_var
, null_pointer_node
, &new_body
);
13636 gimple_seq_add_stmt (&new_body
, opt_arg_ggoto
);
13637 gimple_seq_add_stmt (&new_body
, notnull_glabel
);
13638 gimple_seq_add_seq (&new_body
, assign_body
);
13639 gimple_seq_add_stmt (&new_body
,
13640 gimple_build_label (opt_arg_label
));
13643 gimple_seq_add_seq (&new_body
, assign_body
);
13646 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13647 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13648 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13649 or references to VLAs. */
13650 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13651 switch (OMP_CLAUSE_CODE (c
))
13656 case OMP_CLAUSE_MAP
:
13657 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13658 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
13660 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13661 poly_int64 offset
= 0;
13663 var
= OMP_CLAUSE_DECL (c
);
13665 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
13666 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
13668 && varpool_node::get_create (var
)->offloadable
)
13670 if (TREE_CODE (var
) == INDIRECT_REF
13671 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
13672 var
= TREE_OPERAND (var
, 0);
13673 if (TREE_CODE (var
) == COMPONENT_REF
)
13675 var
= get_addr_base_and_unit_offset (var
, &offset
);
13676 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
13678 else if (DECL_SIZE (var
)
13679 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
13681 tree var2
= DECL_VALUE_EXPR (var
);
13682 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
13683 var2
= TREE_OPERAND (var2
, 0);
13684 gcc_assert (DECL_P (var2
));
13687 tree new_var
= lookup_decl (var
, ctx
), x
;
13688 tree type
= TREE_TYPE (new_var
);
13690 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
13691 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
13694 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
13696 new_var
= build2 (MEM_REF
, type
,
13697 build_fold_addr_expr (new_var
),
13698 build_int_cst (build_pointer_type (type
),
13701 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
13703 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
13704 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
13705 new_var
= build2 (MEM_REF
, type
,
13706 build_fold_addr_expr (new_var
),
13707 build_int_cst (build_pointer_type (type
),
13711 is_ref
= omp_privatize_by_reference (var
);
13712 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
13714 bool ref_to_array
= false;
13717 type
= TREE_TYPE (type
);
13718 if (TREE_CODE (type
) == ARRAY_TYPE
)
13720 type
= build_pointer_type (type
);
13721 ref_to_array
= true;
13724 else if (TREE_CODE (type
) == ARRAY_TYPE
)
13726 tree decl2
= DECL_VALUE_EXPR (new_var
);
13727 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
13728 decl2
= TREE_OPERAND (decl2
, 0);
13729 gcc_assert (DECL_P (decl2
));
13731 type
= TREE_TYPE (new_var
);
13733 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
13734 x
= fold_convert_loc (clause_loc
, type
, x
);
13735 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
13737 tree bias
= OMP_CLAUSE_SIZE (c
);
13739 bias
= lookup_decl (bias
, ctx
);
13740 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
13741 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
13743 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
13744 TREE_TYPE (x
), x
, bias
);
13747 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
13748 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13749 if (is_ref
&& !ref_to_array
)
13751 tree t
= create_tmp_var_raw (type
, get_name (var
));
13752 gimple_add_tmp_var (t
);
13753 TREE_ADDRESSABLE (t
) = 1;
13754 gimple_seq_add_stmt (&new_body
,
13755 gimple_build_assign (t
, x
));
13756 x
= build_fold_addr_expr_loc (clause_loc
, t
);
13758 gimple_seq_add_stmt (&new_body
,
13759 gimple_build_assign (new_var
, x
));
13762 else if (OMP_CLAUSE_CHAIN (c
)
13763 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
13765 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
13766 == GOMP_MAP_FIRSTPRIVATE_POINTER
13767 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
13768 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
13771 case OMP_CLAUSE_PRIVATE
:
13772 var
= OMP_CLAUSE_DECL (c
);
13773 if (is_variable_sized (var
))
13775 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13776 tree new_var
= lookup_decl (var
, ctx
);
13777 tree pvar
= DECL_VALUE_EXPR (var
);
13778 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13779 pvar
= TREE_OPERAND (pvar
, 0);
13780 gcc_assert (DECL_P (pvar
));
13781 tree new_pvar
= lookup_decl (pvar
, ctx
);
13782 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
13783 tree al
= size_int (DECL_ALIGN (var
));
13784 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
13785 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
13786 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
13787 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13788 gimple_seq_add_stmt (&new_body
,
13789 gimple_build_assign (new_pvar
, x
));
13791 else if (omp_privatize_by_reference (var
)
13792 && !is_gimple_omp_oacc (ctx
->stmt
))
13794 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13795 tree new_var
= lookup_decl (var
, ctx
);
13796 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
13797 if (TREE_CONSTANT (x
))
13802 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
13803 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
13804 tree al
= size_int (TYPE_ALIGN (rtype
));
13805 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
13808 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
13809 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13810 gimple_seq_add_stmt (&new_body
,
13811 gimple_build_assign (new_var
, x
));
13816 gimple_seq fork_seq
= NULL
;
13817 gimple_seq join_seq
= NULL
;
13819 if (offloaded
&& is_gimple_omp_oacc (ctx
->stmt
))
13821 /* If there are reductions on the offloaded region itself, treat
13822 them as a dummy GANG loop. */
13823 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
13825 gcall
*private_marker
= lower_oacc_private_marker (ctx
);
13827 if (private_marker
)
13828 gimple_call_set_arg (private_marker
, 2, level
);
13830 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
13831 false, NULL
, private_marker
, NULL
, &fork_seq
,
13835 gimple_seq_add_seq (&new_body
, fork_seq
);
13836 gimple_seq_add_seq (&new_body
, tgt_body
);
13837 gimple_seq_add_seq (&new_body
, join_seq
);
13841 new_body
= maybe_catch_exception (new_body
);
13842 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
13844 gimple_omp_set_body (stmt
, new_body
);
13847 bind
= gimple_build_bind (NULL
, NULL
,
13848 tgt_bind
? gimple_bind_block (tgt_bind
)
13850 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
13851 gimple_bind_add_seq (bind
, ilist
);
13852 gimple_bind_add_stmt (bind
, stmt
);
13853 gimple_bind_add_seq (bind
, olist
);
13855 pop_gimplify_context (NULL
);
13859 gimple_bind_add_seq (dep_bind
, dep_ilist
);
13860 gimple_bind_add_stmt (dep_bind
, bind
);
13861 gimple_bind_add_seq (dep_bind
, dep_olist
);
13862 pop_gimplify_context (dep_bind
);
13866 /* Expand code for an OpenMP teams directive. */
13869 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
13871 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
13872 push_gimplify_context ();
13874 tree block
= make_node (BLOCK
);
13875 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
13876 gsi_replace (gsi_p
, bind
, true);
13877 gimple_seq bind_body
= NULL
;
13878 gimple_seq dlist
= NULL
;
13879 gimple_seq olist
= NULL
;
13881 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
13882 OMP_CLAUSE_NUM_TEAMS
);
13883 if (num_teams
== NULL_TREE
)
13884 num_teams
= build_int_cst (unsigned_type_node
, 0);
13887 num_teams
= OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams
);
13888 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
13889 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
13891 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
13892 OMP_CLAUSE_THREAD_LIMIT
);
13893 if (thread_limit
== NULL_TREE
)
13894 thread_limit
= build_int_cst (unsigned_type_node
, 0);
13897 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
13898 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
13899 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
13903 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
13904 &bind_body
, &dlist
, ctx
, NULL
);
13905 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
13906 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
,
13908 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
13910 location_t loc
= gimple_location (teams_stmt
);
13911 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS
);
13912 gimple
*call
= gimple_build_call (decl
, 2, num_teams
, thread_limit
);
13913 gimple_set_location (call
, loc
);
13914 gimple_seq_add_stmt (&bind_body
, call
);
13916 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
13917 gimple_omp_set_body (teams_stmt
, NULL
);
13918 gimple_seq_add_seq (&bind_body
, olist
);
13919 gimple_seq_add_seq (&bind_body
, dlist
);
13920 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
13921 gimple_bind_set_body (bind
, bind_body
);
13923 pop_gimplify_context (bind
);
13925 gimple_bind_append_vars (bind
, ctx
->block_vars
);
13926 BLOCK_VARS (block
) = ctx
->block_vars
;
13927 if (BLOCK_VARS (block
))
13928 TREE_USED (block
) = 1;
13931 /* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
13932 regimplified.  If DATA is non-NULL, lower_omp_1 is outside
13933 of OMP context, but with task_shared_vars set. */
/* NOTE(review): this extraction has dropped several lines of the function
   (the "static tree" return-type line, the trailing "void *data" parameter,
   the "tree t = *tp;" initialization, the early "return t;" statements and
   the final "return NULL_TREE;") -- confirm against the original file.  */
13936 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
13941 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
13942 if ((VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
13944 && DECL_HAS_VALUE_EXPR_P (t
))
/* Variables shared across an OpenMP task also force regimplification;
   task_shared_vars is a bitmap keyed by DECL_UID.  */
13947 if (task_shared_vars
13949 && bitmap_bit_p (task_shared_vars
, DECL_UID (t
)))
13952 /* If a global variable has been privatized, TREE_CONSTANT on
13953 ADDR_EXPR might be wrong. */
13954 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
13955 recompute_tree_invariant_for_addr_expr (t
);
/* Do not descend into types or declarations.  */
13957 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
13961 /* Data to be communicated between lower_omp_regimplify_operands and
13962 lower_omp_regimplify_operands_p. */
/* NOTE(review): the struct's field list is missing from this extraction.
   Judging from the uses below (ldata->ctx passed to maybe_lookup_decl and
   ldata->decls->safe_push), it holds an omp_context * and a vec<tree> * --
   confirm against the original file.  */
13964 struct lower_omp_regimplify_operands_data
13970 /* Helper function for lower_omp_regimplify_operands. Find
13971 omp_member_access_dummy_var vars and adjust temporarily their
13972 DECL_VALUE_EXPRs if needed. */
/* NOTE(review): the extraction dropped some lines here (return type,
   trailing "void *data" parameter, and the guard conditions around the
   two blocks below) -- confirm against the original file.  */
13975 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
/* Non-NULL iff *tp is a dummy var standing in for a member access.  */
13978 tree t
= omp_member_access_dummy_var (*tp
);
/* DATA is the walk_stmt_info whose info field carries the
   lower_omp_regimplify_operands_data set up by the caller.  */
13981 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
13982 lower_omp_regimplify_operands_data
*ldata
13983 = (lower_omp_regimplify_operands_data
*) wi
->info
;
/* Look up the replacement decl for T in the lowering context.  */
13984 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
/* Save the old DECL_VALUE_EXPR and the var itself (as a pair on the
   decls vec) so the caller can restore them afterwards, then install
   a remapped copy with T replaced by O.  */
13987 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
13988 ldata
->decls
->safe_push (*tp
);
13989 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
13990 SET_DECL_VALUE_EXPR (*tp
, v
);
/* Do not descend into types or declarations.  */
13993 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
13997 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
13998 of omp_member_access_dummy_var vars during regimplification. */
14001 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
14002 gimple_stmt_iterator
*gsi_p
)
14004 auto_vec
<tree
, 10> decls
;
14007 struct walk_stmt_info wi
;
14008 memset (&wi
, '\0', sizeof (wi
));
14009 struct lower_omp_regimplify_operands_data data
;
14011 data
.decls
= &decls
;
14013 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
14015 gimple_regimplify_operands (stmt
, gsi_p
);
14016 while (!decls
.is_empty ())
14018 tree t
= decls
.pop ();
14019 tree v
= decls
.pop ();
14020 SET_DECL_VALUE_EXPR (t
, v
);
14025 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
14027 gimple
*stmt
= gsi_stmt (*gsi_p
);
14028 struct walk_stmt_info wi
;
14031 if (gimple_has_location (stmt
))
14032 input_location
= gimple_location (stmt
);
14034 if (task_shared_vars
)
14035 memset (&wi
, '\0', sizeof (wi
));
14037 /* If we have issued syntax errors, avoid doing any heavy lifting.
14038 Just replace the OMP directives with a NOP to avoid
14039 confusing RTL expansion. */
14040 if (seen_error () && is_gimple_omp (stmt
))
14042 gsi_replace (gsi_p
, gimple_build_nop (), true);
14046 switch (gimple_code (stmt
))
14050 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
14051 if ((ctx
|| task_shared_vars
)
14052 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
14053 lower_omp_regimplify_p
,
14054 ctx
? NULL
: &wi
, NULL
)
14055 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
14056 lower_omp_regimplify_p
,
14057 ctx
? NULL
: &wi
, NULL
)))
14058 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
14062 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
14064 case GIMPLE_EH_FILTER
:
14065 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
14068 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
14069 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
14071 case GIMPLE_TRANSACTION
:
14072 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
14076 if (ctx
&& is_gimple_omp_oacc (ctx
->stmt
))
14078 tree vars
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
14079 oacc_privatization_scan_decl_chain (ctx
, vars
);
14081 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
14082 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
14084 case GIMPLE_OMP_PARALLEL
:
14085 case GIMPLE_OMP_TASK
:
14086 ctx
= maybe_lookup_ctx (stmt
);
14088 if (ctx
->cancellable
)
14089 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14090 lower_omp_taskreg (gsi_p
, ctx
);
14092 case GIMPLE_OMP_FOR
:
14093 ctx
= maybe_lookup_ctx (stmt
);
14095 if (ctx
->cancellable
)
14096 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14097 lower_omp_for (gsi_p
, ctx
);
14099 case GIMPLE_OMP_SECTIONS
:
14100 ctx
= maybe_lookup_ctx (stmt
);
14102 if (ctx
->cancellable
)
14103 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14104 lower_omp_sections (gsi_p
, ctx
);
14106 case GIMPLE_OMP_SCOPE
:
14107 ctx
= maybe_lookup_ctx (stmt
);
14109 lower_omp_scope (gsi_p
, ctx
);
14111 case GIMPLE_OMP_SINGLE
:
14112 ctx
= maybe_lookup_ctx (stmt
);
14114 lower_omp_single (gsi_p
, ctx
);
14116 case GIMPLE_OMP_MASTER
:
14117 case GIMPLE_OMP_MASKED
:
14118 ctx
= maybe_lookup_ctx (stmt
);
14120 lower_omp_master (gsi_p
, ctx
);
14122 case GIMPLE_OMP_TASKGROUP
:
14123 ctx
= maybe_lookup_ctx (stmt
);
14125 lower_omp_taskgroup (gsi_p
, ctx
);
14127 case GIMPLE_OMP_ORDERED
:
14128 ctx
= maybe_lookup_ctx (stmt
);
14130 lower_omp_ordered (gsi_p
, ctx
);
14132 case GIMPLE_OMP_SCAN
:
14133 ctx
= maybe_lookup_ctx (stmt
);
14135 lower_omp_scan (gsi_p
, ctx
);
14137 case GIMPLE_OMP_CRITICAL
:
14138 ctx
= maybe_lookup_ctx (stmt
);
14140 lower_omp_critical (gsi_p
, ctx
);
14142 case GIMPLE_OMP_ATOMIC_LOAD
:
14143 if ((ctx
|| task_shared_vars
)
14144 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14145 as_a
<gomp_atomic_load
*> (stmt
)),
14146 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
14147 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
14149 case GIMPLE_OMP_TARGET
:
14150 ctx
= maybe_lookup_ctx (stmt
);
14152 lower_omp_target (gsi_p
, ctx
);
14154 case GIMPLE_OMP_TEAMS
:
14155 ctx
= maybe_lookup_ctx (stmt
);
14157 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
14158 lower_omp_taskreg (gsi_p
, ctx
);
14160 lower_omp_teams (gsi_p
, ctx
);
14164 call_stmt
= as_a
<gcall
*> (stmt
);
14165 fndecl
= gimple_call_fndecl (call_stmt
);
14167 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
14168 switch (DECL_FUNCTION_CODE (fndecl
))
14170 case BUILT_IN_GOMP_BARRIER
:
14174 case BUILT_IN_GOMP_CANCEL
:
14175 case BUILT_IN_GOMP_CANCELLATION_POINT
:
14178 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
14179 cctx
= cctx
->outer
;
14180 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
14181 if (!cctx
->cancellable
)
14183 if (DECL_FUNCTION_CODE (fndecl
)
14184 == BUILT_IN_GOMP_CANCELLATION_POINT
)
14186 stmt
= gimple_build_nop ();
14187 gsi_replace (gsi_p
, stmt
, false);
14191 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
14193 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
14194 gimple_call_set_fndecl (call_stmt
, fndecl
);
14195 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
14198 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
14199 gimple_call_set_lhs (call_stmt
, lhs
);
14200 tree fallthru_label
;
14201 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
14203 g
= gimple_build_label (fallthru_label
);
14204 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14205 g
= gimple_build_cond (NE_EXPR
, lhs
,
14206 fold_convert (TREE_TYPE (lhs
),
14207 boolean_false_node
),
14208 cctx
->cancel_label
, fallthru_label
);
14209 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14216 case GIMPLE_ASSIGN
:
14217 for (omp_context
*up
= ctx
; up
; up
= up
->outer
)
14219 if (gimple_code (up
->stmt
) == GIMPLE_OMP_ORDERED
14220 || gimple_code (up
->stmt
) == GIMPLE_OMP_CRITICAL
14221 || gimple_code (up
->stmt
) == GIMPLE_OMP_TASKGROUP
14222 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCOPE
14223 || gimple_code (up
->stmt
) == GIMPLE_OMP_SECTION
14224 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
14225 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
14226 && (gimple_omp_target_kind (up
->stmt
)
14227 == GF_OMP_TARGET_KIND_DATA
)))
14229 else if (!up
->lastprivate_conditional_map
)
14231 tree lhs
= get_base_address (gimple_assign_lhs (stmt
));
14232 if (TREE_CODE (lhs
) == MEM_REF
14233 && DECL_P (TREE_OPERAND (lhs
, 0))
14234 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs
,
14235 0))) == REFERENCE_TYPE
)
14236 lhs
= TREE_OPERAND (lhs
, 0);
14238 if (tree
*v
= up
->lastprivate_conditional_map
->get (lhs
))
14241 if (up
->combined_into_simd_safelen1
)
14244 if (gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
)
14247 if (gimple_code (up
->stmt
) == GIMPLE_OMP_FOR
)
14248 clauses
= gimple_omp_for_clauses (up
->stmt
);
14250 clauses
= gimple_omp_sections_clauses (up
->stmt
);
14251 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__CONDTEMP_
);
14252 if (!OMP_CLAUSE__CONDTEMP__ITER (c
))
14253 c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
14254 OMP_CLAUSE__CONDTEMP_
);
14255 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c
));
14256 gimple
*g
= gimple_build_assign (*v
, OMP_CLAUSE_DECL (c
));
14257 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14264 if ((ctx
|| task_shared_vars
)
14265 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
14268 /* Just remove clobbers, this should happen only if we have
14269 "privatized" local addressable variables in SIMD regions,
14270 the clobber isn't needed in that case and gimplifying address
14271 of the ARRAY_REF into a pointer and creating MEM_REF based
14272 clobber would create worse code than we get with the clobber
14274 if (gimple_clobber_p (stmt
))
14276 gsi_replace (gsi_p
, gimple_build_nop (), true);
14279 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
14286 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
14288 location_t saved_location
= input_location
;
14289 gimple_stmt_iterator gsi
;
14290 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
14291 lower_omp_1 (&gsi
, ctx
);
14292 /* During gimplification, we haven't folded statments inside offloading
14293 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
14294 if (target_nesting_level
|| taskreg_nesting_level
)
14295 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
14297 input_location
= saved_location
;
14300 /* Main entry point. */
14302 static unsigned int
14303 execute_lower_omp (void)
14309 /* This pass always runs, to provide PROP_gimple_lomp.
14310 But often, there is nothing to do. */
14311 if (flag_openacc
== 0 && flag_openmp
== 0
14312 && flag_openmp_simd
== 0)
14315 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
14316 delete_omp_context
);
14318 body
= gimple_body (current_function_decl
);
14320 scan_omp (&body
, NULL
);
14321 gcc_assert (taskreg_nesting_level
== 0);
14322 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
14323 finish_taskreg_scan (ctx
);
14324 taskreg_contexts
.release ();
14326 if (all_contexts
->root
)
14328 if (task_shared_vars
)
14329 push_gimplify_context ();
14330 lower_omp (&body
, NULL
);
14331 if (task_shared_vars
)
14332 pop_gimplify_context (NULL
);
14337 splay_tree_delete (all_contexts
);
14338 all_contexts
= NULL
;
14340 BITMAP_FREE (task_shared_vars
);
14341 BITMAP_FREE (global_nonaddressable_vars
);
14343 /* If current function is a method, remove artificial dummy VAR_DECL created
14344 for non-static data member privatization, they aren't needed for
14345 debuginfo nor anything else, have been already replaced everywhere in the
14346 IL and cause problems with LTO. */
14347 if (DECL_ARGUMENTS (current_function_decl
)
14348 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
14349 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
14351 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
14357 const pass_data pass_data_lower_omp
=
14359 GIMPLE_PASS
, /* type */
14360 "omplower", /* name */
14361 OPTGROUP_OMP
, /* optinfo_flags */
14362 TV_NONE
, /* tv_id */
14363 PROP_gimple_any
, /* properties_required */
14364 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
14365 0, /* properties_destroyed */
14366 0, /* todo_flags_start */
14367 0, /* todo_flags_finish */
14370 class pass_lower_omp
: public gimple_opt_pass
14373 pass_lower_omp (gcc::context
*ctxt
)
14374 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
14377 /* opt_pass methods: */
14378 virtual unsigned int execute (function
*) { return execute_lower_omp (); }
14380 }; // class pass_lower_omp
14382 } // anon namespace
14385 make_pass_lower_omp (gcc::context
*ctxt
)
14387 return new pass_lower_omp (ctxt
);
14390 /* The following is a utility to diagnose structured block violations.
14391 It is not part of the "omplower" pass, as that's invoked too late. It
14392 should be invoked by the respective front ends after gimplification. */
14394 static splay_tree all_labels
;
14396 /* Check for mismatched contexts and generate an error if needed. Return
14397 true if an error is detected. */
14400 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
14401 gimple
*branch_ctx
, gimple
*label_ctx
)
14403 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
14404 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
14406 if (label_ctx
== branch_ctx
)
14409 const char* kind
= NULL
;
14413 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
14414 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
14416 gcc_checking_assert (kind
== NULL
);
14422 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
14426 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14427 so we could traverse it and issue a correct "exit" or "enter" error
14428 message upon a structured block violation.
14430 We built the context by building a list with tree_cons'ing, but there is
14431 no easy counterpart in gimple tuples. It seems like far too much work
14432 for issuing exit/enter error messages. If someone really misses the
14433 distinct error message... patches welcome. */
14436 /* Try to avoid confusing the user by producing and error message
14437 with correct "exit" or "enter" verbiage. We prefer "exit"
14438 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14439 if (branch_ctx
== NULL
)
14445 if (TREE_VALUE (label_ctx
) == branch_ctx
)
14450 label_ctx
= TREE_CHAIN (label_ctx
);
14455 error ("invalid exit from %s structured block", kind
);
14457 error ("invalid entry to %s structured block", kind
);
14460 /* If it's obvious we have an invalid entry, be specific about the error. */
14461 if (branch_ctx
== NULL
)
14462 error ("invalid entry to %s structured block", kind
);
14465 /* Otherwise, be vague and lazy, but efficient. */
14466 error ("invalid branch to/from %s structured block", kind
);
14469 gsi_replace (gsi_p
, gimple_build_nop (), false);
14473 /* Pass 1: Create a minimal tree of structured blocks, and record
14474 where each label is found. */
14477 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
14478 struct walk_stmt_info
*wi
)
14480 gimple
*context
= (gimple
*) wi
->info
;
14481 gimple
*inner_context
;
14482 gimple
*stmt
= gsi_stmt (*gsi_p
);
14484 *handled_ops_p
= true;
14486 switch (gimple_code (stmt
))
14490 case GIMPLE_OMP_PARALLEL
:
14491 case GIMPLE_OMP_TASK
:
14492 case GIMPLE_OMP_SCOPE
:
14493 case GIMPLE_OMP_SECTIONS
:
14494 case GIMPLE_OMP_SINGLE
:
14495 case GIMPLE_OMP_SECTION
:
14496 case GIMPLE_OMP_MASTER
:
14497 case GIMPLE_OMP_MASKED
:
14498 case GIMPLE_OMP_ORDERED
:
14499 case GIMPLE_OMP_SCAN
:
14500 case GIMPLE_OMP_CRITICAL
:
14501 case GIMPLE_OMP_TARGET
:
14502 case GIMPLE_OMP_TEAMS
:
14503 case GIMPLE_OMP_TASKGROUP
:
14504 /* The minimal context here is just the current OMP construct. */
14505 inner_context
= stmt
;
14506 wi
->info
= inner_context
;
14507 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
14508 wi
->info
= context
;
14511 case GIMPLE_OMP_FOR
:
14512 inner_context
= stmt
;
14513 wi
->info
= inner_context
;
14514 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14516 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
14517 diagnose_sb_1
, NULL
, wi
);
14518 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
14519 wi
->info
= context
;
14523 splay_tree_insert (all_labels
,
14524 (splay_tree_key
) gimple_label_label (
14525 as_a
<glabel
*> (stmt
)),
14526 (splay_tree_value
) context
);
14536 /* Pass 2: Check each branch and see if its context differs from that of
14537 the destination label's context. */
14540 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
14541 struct walk_stmt_info
*wi
)
14543 gimple
*context
= (gimple
*) wi
->info
;
14545 gimple
*stmt
= gsi_stmt (*gsi_p
);
14547 *handled_ops_p
= true;
14549 switch (gimple_code (stmt
))
14553 case GIMPLE_OMP_PARALLEL
:
14554 case GIMPLE_OMP_TASK
:
14555 case GIMPLE_OMP_SCOPE
:
14556 case GIMPLE_OMP_SECTIONS
:
14557 case GIMPLE_OMP_SINGLE
:
14558 case GIMPLE_OMP_SECTION
:
14559 case GIMPLE_OMP_MASTER
:
14560 case GIMPLE_OMP_MASKED
:
14561 case GIMPLE_OMP_ORDERED
:
14562 case GIMPLE_OMP_SCAN
:
14563 case GIMPLE_OMP_CRITICAL
:
14564 case GIMPLE_OMP_TARGET
:
14565 case GIMPLE_OMP_TEAMS
:
14566 case GIMPLE_OMP_TASKGROUP
:
14568 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
14569 wi
->info
= context
;
14572 case GIMPLE_OMP_FOR
:
14574 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14576 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
14577 diagnose_sb_2
, NULL
, wi
);
14578 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
14579 wi
->info
= context
;
14584 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
14585 tree lab
= gimple_cond_true_label (cond_stmt
);
14588 n
= splay_tree_lookup (all_labels
,
14589 (splay_tree_key
) lab
);
14590 diagnose_sb_0 (gsi_p
, context
,
14591 n
? (gimple
*) n
->value
: NULL
);
14593 lab
= gimple_cond_false_label (cond_stmt
);
14596 n
= splay_tree_lookup (all_labels
,
14597 (splay_tree_key
) lab
);
14598 diagnose_sb_0 (gsi_p
, context
,
14599 n
? (gimple
*) n
->value
: NULL
);
14606 tree lab
= gimple_goto_dest (stmt
);
14607 if (TREE_CODE (lab
) != LABEL_DECL
)
14610 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
14611 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
14615 case GIMPLE_SWITCH
:
14617 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
14619 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
14621 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
14622 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
14623 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
14629 case GIMPLE_RETURN
:
14630 diagnose_sb_0 (gsi_p
, context
, NULL
);
14640 static unsigned int
14641 diagnose_omp_structured_block_errors (void)
14643 struct walk_stmt_info wi
;
14644 gimple_seq body
= gimple_body (current_function_decl
);
14646 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
14648 memset (&wi
, 0, sizeof (wi
));
14649 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
14651 memset (&wi
, 0, sizeof (wi
));
14652 wi
.want_locations
= true;
14653 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
14655 gimple_set_body (current_function_decl
, body
);
14657 splay_tree_delete (all_labels
);
14665 const pass_data pass_data_diagnose_omp_blocks
=
14667 GIMPLE_PASS
, /* type */
14668 "*diagnose_omp_blocks", /* name */
14669 OPTGROUP_OMP
, /* optinfo_flags */
14670 TV_NONE
, /* tv_id */
14671 PROP_gimple_any
, /* properties_required */
14672 0, /* properties_provided */
14673 0, /* properties_destroyed */
14674 0, /* todo_flags_start */
14675 0, /* todo_flags_finish */
14678 class pass_diagnose_omp_blocks
: public gimple_opt_pass
14681 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
14682 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
14685 /* opt_pass methods: */
14686 virtual bool gate (function
*)
14688 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
14690 virtual unsigned int execute (function
*)
14692 return diagnose_omp_structured_block_errors ();
14695 }; // class pass_diagnose_omp_blocks
14697 } // anon namespace
14700 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
14702 return new pass_diagnose_omp_blocks (ctxt
);
14706 #include "gt-omp-low.h"