/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "stringpool.h"
#include "omp-offload.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
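/* As an illustrative sketch (not literal compiler output), a directive such
   as

       #pragma omp parallel shared(x)
       x++;

   is lowered so the parallel body lives in an artificial child function
   taking a data-sharing record, conceptually

       struct .omp_data_s { int *x; };
       void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
       { (*.omp_data_i->x)++; }

   with the directive itself becoming a libgomp call along the lines of
   GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0).  The record layout and
   the final call shape are decided by this pass and pass_expand_omp.  */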
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;

  /* Candidates for adjusting OpenACC privatization level.  */
  vec<tree> oacc_privatization_candidates;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;
static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
/* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_parallel_or_serial (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
}
/* Return whether CTX represents an OpenACC 'kernels' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* Return whether CTX represents an OpenACC 'kernels' decomposed part.  */

static bool
is_oacc_kernels_decomposed_part (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
}
/* Return true if STMT corresponds to an OpenMP target region.  */

static bool
is_omp_target (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
    {
      int kind = gimple_omp_target_kind (stmt);
      return (kind == GF_OMP_TARGET_KIND_REGION
	      || kind == GF_OMP_TARGET_KIND_DATA
	      || kind == GF_OMP_TARGET_KIND_ENTER_DATA
	      || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
    }
  return false;
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	break;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
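/* For instance, unshare_and_remap (a + b, b, c) returns a fresh, unshared
   tree equivalent to a + c, leaving the original A + B untouched.
   (Illustrative example, not from the original sources.)  */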
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP);
}

/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	  && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt)));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}
/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" form
   allows for the variable form to not have been entered, otherwise we
   assert that the variable must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}
static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
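/* In short: aggregates, atomics, and anything whose address may escape
   (addressable globals, DECL_HAS_VALUE_EXPR_P vars, variables shared into a
   task) are passed by pointer, while small non-addressable scalars shared
   into a parallel can use copy-in/copy-out.  (Summary of the checks above,
   added for orientation.)  */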
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;

  if (ctx)
    {
      DECL_CHAIN (copy) = ctx->block_vars;
      ctx->block_vars = copy;
    }
  else
    record_vars (copy);

  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just because task needs to take
     its address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((task_shared_vars
	   && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
/* See also 'gcc/omp-oacc-neuter-broadcast.cc:oacc_build_component_ref'.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
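/* E.g. for a variable X passed by reference, the receiver side ends up
   reading *(*.omp_data_i).x; without BY_REF it is just (*.omp_data_i).x.
   (Illustration of the refs built above, not from the original sources.)  */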
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  for (; outer; outer = outer->outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
	continue;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
	  && !maybe_lookup_decl (var, outer))
	continue;
      break;
    }

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_privatize_by_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_privatize_by_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  else if (mask & 8)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1
	   && omp_privatize_by_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
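/* Reading MASK as a bitmask: 1 installs the field in the receiver record
   (field_map/record_type) and 2 in the sender record (sfield_map/
   srecord_type), so the common "3" means both; 4 turns an array into a
   pointer-to-pointer field; 8 and 16 select alternative lookup keys
   (&DECL_UID resp. &DECL_NAME, the latter used for Fortran array
   descriptors).  The 32 bit appears at allocator-backed firstprivate call
   sites; its interpretation here is an inference from those uses, not a
   statement from the original sources.  */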
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);
/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;
  delete ctx->allocate_map;

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
	&& (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    /* omp_default_mem_alloc is 1 */
	    || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	    || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
      {
	if (ctx->allocate_map == NULL)
	  ctx->allocate_map = new hash_map<tree, tree>;
	tree val = integer_zero_node;
	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	  val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
	if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
	  val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
	ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
      }
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map && ctx->allocate_map->get (decl))
	    ctx->allocate_map->remove (decl);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_privatize_by_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;
	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));
	      /* Likewise, on OpenACC 'kernels' decomposed parts.  */
	      gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map
	      && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		   && (OMP_CLAUSE_REDUCTION_INSCAN (c)
		       || OMP_CLAUSE_REDUCTION_TASK (c)))
		  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		  || is_task_ctx (ctx)))
	    {
	      if (ctx->allocate_map->get (decl))
		ctx->allocate_map->remove (decl);
	    }
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (is_omp_target (ctx->stmt))
		{
		  if (is_variable_sized (t))
		    {
		      gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
		      t = DECL_VALUE_EXPR (t);
		      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
		      t = TREE_OPERAND (t, 0);
		      gcc_assert (DECL_P (t));
		    }
		  tree at = t;
		  if (ctx->outer)
		    scan_omp_op (&at, ctx->outer);
		  tree nt = omp_copy_decl_1 (at, ctx->outer);
		  splay_tree_insert (ctx->field_map,
				     (splay_tree_key) &DECL_CONTEXT (t),
				     (splay_tree_value) nt);
		  if (at != t)
		    splay_tree_insert (ctx->field_map,
				       (splay_tree_key) &DECL_CONTEXT (at),
				       (splay_tree_value) nt);
		  break;
		}
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_omp_target (ctx->stmt))
	    {
	      tree at = decl;
	      if (ctx->outer)
		scan_omp_op (&at, ctx->outer);
	      tree nt = omp_copy_decl_1 (at, ctx->outer);
	      splay_tree_insert (ctx->field_map,
				 (splay_tree_key) &DECL_CONTEXT (decl),
				 (splay_tree_value) nt);
	      if (at != decl)
		splay_tree_insert (ctx->field_map,
				   (splay_tree_key) &DECL_CONTEXT (at),
				   (splay_tree_value) nt);
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		{
		  by_ref = !omp_privatize_by_reference (decl);
		  install_var_field (decl, by_ref, 3, ctx);
		}
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		{
		  if (ctx->allocate_map
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		    {
		      if (ctx->allocate_map->get (decl))
			ctx->allocate_map->remove (decl);
		    }
		  install_var_field (decl, false, 1, ctx);
		}
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_privatize_by_reference (decl)))
		{
		  if (ctx->allocate_map
		      && ctx->allocate_map->get (decl))
		    install_var_field (decl, by_ref, 32 | 1, ctx);
		  else
		    install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_privatize_by_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_FILTER:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt))
	    {
	      /* If this is an offloaded region, an attach operation should
		 only exist when the pointer variable is mapped in a prior
		 clause.  */
	      if (is_gimple_omp_offloaded (ctx->stmt))
		gcc_assert
		  (maybe_lookup_decl (decl, ctx)
		   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
		       && lookup_attribute ("omp declare target",
					    DECL_ATTRIBUTES (decl))));

	      /* By itself, attach/detach is generated as part of pointer
		 variable mapping and should not create new variables in the
		 offloaded region, however sender refs for it must be created
		 for its address to be passed to the runtime.  */
	      tree field
		= build_decl (OMP_CLAUSE_LOCATION (c),
			      FIELD_DECL, NULL_TREE, ptr_type_node);
	      SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
	      insert_field_into_struct (ctx->record_type, field);
	      /* To not clash with a map of the pointer variable itself,
		 attach/detach maps have their field looked up by the *clause*
		 tree expression, not the decl.  */
	      gcc_assert (!splay_tree_lookup (ctx->field_map,
					      (splay_tree_key) c));
	      splay_tree_insert (ctx->field_map, (splay_tree_key) c,
				 (splay_tree_value) field);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !(is_gimple_omp_oacc (ctx->stmt)
			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;
	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_ALLOCATE:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;
	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt)
	      && !is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    omp_context *rctx = ctx;
	    if (is_omp_target (ctx->stmt))
	      rctx = ctx->outer;
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
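/* To make the two passes over the clauses concrete: for
   "#pragma omp task firstprivate (n)" the first loop installs an "n" field
   in the sender/receiver records and a private copy of "n" in the task
   context, and the second loop fixes up the remapped decl (type,
   DECL_VALUE_EXPR) once all fields exist.  (Illustrative summary of the
   function above, not from the original sources.)  */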
/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
				       task_copy ? "_omp_cpyfn" : "_omp_fn");
}
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable)
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      if (lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
	{
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    DECL_ATTRIBUTES (decl)
	      = remove_attribute ("omp declare target",
				  copy_list (DECL_ATTRIBUTES (decl)));
	  else
	    target_attr = NULL;
	}
      if (target_attr)
	DECL_ATTRIBUTES (decl)
	  = tree_cons (get_identifier (target_attr),
		       NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

static tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}

/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE)
	      || (msk == GF_OMP_FOR_KIND_FOR
		  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				      OMP_CLAUSE_LASTPRIVATE)))
	    {
	      tree temp = create_tmp_var (type);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__LOOPTEMP_);
	      insert_decl_map (&outer_ctx->cb, temp, temp);
	      OMP_CLAUSE_DECL (c) = temp;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      if (fd.non_rect
	  && fd.last_nonrect == fd.first_nonrect + 1)
	if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
	      tree type2 = TREE_TYPE (v);
	      count += 3;
	      for (i = 0; i < 3; i++)
		{
		  tree temp = create_tmp_var (type2);
		  tree c = build_omp_clause (UNKNOWN_LOCATION,
					     OMP_CLAUSE__LOOPTEMP_);
		  insert_decl_map (&outer_ctx->cb, temp, temp);
		  OMP_CLAUSE_DECL (c) = temp;
		  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		  gimple_omp_taskreg_set_clauses (stmt, c);
		}
	    }
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}

/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}

/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}

/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_context returns non-NULL for some
   tree, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
	{
	  if (DECL_HAS_VALUE_EXPR_P (t))
	    t = unshare_expr (DECL_VALUE_EXPR (t));
	  *tp = t;
	}
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      tree detach_clause
	= omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_DETACH);
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  There are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      if (detach_clause)
	{
	  tree c, field;

	  /* Look for a firstprivate clause with the detach event handle.  */
	  for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	       c; c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
		continue;
	      if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
		  == OMP_CLAUSE_DECL (detach_clause))
		break;
	    }

	  gcc_assert (c);
	  field = lookup_field (OMP_CLAUSE_DECL (c), ctx);

	  /* Move field corresponding to the detach clause first.
	     This is filled by GOMP_task and needs to be in a
	     specific position.  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == field)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = field;
	  if (ctx->srecord_type)
	    {
	      field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == field)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = field;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}

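/* Illustration only (a sketch): for a task such as

       void foo (int n)
       {
	 int vla[n];
	 int x = 0;
       #pragma omp task firstprivate (vla, x)
	 use (vla, x);
       }

   the field for VLA has a non-constant TYPE_SIZE_UNIT, so it is moved
   after the fixed-size field for X above; the arg_size computed from
   TYPE_SIZE_UNIT of the record then refers to N and is remapped via
   finish_taskreg_remap.  */
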
/* Find the enclosing offload context.  */

static omp_context *
enclosing_target_ctx (omp_context *ctx)
{
  for (; ctx; ctx = ctx->outer)
    if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
      break;

  return ctx;
}

/* Return whether CTX's parent compute construct is an OpenACC 'kernels'
   construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
ctx_in_oacc_kernels_region (omp_context *ctx)
{
  for (; ctx != NULL; ctx = ctx->outer)
    {
      gimple *stmt = ctx->stmt;
      if (gimple_code (stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
	return true;
    }

  return false;
}

/* Check the parallelism clauses inside a OpenACC 'kernels' region.
   (This doesn't include OpenACC 'kernels' decomposed parts.)
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
  if (!stmt)
    {
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}

/* Scan a GIMPLE_OMP_FOR.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      if (!(tgt && is_oacc_kernels (tgt)))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    tree c_op0;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		c_op0 = OMP_CLAUSE_GANG_EXPR (c);
		break;

	      case OMP_CLAUSE_WORKER:
		c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
		break;

	      case OMP_CLAUSE_VECTOR:
		c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
		break;

	      default:
		continue;
	      }

	    if (c_op0)
	      {
		/* By construction, this is impossible for OpenACC 'kernels'
		   decomposed parts.  */
		gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));

		error_at (OMP_CLAUSE_LOCATION (c),
			  "argument not permitted on %qs clause",
			  omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		if (tgt)
		  inform (gimple_location (tgt->stmt),
			  "enclosing parent compute construct");
		else if (oacc_get_fn_attrib (current_function_decl))
		  inform (DECL_SOURCE_LOCATION (current_function_decl),
			  "enclosing routine");
		else
		  gcc_unreachable ();
	      }
	  }

      if (tgt && is_oacc_kernels (tgt))
	check_oacc_kernel_gwv (stmt, ctx);

      /* Collect all variables named in reductions on this loop.  Ensure
	 that, if this loop has a reduction on some variable v, and there is
	 a reduction on v somewhere in an outer context, then there is a
	 reduction on v on all intervening loops as well.  */
      tree local_reduction_clauses = NULL;
      for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	    local_reduction_clauses
	      = tree_cons (NULL, c, local_reduction_clauses);
	}
      if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
	ctx->outer_reduction_clauses
	  = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
		     ctx->outer->outer_reduction_clauses);
      tree outer_reduction_clauses = ctx->outer_reduction_clauses;
      tree local_iter = local_reduction_clauses;
      for (; local_iter; local_iter = TREE_CHAIN (local_iter))
	{
	  tree local_clause = TREE_VALUE (local_iter);
	  tree local_var = OMP_CLAUSE_DECL (local_clause);
	  tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
	  bool have_outer_reduction = false;
	  tree ctx_iter = outer_reduction_clauses;
	  for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
	    {
	      tree outer_clause = TREE_VALUE (ctx_iter);
	      tree outer_var = OMP_CLAUSE_DECL (outer_clause);
	      tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
	      if (outer_var == local_var && outer_op != local_op)
		{
		  warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
			      "conflicting reduction operations for %qE",
			      local_var);
		  inform (OMP_CLAUSE_LOCATION (outer_clause),
			  "location of the previous reduction for %qE",
			  outer_var);
		}
	      if (outer_var == local_var)
		{
		  have_outer_reduction = true;
		  break;
		}
	    }
	  if (have_outer_reduction)
	    {
	      /* There is a reduction on outer_var both on this loop and on
		 some enclosing loop.  Walk up the context tree until such a
		 loop with a reduction on outer_var is found, and complain
		 about all intervening loops that do not have such a
		 reduction.  */
	      struct omp_context *curr_loop = ctx->outer;
	      bool found = false;
	      while (curr_loop != NULL)
		{
		  tree curr_iter = curr_loop->local_reduction_clauses;
		  for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
		    {
		      tree curr_clause = TREE_VALUE (curr_iter);
		      tree curr_var = OMP_CLAUSE_DECL (curr_clause);
		      if (curr_var == local_var)
			{
			  found = true;
			  break;
			}
		    }
		  if (!found)
		    warning_at (gimple_location (curr_loop->stmt), 0,
				"nested loop in reduction needs "
				"reduction clause for %qE",
				local_var);
		  else
		    break;
		  curr_loop = curr_loop->outer;
		}
	    }
	}
      ctx->local_reduction_clauses = local_reduction_clauses;
      ctx->outer_reduction_clauses
	= chainon (unshare_expr (ctx->local_reduction_clauses),
		   ctx->outer_reduction_clauses);

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}

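/* Illustration only: the reduction bookkeeping in scan_omp_for warns
   about intervening OpenACC loops that lack a matching reduction:

       #pragma acc parallel loop reduction (+:sum)
       for (i = 0; i < n; i++)
	 #pragma acc loop	       <-- "nested loop in reduction needs
	 for (j = 0; j < m; j++)	    reduction clause for 'sum'"
	   #pragma acc loop reduction (+:sum)
	   for (k = 0; k < l; k++)
	     sum++;
   */
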
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);

  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}

static tree omp_find_scan (gimple_stmt_iterator *, bool *,
			   struct walk_stmt_info *);
static omp_context *maybe_lookup_ctx (gimple *);

/* Duplicate #pragma omp simd, one for the scan input phase loop and one
   for scan phase loop.  */

static void
scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
		    omp_context *outer_ctx)
{
  /* The only change between inclusive and exclusive scan will be
     within the first simd loop, so just use inclusive in the
     worksharing loop.  */
  outer_ctx->scan_inclusive = true;
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
  OMP_CLAUSE_DECL (c) = integer_zero_node;

  gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
  gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
  gsi_replace (gsi, input_stmt, false);
  gimple_seq input_body = NULL;
  gimple_seq_add_stmt (&input_body, stmt);
  gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);

  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gsi_next (&input1_gsi);
  gimple *scan_stmt1 = gsi_stmt (input1_gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
  c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt1, scan_stmt1);

  gimple_seq input_body1 = gimple_omp_body (input_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);

  gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (scan_body);

  gimple_omp_set_body (input_stmt1, input_body1);
  gimple_omp_set_body (scan_stmt1, NULL);

  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
		       NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi_next (&input2_gsi);
  gimple *scan_stmt2 = gsi_stmt (input2_gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt2, scan_stmt2);

  gimple_omp_set_body (input_stmt2, NULL);

  gimple_omp_set_body (input_stmt, input_body);
  gimple_omp_set_body (scan_stmt, scan_body);

  omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (input_stmt), ctx);

  ctx = new_omp_context (scan_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);

  maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
}

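/* Illustration only: for an inclusive scan such as

       #pragma omp simd reduction (inscan, +:r)
       for (i = 0; i < n; i++)
	 {
	   r += a[i];
       #pragma omp scan inclusive (r)
	   b[i] = r;
	 }

   the code above splits the loop into an input phase copy and a scan
   phase copy, each wrapped in its own GIMPLE_OMP_SCAN; only the input
   phase loop later differs between inclusive and exclusive scans.  */
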
/* Scan an OpenMP sections directive.  */

static void
scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;

  ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
}

/* Scan an OpenMP single directive.  */

static void
scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}

/* Scan a GIMPLE_OMP_TARGET.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }

  if (ctx->teams_nested_p && ctx->nonteams_nested_p)
    {
      error_at (gimple_location (stmt),
		"%<target%> construct with nested %<teams%> construct "
		"contains directives outside of the %<teams%> construct");
      gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
    }
}

/* Scan an OpenMP teams directive.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  if (!gimple_omp_teams_host (stmt))
    {
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  taskreg_contexts.safe_push (ctx);
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}

/* Check nesting restrictions.  */
static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
      || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
    /* ..., except for the atomic codes that OpenACC shares with OpenMP.  */
    ;
  else if (!(is_gimple_omp (stmt)
	     && is_gimple_omp_oacc (stmt)))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
	{
	  c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
			       OMP_CLAUSE_DEVICE);
	  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs are not allowed in target region "
			"with %<ancestor%>");
	      return false;
	    }
	  if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
	    ctx->teams_nested_p = true;
	  else
	    ctx->nonteams_nested_p = true;
	}
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
	  && ctx->outer
	  && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	ctx = ctx->outer;
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	  && !ctx->loop_p)
	{
	  c = NULL_TREE;
	  if (ctx->order_concurrent
	      && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs other than %<parallel%>, %<loop%>"
			" or %<simd%> may not be nested inside a region with"
			" the %<order(concurrent)%> clause");
	      return false;
	    }
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<%s simd%> region",
				lang_GNU_Fortran () ? "do" : "for");
		      return false;
		    }
		  return true;
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
		   || gimple_code (stmt) == GIMPLE_OMP_SCAN)
	    return true;
	  else if (gimple_code (stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	    return true;
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than "
		    "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
		    "not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
		   && omp_find_clause (gimple_omp_for_clauses (stmt),
				       OMP_CLAUSE_BIND) == NULL_TREE))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%>, %<parallel%> or %<loop%> "
			"regions are allowed to be strictly nested inside "
			"%<teams%> region");
	      return false;
	    }
	}
      else if (ctx->order_concurrent
	       && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
	       && (gimple_code (stmt) != GIMPLE_OMP_FOR
		   || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
	       && gimple_code (stmt) != GIMPLE_OMP_SCAN)
	{
	  if (ctx->loop_p)
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a %<loop%> region");
	  else
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a region with "
		      "the %<order(concurrent)%> clause");
	  return false;
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      /* For now, hope this will change and loop bind(parallel) will not
	 be allowed in lots of contexts.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	  && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		  case GF_OMP_TARGET_KIND_OACC_SERIAL:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
		    ok = true;
		    break;

		  default:
		    break;
		  }

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "cancel"
	      : "cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (!is_task_ctx (ctx)
		  && (!is_taskloop_ctx (ctx)
		      || ctx->outer == NULL
		      || !is_task_ctx (ctx->outer)))
		bad = "task";
	      else
		{
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			case GIMPLE_OMP_TASK:
			  if (gimple_omp_task_taskloop_p (octx->stmt)
			      && octx->outer
			      && is_taskloop_ctx (octx->outer))
			    {
			      tree clauses
				= gimple_omp_for_clauses (octx->outer->stmt);
			      if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
				break;
			    }
			  continue;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_MASKED:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<loop%>, %<critical%>, "
			  "%<ordered%>, %<master%>, %<masked%>, explicit "
			  "%<task%> or %<taskloop%> region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
		      "%<master%>, %<masked%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%qs region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%> or "
		      "%<taskloop%> region",
		      gimple_code (stmt) == GIMPLE_OMP_MASTER
		      ? "master" : "masked");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_SCOPE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_MASKED:
	    error_at (gimple_location (stmt),
		      "%<scope%> region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%>, "
		      "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
		      "or %<masked%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    {
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }
	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
	      || kind == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree oclause;
	      /* Look for containing ordered(N) loop.  */
	      if (ctx == NULL
		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || (oclause
			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside an %<ordered%> "
			    "loop");
		  return false;
		}
	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside a loop with "
			    "%<ordered%> clause with a parameter");
		  return false;
		}
	    }
	  else
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"invalid depend kind in omp %<ordered%> %<depend%>");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region");
	      return false;
	    }
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    tree o;
	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED);
	    if (o == NULL)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region without %<depend%> clause may "
			  "not be closely nested inside a loop region with "
			  "an %<ordered%> clause with a parameter");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
		= dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
	break;
      else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	       || (gimple_omp_target_kind (ctx->stmt)
		   != GF_OMP_TARGET_KIND_REGION))
	{
	  /* Teams construct can appear either strictly nested inside of
	     target construct with no intervening stmts, or can be encountered
	     only by initial task (so must not appear inside any OpenMP
	     construct).  */
	  error_at (gimple_location (stmt),
		    "%<teams%> construct must be closely nested inside of "
		    "%<target%> construct or not nested in any OpenMP "
		    "construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
	      stmt_name = "enter data"; break;
	    case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
	      stmt_name = "exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL:
	      ctx_stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      ctx_stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		warning_at (gimple_location (stmt), 0,
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
	    }
	}
      break;
    default:
      break;
    }
  return true;
}

/* Helper function scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}

/* Return true if FNDECL is a setjmp or a longjmp.  */

static bool
setjmp_or_longjmp_p (const_tree fndecl)
{
  if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
      || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
    return true;

  tree declname = DECL_NAME (fndecl);
  if (!declname
      || (DECL_CONTEXT (fndecl) != NULL_TREE
	  && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
      || !TREE_PUBLIC (fndecl))
    return false;

  const char *name = IDENTIFIER_POINTER (declname);
  return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
}

/* Return true if FNDECL is an omp_* runtime API call.  */

static bool
omp_runtime_api_call (const_tree fndecl)
{
  tree declname = DECL_NAME (fndecl);
  if (!declname
      || (DECL_CONTEXT (fndecl) != NULL_TREE
	  && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
      || !TREE_PUBLIC (fndecl))
    return false;

  const char *name = IDENTIFIER_POINTER (declname);
  if (!startswith (name, "omp_"))
    return false;

  static const char *omp_runtime_apis[] =
    {
      /* This array has 3 sections.  First omp_* calls that don't
	 have any suffixes.  */
      "target_associate_ptr",
      "target_disassociate_ptr",
      "target_is_present",
      "target_memcpy_rect",
      NULL,
      /* Now omp_* calls that are available as omp_* and omp_*_; however, the
	 DECL_NAME is always omp_* without trailing underscore.  */
      "destroy_allocator",
      "destroy_nest_lock",
      "get_affinity_format",
      "get_default_allocator",
      "get_default_device",
      "get_initial_device",
      "get_max_active_levels",
      "get_max_task_priority",
      "get_partition_num_places",
      "get_supported_active_levels",
      "get_teams_thread_limit",
      "is_initial_device",
      "pause_resource_all",
      "set_affinity_format",
      "set_default_allocator",
      NULL,
      /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
	 as DECL_NAME only omp_* and omp_*_8 appear.  */
      "get_ancestor_thread_num",
      "get_partition_place_nums",
      "get_place_num_procs",
      "get_place_proc_ids",
      "set_default_device",
      "set_max_active_levels",
      "set_teams_thread_limit"
    };

  int mode = 0;
  for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
    {
      if (omp_runtime_apis[i] == NULL)
	{
	  mode++;
	  continue;
	}
      size_t len = strlen (omp_runtime_apis[i]);
      if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
	  && (name[4 + len] == '\0'
	      || (mode > 1 && strcmp (name + 4 + len, "_8") == 0)))
	return true;
    }
  return false;
}
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  if (ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	      && setjmp_or_longjmp_p (fndecl))
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside %<simd%> construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	  else if (ctx)
	    {
	      omp_context *octx = ctx;
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
		octx = ctx->outer;
	      if (octx->order_concurrent && omp_runtime_api_call (fndecl))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD in a region with "
			    "%<order(concurrent)%> clause", fndecl);
		}
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && omp_runtime_api_call (fndecl)
		  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
		       != strlen ("omp_get_num_teams"))
		      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
				 "omp_get_num_teams") != 0)
		  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
		       != strlen ("omp_get_team_num"))
		      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
				 "omp_get_team_num") != 0))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD strictly nested in a "
			    "%<teams%> region", fndecl);
		}
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (ctx->stmt)
		      == GF_OMP_TARGET_KIND_REGION)
		  && omp_runtime_api_call (fndecl))
		{
		  tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
		  tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
		  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
		    error_at (gimple_location (stmt),
			      "OpenMP runtime API call %qD in a region with "
			      "%<device(ancestor)%> clause", fndecl);
		}
	    }
	}
    }
  if (remove)
    {
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && gimple_omp_for_combined_into_p (stmt)
	  && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
	{
	  tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
	  tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
	  if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
	    {
	      scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
	      break;
	    }
	}
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ()
	  && gimple_omp_for_collapse (stmt) == 1)
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCOPE:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_MASKED:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      if (is_gimple_omp_offloaded (stmt))
	{
	  taskreg_nesting_level++;
	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;

    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
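
/* Illustration only: source-level examples of what the checks above
   diagnose, with hypothetical "buf" and "n":

     #pragma omp simd
     for (int i = 0; i < n; i++)
       if (setjmp (buf))        // error: setjmp/longjmp inside
	 ...                    // simd construct

     #pragma omp teams
     omp_set_num_threads (4);   // error: runtime API call strictly
				// nested in a teams region; only
				// omp_get_num_teams and
				// omp_get_team_num are allowed.  */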
/* Scan all the statements starting at the current statement.  CTX
   contains context information about the OMP directives and
   clauses found during the scan.  */

static void
scan_omp (gimple_seq *body_p, omp_context *ctx)
{
  location_t saved_location;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  saved_location = input_location;
  walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
  input_location = saved_location;
}
/* Re-gimplification and code generation routines.  */

/* Remove omp_member_access_dummy_var variables from gimple_bind_vars
   of BIND if in a method.  */

static void
maybe_remove_omp_member_access_dummy_vars (gbind *bind)
{
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    {
      tree vars = gimple_bind_vars (bind);
      for (tree *pvar = &vars; *pvar; )
	if (omp_member_access_dummy_var (*pvar))
	  *pvar = DECL_CHAIN (*pvar);
	else
	  pvar = &DECL_CHAIN (*pvar);
      gimple_bind_set_vars (bind, vars);
    }
}
/* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
   block and its subblocks.  */

static void
remove_member_access_dummy_vars (tree block)
{
  for (tree *pvar = &BLOCK_VARS (block); *pvar; )
    if (omp_member_access_dummy_var (*pvar))
      *pvar = DECL_CHAIN (*pvar);
    else
      pvar = &DECL_CHAIN (*pvar);

  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    remove_member_access_dummy_vars (block);
}
/* If a context was created for STMT when it was scanned, return it.  */

static omp_context *
maybe_lookup_ctx (gimple *stmt)
{
  splay_tree_node n;
  n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
  return n ? (omp_context *) n->value : NULL;
}
/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context.  Suppose that we have the
   following parallel nesting (variable UIDs shown for clarity):

	iD.1562 = 0;
	#omp parallel shared(iD.1562)		-> outer parallel
	  iD.1562 = iD.1562 + 1;

	  #omp parallel shared (iD.1562)	-> inner parallel
	    iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

	outer parallel		.omp_data_s.1.i -> iD.1562
	inner parallel		.omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it.  So, in
   this case we would have:

	iD.1562 = 0;
	.omp_data_o.1.i = iD.1562;
	#omp parallel shared(iD.1562)		-> outer parallel
	  .omp_data_i.1 = &.omp_data_o.1
	  .omp_data_i.1->i = .omp_data_i.1->i + 1;

	  .omp_data_o.2.i = iD.1562;		-> **
	  #omp parallel shared(iD.1562)		-> inner parallel
	    .omp_data_i.2 = &.omp_data_o.2
	    .omp_data_i.2->i = .omp_data_i.2->i - 1;

   ** This is a problem.  The symbol iD.1562 cannot be referenced
   inside the body of the outer parallel region.  But since we are
   emitting this copy operation while expanding the inner parallel
   directive, we need to access the CTX structure of the outer
   parallel directive to get the correct mapping:

	  .omp_data_o.2.i = .omp_data_i.1->i

   Since there may be other workshare or parallel directives enclosing
   the parallel directive, it may be necessary to walk up the context
   parent chain.  This is not a problem in general because nested
   parallelism happens only rarely.  */

static tree
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  gcc_assert (!ctx->is_nested || t || is_global_var (decl));

  return t ? t : decl;
}
/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
   in outer contexts.  */

static tree
maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t = NULL;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  return t ? t : decl;
}
/* Construct the initialization value for reduction operation OP.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
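
/* Illustration only: the identities above are what each thread's
   private copy is initialized to, e.g. for hypothetical variables

     int s = 0;             // reduction(+:s)   -> build_zero_cst
     int p = 1;             // reduction(*:p)   -> integer_one_node
     int b = -1;            // reduction(&:b)   -> integer_minus_one_node
     int m = INT_MAX;       // reduction(min:m) -> TYPE_MAX_VALUE
     double d = -INFINITY;  // reduction(max:d) when HONOR_INFINITIES  */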
/* Construct the initialization value for reduction CLAUSE.  */

tree
omp_reduction_init (tree clause, tree type)
{
  return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
				OMP_CLAUSE_REDUCTION_CODE (clause), type);
}
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_modes modes;
  targetm.vectorize.autovectorize_vector_modes (&modes, true);
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	machine_mode alt_vmode;
	for (unsigned int j = 0; j < modes.length (); ++j)
	  if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
	      && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
	    vmode = alt_vmode;

	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type_for_mode (type, vmode);
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
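
/* Illustration only: for a hypothetical pointer "p",

     #pragma omp simd aligned(p : 32)

   stores 32 in OMP_CLAUSE_ALIGNED_ALIGNMENT and it is returned
   directly; for a bare "aligned(p)" the loop above picks the unit
   alignment of the widest preferred vector type, which the ALIGNED
   handling in lower_rec_input_clauses then passes to
   __builtin_assume_aligned.  */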
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

class omplow_simd_context
{
public:
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  tree idx;
  tree lane;
  tree lastlane;
  vec<tree, va_heap> simt_eargs;
  gimple_seq simt_dlist;
  poly_uint64_pod max_vf;
  bool is_simt;
};
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
	{
	  for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
	       c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		continue;

	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* UDR reductions are not supported yet for SIMT, disable
		     SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}

	      if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
		  && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
		{
		  /* Doing boolean operations on non-integral types is
		     for conformance only, it's not worth supporting this
		     for SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
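
/* Illustration only: for a hypothetical simd loop

     #pragma omp simd private(t)
     for (i = 0; i < n; i++)
       { t = a[i] * 2; b[i] = t; }

   the code above backs "t" with an "omp simd array" temporary
   T[max_vf], so each SIMD lane gets its own element (indexed by
   sctx->idx inside the body and by sctx->lane for the lane-local
   DECL_VALUE_EXPR).  */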
/* Helper function of lower_rec_input_clauses.  For a reference
   in simd reduction, add an underlying variable it will reference.  */

static void
handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
{
  tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
  if (TREE_CONSTANT (z))
    {
      z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
			      get_name (new_vard));
      gimple_add_tmp_var (z);
      TREE_ADDRESSABLE (z) = 1;
      z = build_fold_addr_expr_loc (loc, z);
      gimplify_assign (new_vard, z, ilist);
    }
}
/* Helper function for lower_rec_input_clauses.  Emit into ilist sequence
   code to emit (type) (tskred_temp[idx]).  */

static tree
task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
		     unsigned idx)
{
  unsigned HOST_WIDE_INT sz
    = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
  tree r = build2 (MEM_REF, pointer_sized_int_node,
		   tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
					       sz * idx));
  tree v = create_tmp_var (pointer_sized_int_node);
  gimple *g = gimple_build_assign (v, r);
  gimple_seq_add_stmt (ilist, g);
  if (!useless_type_conversion_p (type, pointer_sized_int_node))
    {
      v = create_tmp_var (type);
      g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
      gimple_seq_add_stmt (ilist, g);
    }
  return v;
}
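
/* Illustration only: for IDX 2 and TYPE void *, the sequence built
   above corresponds roughly to

     __UINTPTR_TYPE__ tmp = ((__UINTPTR_TYPE__ *) tskred_temp)[2];
     void *v = (void *) tmp;

   i.e. a pointer-sized load at byte offset 2 * sizeof (void *),
   followed by a conversion only when TYPE differs.  */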
/* Lower early initialization of privatized variable NEW_VAR
   if it needs an allocator (has allocate clause).  */

static bool
lower_private_allocate (tree var, tree new_var, tree &allocator,
			tree &allocate_ptr, gimple_seq *ilist,
			omp_context *ctx, bool is_ref, tree size)
{
  if (allocator)
    return false;
  gcc_assert (allocate_ptr == NULL_TREE);
  if (ctx->allocate_map
      && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
    if (tree *allocatorp = ctx->allocate_map->get (var))
      allocator = *allocatorp;
  if (allocator == NULL_TREE)
    return false;
  if (!is_ref && omp_privatize_by_reference (var))
    {
      allocator = NULL_TREE;
      return false;
    }

  unsigned HOST_WIDE_INT ialign = 0;
  if (TREE_CODE (allocator) == TREE_LIST)
    {
      ialign = tree_to_uhwi (TREE_VALUE (allocator));
      allocator = TREE_PURPOSE (allocator);
    }
  if (TREE_CODE (allocator) != INTEGER_CST)
    allocator = build_outer_var_ref (allocator, ctx);
  allocator = fold_convert (pointer_sized_int_node, allocator);
  if (TREE_CODE (allocator) != INTEGER_CST)
    {
      tree var = create_tmp_var (TREE_TYPE (allocator));
      gimplify_assign (var, allocator, ilist);
      allocator = var;
    }

  tree ptr_type, align, sz = size;
  if (TYPE_P (new_var))
    {
      ptr_type = build_pointer_type (new_var);
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
    }
  else if (is_ref)
    {
      ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
    }
  else
    {
      ptr_type = build_pointer_type (TREE_TYPE (new_var));
      ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
      if (sz == NULL_TREE)
	sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
    }
  align = build_int_cst (size_type_node, ialign);
  if (TREE_CODE (sz) != INTEGER_CST)
    {
      tree szvar = create_tmp_var (size_type_node);
      gimplify_assign (szvar, sz, ilist);
      sz = szvar;
    }
  allocate_ptr = create_tmp_var (ptr_type);
  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
  gimple *g = gimple_build_call (a, 3, align, sz, allocator);
  gimple_call_set_lhs (g, allocate_ptr);
  gimple_seq_add_stmt (ilist, g);
  if (!is_ref)
    {
      tree x = build_simple_mem_ref (allocate_ptr);
      TREE_THIS_NOTRAP (x) = 1;
      SET_DECL_VALUE_EXPR (new_var, x);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
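
/* Illustration only: for a hypothetical

     #pragma omp parallel firstprivate(x) allocate(my_alloc : x)

   the private copy is obtained from the requested allocator roughly as

     void *p = GOMP_alloc (__alignof__ (x), sizeof (x), my_alloc);
     ... x is accessed through *p via its DECL_VALUE_EXPR ...
     GOMP_free (p, my_alloc);

   with the GOMP_free emitted later by the callers' destructor
   handling; GOMP_alloc/GOMP_free are the libgomp entry points reached
   via BUILT_IN_GOMP_ALLOC and BUILT_IN_GOMP_FREE.  */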
/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
   from the receiver (aka child) side and initializers for REFERENCE_TYPE
   private variables.  Initialization statements go in ILIST, while calls
   to destructors go in DLIST.  */

static void
lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
			 omp_context *ctx, struct omp_for_data *fd)
{
  tree c, copyin_seq, x, ptr;
  bool copyin_by_ref = false;
  bool lastprivate_firstprivate = false;
  bool reduction_omp_orig_ref = false;
  int pass;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
  omplow_simd_context sctx = omplow_simd_context ();
  tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
  tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
  gimple_seq llist[4] = { };
  tree nonconst_simd_if = NULL_TREE;

  copyin_seq = NULL;
  sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);

  /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
     with data sharing clauses referencing variable sized vars.  That
     is unnecessarily hard to support and very unlikely to result in
     vectorized code anyway.  */
  if (is_simd)
    for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
	    sctx.max_vf = 1;
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LASTPRIVATE:
	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
	    sctx.max_vf = 1;
	  else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
	      if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
		sctx.max_vf = 1;
	    }
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
	    sctx.max_vf = 1;
	  else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
	      if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
		sctx.max_vf = 1;
	    }
	  break;
	case OMP_CLAUSE_IF:
	  if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
	    sctx.max_vf = 1;
	  else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
	    nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
	  break;
	case OMP_CLAUSE_SIMDLEN:
	  if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
	    sctx.max_vf = 1;
	  break;
	case OMP_CLAUSE__CONDTEMP_:
	  /* FIXME: lastprivate(conditional:) not handled for SIMT yet.  */
	  if (sctx.is_simt)
	    sctx.max_vf = 1;
	  break;
	default:
	  continue;
	}

  /* Add a placeholder for simduid.  */
  if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
    sctx.simt_eargs.safe_push (NULL_TREE);
  unsigned task_reduction_cnt = 0;
  unsigned task_reduction_cntorig = 0;
  unsigned task_reduction_cnt_full = 0;
  unsigned task_reduction_cntorig_full = 0;
  unsigned task_reduction_other_cnt = 0;
  tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
  tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
  /* Do all the fixed sized types in the first pass, and the variable sized
     types in the second pass.  This makes sure that the scalar arguments to
     the variable sized types are processed before we use them in the
     variable sized operations.  For task reductions we use 4 passes, in the
     first two we ignore them, in the third one gather arguments for
     GOMP_task_reduction_remap call and in the last pass actually handle
     the task reductions.  */
  for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
			 ? 4 : 2); pass++)
    {
      if (pass == 2 && task_reduction_cnt)
	{
	  tskred_atype
	    = build_array_type_nelts (ptr_type_node, task_reduction_cnt
				      + task_reduction_cntorig);
	  tskred_avar = create_tmp_var_raw (tskred_atype);
	  gimple_add_tmp_var (tskred_avar);
	  TREE_ADDRESSABLE (tskred_avar) = 1;
	  task_reduction_cnt_full = task_reduction_cnt;
	  task_reduction_cntorig_full = task_reduction_cntorig;
	}
      else if (pass == 3 && task_reduction_cnt)
	{
	  x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
	  gimple *g
	    = gimple_build_call (x, 3, size_int (task_reduction_cnt),
				 size_int (task_reduction_cntorig),
				 build_fold_addr_expr (tskred_avar));
	  gimple_seq_add_stmt (ilist, g);
	}
      if (pass == 3 && task_reduction_other_cnt)
	{
	  /* For reduction clauses, build
	     tskred_base = (void *) tskred_temp[2]
			   + omp_get_thread_num () * tskred_temp[1]
	     or if tskred_temp[1] is known to be constant, that constant
	     directly.  This is the start of the private reduction copy block
	     for the current thread.  */
	  tree v = create_tmp_var (integer_type_node);
	  x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
	  gimple *g = gimple_build_call (x, 0);
	  gimple_call_set_lhs (g, v);
	  gimple_seq_add_stmt (ilist, g);
	  c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tskred_temp = OMP_CLAUSE_DECL (c);
	  if (is_taskreg_ctx (ctx))
	    tskred_temp = lookup_decl (tskred_temp, ctx);
	  tree v2 = create_tmp_var (sizetype);
	  g = gimple_build_assign (v2, NOP_EXPR, v);
	  gimple_seq_add_stmt (ilist, g);
	  if (ctx->task_reductions[0])
	    v = fold_convert (sizetype, ctx->task_reductions[0]);
	  else
	    v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
	  tree v3 = create_tmp_var (sizetype);
	  g = gimple_build_assign (v3, MULT_EXPR, v2, v);
	  gimple_seq_add_stmt (ilist, g);
	  v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
	  tskred_base = create_tmp_var (ptr_type_node);
	  g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
	  gimple_seq_add_stmt (ilist, g);
	}
      task_reduction_cnt = 0;
      task_reduction_cntorig = 0;
      task_reduction_other_cnt = 0;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	{
	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
	  tree var, new_var;
	  bool by_ref;
	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	  bool task_reduction_p = false;
	  bool task_reduction_needs_orig_p = false;
	  tree cond = NULL_TREE;
	  tree allocator, allocate_ptr;

	  switch (c_kind)
	    {
	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
		continue;
	      break;
	    case OMP_CLAUSE_SHARED:
	      /* Ignore shared directives in teams construct inside
		 of target construct.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && !is_host_teams_ctx (ctx))
		continue;
	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
		{
		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
			      || is_global_var (OMP_CLAUSE_DECL (c)));
		  continue;
		}
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_COPYIN:
	      break;
	    case OMP_CLAUSE_LINEAR:
	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		lastprivate_firstprivate = true;
	      break;
	    case OMP_CLAUSE_REDUCTION:
	    case OMP_CLAUSE_IN_REDUCTION:
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		  || is_task_ctx (ctx)
		  || OMP_CLAUSE_REDUCTION_TASK (c))
		{
		  task_reduction_p = true;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
		    {
		      task_reduction_other_cnt++;
		      if (pass == 2)
			continue;
		    }
		  else
		    task_reduction_cnt++;
		  if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		    {
		      var = OMP_CLAUSE_DECL (c);
		      /* If var is a global variable that isn't privatized
			 in outer contexts, we don't need to look up the
			 original address, it is always the address of the
			 global variable itself.  */
		      if (!DECL_P (var)
			  || omp_privatize_by_reference (var)
			  || !is_global_var
				(maybe_lookup_decl_in_outer_ctx (var, ctx)))
			{
			  task_reduction_needs_orig_p = true;
			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
			    task_reduction_cntorig++;
			}
		    }
		}
	      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		reduction_omp_orig_ref = true;
	      break;
	    case OMP_CLAUSE__REDUCTEMP_:
	      if (!is_taskreg_ctx (ctx))
		continue;
	      /* FALLTHRU */
	    case OMP_CLAUSE__LOOPTEMP_:
	      /* Handle _looptemp_/_reductemp_ clauses only on
		 parallel/task.  */
	      if (fd)
		continue;
	      break;
	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		{
		  lastprivate_firstprivate = true;
		  if (pass != 0 || is_taskloop_ctx (ctx))
		    continue;
		}
	      /* Even without corresponding firstprivate, if
		 decl is Fortran allocatable, it needs outer var
		 reference.  */
	      else if (pass == 0
		       && lang_hooks.decls.omp_private_outer_ref
							(OMP_CLAUSE_DECL (c)))
		lastprivate_firstprivate = true;
	      break;
	    case OMP_CLAUSE_ALIGNED:
	      if (pass != 1)
		continue;
	      var = OMP_CLAUSE_DECL (c);
	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
		  && !is_global_var (var))
		{
		  new_var = maybe_lookup_decl (var, ctx);
		  if (new_var == NULL_TREE)
		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
		  tree alarg = omp_clause_aligned_alignment (c);
		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
		  gimplify_and_add (x, ilist);
		}
	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		       && is_global_var (var))
		{
		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
		  new_var = lookup_decl (var, ctx);
		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  t = build_fold_addr_expr_loc (clause_loc, t);
		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
		  tree alarg = omp_clause_aligned_alignment (c);
		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
		  t = fold_convert_loc (clause_loc, ptype, t);
		  x = create_tmp_var (ptype);
		  t = build2 (MODIFY_EXPR, ptype, x, t);
		  gimplify_and_add (t, ilist);
		  t = build_simple_mem_ref_loc (clause_loc, x);
		  SET_DECL_VALUE_EXPR (new_var, t);
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      continue;
	    case OMP_CLAUSE__CONDTEMP_:
	      if (is_parallel_ctx (ctx)
		  || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
		break;
	      continue;
	    default:
	      continue;
	    }

	  if (task_reduction_p != (pass >= 2))
	    continue;
	  allocator = NULL_TREE;
	  allocate_ptr = NULL_TREE;
	  new_var = var = OMP_CLAUSE_DECL (c);
	  if ((c_kind == OMP_CLAUSE_REDUCTION
	       || c_kind == OMP_CLAUSE_IN_REDUCTION)
	      && TREE_CODE (var) == MEM_REF)
	    {
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == INDIRECT_REF
		  || TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	    }
	  if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
	    {
	      splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
	      new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
	    }
	  else if (c_kind != OMP_CLAUSE_COPYIN)
	    new_var = lookup_decl (var, ctx);

	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
	    {
	      if (pass != 0)
		continue;
	    }
	  /* C/C++ array section reductions.  */
	  else if ((c_kind == OMP_CLAUSE_REDUCTION
		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
		   && var != OMP_CLAUSE_DECL (c))
	    {
	      if (pass == 0)
		continue;
	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);

	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (orig_var, 1);
		  if (is_omp_target (ctx->stmt))
		    b = NULL_TREE;
		  else
		    b = maybe_lookup_decl (b, ctx);
		  if (b == NULL)
		    {
		      b = TREE_OPERAND (orig_var, 1);
		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		    }
		  if (integer_zerop (bias))
		    bias = b;
		  else
		    {
		      bias = fold_convert_loc (clause_loc,
					       TREE_TYPE (b), bias);
		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					      TREE_TYPE (b), b, bias);
		    }
		  orig_var = TREE_OPERAND (orig_var, 0);
		}
	      if (pass == 2)
		{
		  tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  if (is_global_var (out)
		      && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
		      && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
			  || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
			      != POINTER_TYPE)))
		    x = var;
		  else if (is_omp_target (ctx->stmt))
		    x = out;
		  else
		    {
		      bool by_ref = use_pointer_for_field (var, NULL);
		      x = build_receiver_ref (var, by_ref, ctx);
		      if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
			  && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
			      == POINTER_TYPE))
			x = build_fold_addr_expr (x);
		    }
		  if (TREE_CODE (orig_var) == INDIRECT_REF)
		    x = build_simple_mem_ref (x);
		  else if (TREE_CODE (orig_var) == ADDR_EXPR)
		    {
		      if (var == TREE_OPERAND (orig_var, 0))
			x = build_fold_addr_expr (x);
		    }
		  bias = fold_convert (sizetype, bias);
		  x = fold_convert (ptr_type_node, x);
		  x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				       TREE_TYPE (x), x, bias);
		  unsigned cnt = task_reduction_cnt - 1;
		  if (!task_reduction_needs_orig_p)
		    cnt += (task_reduction_cntorig_full
			    - task_reduction_cntorig);
		  else
		    cnt = task_reduction_cntorig - 1;
		  tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				   size_int (cnt), NULL_TREE, NULL_TREE);
		  gimplify_assign (r, x, ilist);
		  continue;
		}
	      if (TREE_CODE (orig_var) == INDIRECT_REF
		  || TREE_CODE (orig_var) == ADDR_EXPR)
		orig_var = TREE_OPERAND (orig_var, 0);
	      tree d = OMP_CLAUSE_DECL (c);
	      tree type = TREE_TYPE (d);
	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      tree sz = v;
	      const char *name = get_name (orig_var);
	      if (pass != 3 && !TREE_CONSTANT (v))
		{
		  tree t;
		  if (is_omp_target (ctx->stmt))
		    t = NULL_TREE;
		  else
		    t = maybe_lookup_decl (v, ctx);
		  if (t)
		    v = t;
		  else
		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
				       TREE_TYPE (v), v,
				       build_int_cst (TREE_TYPE (v), 1));
		  sz = fold_build2_loc (clause_loc, MULT_EXPR,
					TREE_TYPE (v), t,
					TYPE_SIZE_UNIT (TREE_TYPE (type)));
		}
	      if (pass == 3)
		{
		  tree xv = create_tmp_var (ptr_type_node);
		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		    {
		      unsigned cnt = task_reduction_cnt - 1;
		      if (!task_reduction_needs_orig_p)
			cnt += (task_reduction_cntorig_full
				- task_reduction_cntorig);
		      else
			cnt = task_reduction_cntorig - 1;
		      x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				  size_int (cnt), NULL_TREE, NULL_TREE);

		      gimple *g = gimple_build_assign (xv, x);
		      gimple_seq_add_stmt (ilist, g);
		    }
		  else
		    {
		      unsigned int idx = *ctx->task_reduction_map->get (c);
		      tree off;
		      if (ctx->task_reductions[1 + idx])
			off = fold_convert (sizetype,
					    ctx->task_reductions[1 + idx]);
		      else
			off = task_reduction_read (ilist, tskred_temp,
						   sizetype,
						   7 + 3 * idx + 1);
		      gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
						       tskred_base, off);
		      gimple_seq_add_stmt (ilist, g);
		    }
		  x = fold_convert (build_pointer_type (boolean_type_node),
				    xv);
		  if (TREE_CONSTANT (v))
		    x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
				     TYPE_SIZE_UNIT (type));
		  else
		    {
		      tree t;
		      if (is_omp_target (ctx->stmt))
			t = NULL_TREE;
		      else
			t = maybe_lookup_decl (v, ctx);
		      if (t)
			v = t;
		      else
			v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		      gimplify_expr (&v, ilist, NULL, is_gimple_val,
				     fb_rvalue);
		      t = fold_build2_loc (clause_loc, PLUS_EXPR,
					   TREE_TYPE (v), v,
					   build_int_cst (TREE_TYPE (v), 1));
		      t = fold_build2_loc (clause_loc, MULT_EXPR,
					   TREE_TYPE (v), t,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		      x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
		    }
		  cond = create_tmp_var (TREE_TYPE (x));
		  gimplify_assign (cond, x, ilist);
		  x = xv;
		}
	      else if (lower_private_allocate (var, type, allocator,
					       allocate_ptr, ilist, ctx,
					       true,
					       TREE_CONSTANT (v)
					       ? TYPE_SIZE_UNIT (type)
					       : sz))
		x = allocate_ptr;
	      else if (TREE_CONSTANT (v))
		{
		  x = create_tmp_var_raw (type, name);
		  gimple_add_tmp_var (x);
		  TREE_ADDRESSABLE (x) = 1;
		  x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else
		{
		  tree atmp
		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
		  x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
		}

	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      x = fold_convert_loc (clause_loc, ptype, x);
	      tree y = create_tmp_var (ptype, name);
	      gimplify_assign (y, x, ilist);
	      x = y;
	      tree yb = y;

	      if (!integer_zerop (bias))
		{
		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
					   bias);
		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
					 x);
		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
					pointer_sized_int_node, yb, bias);
		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
		  yb = create_tmp_var (ptype, name);
		  gimplify_assign (yb, x, ilist);
		  x = yb;
		}

	      d = TREE_OPERAND (d, 0);
	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
		d = TREE_OPERAND (d, 0);
	      if (TREE_CODE (d) == ADDR_EXPR)
		{
		  if (orig_var != var)
		    {
		      gcc_assert (is_variable_sized (orig_var));
		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
					    x);
		      gimplify_assign (new_var, x, ilist);
		      tree new_orig_var = lookup_decl (orig_var, ctx);
		      tree t = build_fold_indirect_ref (new_var);
		      DECL_IGNORED_P (new_var) = 0;
		      TREE_THIS_NOTRAP (t) = 1;
		      SET_DECL_VALUE_EXPR (new_orig_var, t);
		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
		    }
		  else
		    {
		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
				  build_int_cst (ptype, 0));
		      SET_DECL_VALUE_EXPR (new_var, x);
		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		    }
		}
	      else
		{
		  gcc_assert (orig_var == var);
		  if (TREE_CODE (d) == INDIRECT_REF)
		    {
		      x = create_tmp_var (ptype, name);
		      TREE_ADDRESSABLE (x) = 1;
		      gimplify_assign (x, yb, ilist);
		      x = build_fold_addr_expr_loc (clause_loc, x);
		    }
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  gimplify_assign (new_var, x, ilist);
		}
	      /* GOMP_taskgroup_reduction_register memsets the whole
		 array to zero.  If the initializer is zero, we don't
		 need to initialize it again, just mark it as ever
		 used unconditionally, i.e. cond = true.  */
	      if (cond
		  && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
		  && initializer_zerop (omp_reduction_init (c,
							    TREE_TYPE (type))))
		{
		  gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
						   boolean_true_node);
		  gimple_seq_add_stmt (ilist, g);
		  continue;
		}
	      tree end = create_artificial_label (UNKNOWN_LOCATION);
	      if (cond)
		{
		  gimple *g;
		  if (!is_parallel_ctx (ctx))
		    {
		      tree condv = create_tmp_var (boolean_type_node);
		      g = gimple_build_assign (condv,
					       build_simple_mem_ref (cond));
		      gimple_seq_add_stmt (ilist, g);
		      tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
		      g = gimple_build_cond (NE_EXPR, condv,
					     boolean_false_node, end, lab1);
		      gimple_seq_add_stmt (ilist, g);
		      gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
		    }
		  g = gimple_build_assign (build_simple_mem_ref (cond),
					   boolean_true_node);
		  gimple_seq_add_stmt (ilist, g);
		}

	      tree y1 = create_tmp_var (ptype);
	      gimplify_assign (y1, y, ilist);
	      tree i2 = NULL_TREE, y2 = NULL_TREE;
	      tree body2 = NULL_TREE, end2 = NULL_TREE;
	      tree y3 = NULL_TREE, y4 = NULL_TREE;
	      if (task_reduction_needs_orig_p)
		{
		  y3 = create_tmp_var (ptype);
		  tree ref;
		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		    ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				  size_int (task_reduction_cnt_full
					    + task_reduction_cntorig - 1),
				  NULL_TREE, NULL_TREE);
		  else
		    {
		      unsigned int idx = *ctx->task_reduction_map->get (c);
		      ref = task_reduction_read (ilist, tskred_temp, ptype,
						 7 + 3 * idx);
		    }
		  gimplify_assign (y3, ref, ilist);
		}
	      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
		{
		  if (pass != 3)
		    {
		      y2 = create_tmp_var (ptype);
		      gimplify_assign (y2, y, ilist);
		    }
		  if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		    {
		      tree ref = build_outer_var_ref (var, ctx);
		      /* For ref build_outer_var_ref already performs this.  */
		      if (TREE_CODE (d) == INDIRECT_REF)
			gcc_assert (omp_privatize_by_reference (var));
		      else if (TREE_CODE (d) == ADDR_EXPR)
			ref = build_fold_addr_expr (ref);
		      else if (omp_privatize_by_reference (var))
			ref = build_fold_addr_expr (ref);
		      ref = fold_convert_loc (clause_loc, ptype, ref);
		      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
			  && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
			{
			  y3 = create_tmp_var (ptype);
			  gimplify_assign (y3, unshare_expr (ref), ilist);
			}
		      if (is_simd)
			{
			  y4 = create_tmp_var (ptype);
			  gimplify_assign (y4, ref, dlist);
			}
		    }
		}
	      tree i = create_tmp_var (TREE_TYPE (v));
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
	      if (y2)
		{
		  i2 = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0),
				   dlist);
		  body2 = create_artificial_label (UNKNOWN_LOCATION);
		  end2 = create_artificial_label (UNKNOWN_LOCATION);
		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
		}
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  SET_DECL_VALUE_EXPR (decl_placeholder,
				       build_simple_mem_ref (y1));
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  SET_DECL_VALUE_EXPR (placeholder,
				       y3 ? build_simple_mem_ref (y3)
				       : error_mark_node);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  x = lang_hooks.decls.omp_clause_default_ctor
			(c, build_simple_mem_ref (y1),
			 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
		  if (x)
		    gimplify_and_add (x, ilist);
		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		    {
		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (ilist, tseq);
		    }
		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		  if (is_simd)
		    {
		      SET_DECL_VALUE_EXPR (decl_placeholder,
					   build_simple_mem_ref (y2));
		      SET_DECL_VALUE_EXPR (placeholder,
					   build_simple_mem_ref (y4));
		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (dlist, tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    }
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
		  if (y2)
		    {
		      x = lang_hooks.decls.omp_clause_dtor
			    (c, build_simple_mem_ref (y2));
		      if (x)
			gimplify_and_add (x, dlist);
		    }
		}
	      else
		{
		  x = omp_reduction_init (c, TREE_TYPE (type));
		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);

		  /* reduction(-:var) sums up the partial results, so it
		     acts identically to reduction(+:var).  */
		  if (code == MINUS_EXPR)
		    code = PLUS_EXPR;

		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
		  if (is_simd)
		    {
		      x = build2 (code, TREE_TYPE (type),
				  build_simple_mem_ref (y4),
				  build_simple_mem_ref (y2));
		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
		    }
		}
	      gimple *g
		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (ilist, g);
	      if (y3)
		{
		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  gimple_seq_add_stmt (ilist, g);
		}
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (ilist, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
	      gimple_seq_add_stmt (ilist, g);
	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
	      if (y2)
		{
		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  gimple_seq_add_stmt (dlist, g);
		  if (y4)
		    {
		      g = gimple_build_assign
			    (y4, POINTER_PLUS_EXPR, y4,
			     TYPE_SIZE_UNIT (TREE_TYPE (type)));
		      gimple_seq_add_stmt (dlist, g);
		    }
		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
					   build_int_cst (TREE_TYPE (i2), 1));
		  gimple_seq_add_stmt (dlist, g);
		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
		  gimple_seq_add_stmt (dlist, g);
		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
		}
	      if (allocator)
		{
		  tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
		  g = gimple_build_call (f, 2, allocate_ptr, allocator);
		  gimple_seq_add_stmt (dlist, g);
		}
	      continue;
	    }
	  else if (pass == 2)
	    {
	      tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (is_global_var (out))
		x = var;
	      else if (is_omp_target (ctx->stmt))
		x = out;
	      else
		{
		  bool by_ref = use_pointer_for_field (var, ctx);
		  x = build_receiver_ref (var, by_ref, ctx);
		}
	      if (!omp_privatize_by_reference (var))
		x = build_fold_addr_expr (x);
	      x = fold_convert (ptr_type_node, x);
	      unsigned cnt = task_reduction_cnt - 1;
	      if (!task_reduction_needs_orig_p)
		cnt += task_reduction_cntorig_full - task_reduction_cntorig;
	      else
		cnt = task_reduction_cntorig - 1;
	      tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
			       size_int (cnt), NULL_TREE, NULL_TREE);
	      gimplify_assign (r, x, ilist);
	      continue;
	    }
	  else if (pass == 3)
	    {
	      tree type = TREE_TYPE (new_var);
	      if (!omp_privatize_by_reference (var))
		type = build_pointer_type (type);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		{
		  unsigned cnt = task_reduction_cnt - 1;
		  if (!task_reduction_needs_orig_p)
		    cnt += (task_reduction_cntorig_full
			    - task_reduction_cntorig);
		  else
		    cnt = task_reduction_cntorig - 1;
		  x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
			      size_int (cnt), NULL_TREE, NULL_TREE);
		}
	      else
		{
		  unsigned int idx = *ctx->task_reduction_map->get (c);
		  tree off;
		  if (ctx->task_reductions[1 + idx])
		    off = fold_convert (sizetype,
					ctx->task_reductions[1 + idx]);
		  else
		    off = task_reduction_read (ilist, tskred_temp, sizetype,
					       7 + 3 * idx + 1);
		  x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
				   tskred_base, off);
		}
	      x = fold_convert (type, x);
	      tree t;
	      if (omp_privatize_by_reference (var))
		{
		  gimplify_assign (new_var, x, ilist);
		  t = new_var;
		  new_var = build_simple_mem_ref (new_var);
		}
	      else
		{
		  t = create_tmp_var (type);
		  gimplify_assign (t, x, ilist);
		  SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      t = fold_convert (build_pointer_type (boolean_type_node), t);
	      t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
			       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      cond = create_tmp_var (TREE_TYPE (t));
	      gimplify_assign (cond, t, ilist);
	    }
	  else if (is_variable_sized (var))
	    {
	      /* For variable sized types, we need to allocate the
		 actual storage here.  Call alloca and store the
		 result in the pointer decl that we created elsewhere.  */
	      if (pass == 0)
		continue;

	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
		{
		  tree tmp;

		  ptr = DECL_VALUE_EXPR (new_var);
		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
		  ptr = TREE_OPERAND (ptr, 0);
		  gcc_assert (DECL_P (ptr));
		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));

		  if (lower_private_allocate (var, new_var, allocator,
					      allocate_ptr, ilist, ctx,
					      false, x))
		    tmp = allocate_ptr;
		  else
		    {
		      /* void *tmp = __builtin_alloca */
		      tree atmp
			= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		      gcall *stmt
			= gimple_build_call (atmp, 2, x,
					     size_int (DECL_ALIGN (var)));
		      cfun->calls_alloca = 1;
		      tmp = create_tmp_var_raw (ptr_type_node);
		      gimple_add_tmp_var (tmp);
		      gimple_call_set_lhs (stmt, tmp);

		      gimple_seq_add_stmt (ilist, stmt);
		    }

		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
		  gimplify_assign (ptr, x, ilist);
		}
	    }
	  else if (omp_privatize_by_reference (var)
		   && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
		       || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
	    {
	      /* For references that are being privatized for Fortran,
		 allocate new backing storage for the new pointer
		 variable.  This allows us to avoid changing all the
		 code that expects a pointer to something that expects
		 a direct variable.  */
	      if (pass == 0)
		continue;

	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
		{
		  x = build_receiver_ref (var, false, ctx);
		  if (ctx->allocate_map)
		    if (tree *allocatep = ctx->allocate_map->get (var))
		      {
			allocator = *allocatep;
			if (TREE_CODE (allocator) == TREE_LIST)
			  allocator = TREE_PURPOSE (allocator);
			if (TREE_CODE (allocator) != INTEGER_CST)
			  allocator = build_outer_var_ref (allocator, ctx);
			allocator = fold_convert (pointer_sized_int_node,
						  allocator);
			allocate_ptr = unshare_expr (x);
		      }
		  if (allocator == NULL_TREE)
		    x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else if (lower_private_allocate (var, new_var, allocator,
					       allocate_ptr,
					       ilist, ctx, true, x))
		x = allocate_ptr;
	      else if (TREE_CONSTANT (x))
		{
		  /* For reduction in SIMD loop, defer adding the
		     initialization of the reference, because if we decide
		     to use SIMD array for it, the initialization could cause
		     expansion ICE.  Ditto for other privatization clauses.  */
		  if (is_simd)
		    x = NULL_TREE;
		  else
		    {
		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					      get_name (var));
		      gimple_add_tmp_var (x);
		      TREE_ADDRESSABLE (x) = 1;
		      x = build_fold_addr_expr_loc (clause_loc, x);
		    }
		}
	      else
		{
		  tree atmp
		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
		  tree al = size_int (TYPE_ALIGN (rtype));
		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		}

	      if (x)
		{
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  gimplify_assign (new_var, x, ilist);
		}

	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	    }
	  else if ((c_kind == OMP_CLAUSE_REDUCTION
		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      if (pass == 0)
		continue;
	    }
	  else if (pass != 0)
	    continue;

	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	      /* Ignore shared directives in teams construct inside
		 target construct.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && !is_host_teams_ctx (ctx))
		continue;
	      /* Shared global vars are just accessed directly.  */
	      if (is_global_var (new_var))
		break;
	      /* For taskloop firstprivate/lastprivate, represented
		 as firstprivate and shared clause on the task, new_var
		 is the firstprivate var.  */
	      if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
		break;
	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
		 needs to be delayed until after fixup_child_record_type so
		 that we get the correct type during the dereference.  */
	      by_ref = use_pointer_for_field (var, ctx);
	      x = build_receiver_ref (var, by_ref, ctx);
	      SET_DECL_VALUE_EXPR (new_var, x);
	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;

	      /* ??? If VAR is not passed by reference, and the variable
		 hasn't been initialized yet, then we'll get a warning for
		 the store into the omp_data_s structure.  Ideally, we'd be
		 able to notice this and not store anything at all, but
		 we're generating code too early.  Suppress the warning.  */
	      if (!by_ref)
		suppress_warning (var, OPT_Wuninitialized);
	      break;
	    case OMP_CLAUSE__CONDTEMP_:
	      if (is_parallel_ctx (ctx))
		{
		  x = build_receiver_ref (var, false, ctx);
		  SET_DECL_VALUE_EXPR (new_var, x);
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
		{
		  x = build_zero_cst (TREE_TYPE (var));
		  goto do_private;
		}
	      break;
	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		break;
	      /* FALLTHRU */

	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
		x = build_outer_var_ref (var, ctx);
	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
		{
		  if (is_task_ctx (ctx))
		    x = build_receiver_ref (var, false, ctx);
		  else
		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
		}
	      else
		x = NULL;
	    do_private:
	      tree nx;
	      bool copy_ctor;
	      copy_ctor = false;
	      lower_private_allocate (var, new_var, allocator, allocate_ptr,
				      ilist, ctx, false, NULL_TREE);
	      nx = unshare_expr (new_var);
	      if (is_simd
		  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
		copy_ctor = true;
	      if (copy_ctor)
		nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
	      else
		nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
	      if (is_simd)
		{
		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
		  if ((TREE_ADDRESSABLE (new_var) || nx || y
		       || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
			   && (gimple_omp_for_collapse (ctx->stmt) != 1
			       || (gimple_omp_for_index (ctx->stmt, 0)
				   != new_var)))
		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
		       || omp_privatize_by_reference (var))
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar))
		    {
		      if (omp_privatize_by_reference (var))
			{
			  gcc_assert (TREE_CODE (new_var) == MEM_REF);
			  tree new_vard = TREE_OPERAND (new_var, 0);
			  gcc_assert (DECL_P (new_vard));
			  SET_DECL_VALUE_EXPR (new_vard,
					       build_fold_addr_expr (lvar));
			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
			}

		      if (nx)
			{
			  tree iv = unshare_expr (ivar);
			  if (copy_ctor)
			    x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
								       x);
			  else
			    x = lang_hooks.decls.omp_clause_default_ctor (c,
									  iv,
									  x);
			}
		      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
			{
			  x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
				      unshare_expr (ivar), x);
			  nx = x;
			}
		      if (nx && x)
			gimplify_and_add (x, &llist[0]);
		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
			  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
			{
			  tree v = new_var;
			  if (!DECL_P (v))
			    {
			      gcc_assert (TREE_CODE (v) == MEM_REF);
			      v = TREE_OPERAND (v, 0);
			      gcc_assert (DECL_P (v));
			    }
			  v = *ctx->lastprivate_conditional_map->get (v);
			  tree t = create_tmp_var (TREE_TYPE (v));
			  tree z = build_zero_cst (TREE_TYPE (v));
			  tree orig_v
			    = build_outer_var_ref (var, ctx,
						   OMP_CLAUSE_LASTPRIVATE);
			  gimple_seq_add_stmt (dlist,
					       gimple_build_assign (t, z));
			  gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
			  tree civar = DECL_VALUE_EXPR (v);
			  gcc_assert (TREE_CODE (civar) == ARRAY_REF);
			  civar = unshare_expr (civar);
			  TREE_OPERAND (civar, 1) = sctx.idx;
			  x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
				      unshare_expr (civar));
			  x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
				      build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
					      orig_v, unshare_expr (ivar)));
			  tree cond = build2 (LT_EXPR, boolean_type_node, t,
					      civar);
			  x = build3 (COND_EXPR, void_type_node, cond, x,
				      void_node);
			  gimple_seq tseq = NULL;
			  gimplify_and_add (x, &tseq);
			  if (ctx->outer)
			    lower_omp (&tseq, ctx->outer);
			  gimple_seq_add_seq (&llist[1], tseq);
			}
		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
			  && ctx->for_simd_scan_phase)
			{
			  x = unshare_expr (ivar);
			  tree orig_v
			    = build_outer_var_ref (var, ctx,
						   OMP_CLAUSE_LASTPRIVATE);
			  x = lang_hooks.decls.omp_clause_assign_op (c, x,
								     orig_v);
			  gimplify_and_add (x, &llist[0]);
			}
		      if (y)
			{
			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
			  if (y)
			    gimplify_and_add (y, &llist[1]);
			}
		      break;
		    }
		  if (omp_privatize_by_reference (var))
		    {
		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
		      tree new_vard = TREE_OPERAND (new_var, 0);
		      gcc_assert (DECL_P (new_vard));
		      tree type = TREE_TYPE (TREE_TYPE (new_vard));
		      x = TYPE_SIZE_UNIT (type);
		      if (TREE_CONSTANT (x))
			{
			  x = create_tmp_var_raw (type, get_name (var));
			  gimple_add_tmp_var (x);
			  TREE_ADDRESSABLE (x) = 1;
			  x = build_fold_addr_expr_loc (clause_loc, x);
			  x = fold_convert_loc (clause_loc,
						TREE_TYPE (new_vard), x);
			  gimplify_assign (new_vard, x, ilist);
			}
		    }
		}
	      if (nx)
		gimplify_and_add (nx, ilist);
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  && is_simd
		  && ctx->for_simd_scan_phase)
		{
		  tree orig_v = build_outer_var_ref (var, ctx,
						     OMP_CLAUSE_LASTPRIVATE);
		  x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
							     orig_v);
		  gimplify_and_add (x, ilist);
		}
	      /* FALLTHRU */

	    do_dtor:
	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		gimplify_and_add (x, dlist);
	      if (allocator)
		{
		  if (!is_gimple_val (allocator))
		    {
		      tree avar = create_tmp_var (TREE_TYPE (allocator));
		      gimplify_assign (avar, allocator, dlist);
		      allocator = avar;
		    }
		  if (!is_gimple_val (allocate_ptr))
		    {
		      tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
		      gimplify_assign (apvar, allocate_ptr, dlist);
		      allocate_ptr = apvar;
		    }
		  tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
		  gimple *g
		    = gimple_build_call (f, 2, allocate_ptr, allocator);
		  gimple_seq_add_stmt (dlist, g);
		}
	      break;
    case OMP_CLAUSE_LINEAR:
      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	goto do_firstprivate;
      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
	x = NULL;
      else
	x = build_outer_var_ref (var, ctx);
      goto do_private;

    case OMP_CLAUSE_FIRSTPRIVATE:
      if (is_task_ctx (ctx))
	{
	  if ((omp_privatize_by_reference (var)
	       && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
	      || is_variable_sized (var))
	    goto do_dtor;
	  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
								  ctx))
		   || use_pointer_for_field (var, NULL))
	    {
	      x = build_receiver_ref (var, false, ctx);
	      if (ctx->allocate_map)
		if (tree *allocatep = ctx->allocate_map->get (var))
		  {
		    allocator = *allocatep;
		    if (TREE_CODE (allocator) == TREE_LIST)
		      allocator = TREE_PURPOSE (allocator);
		    if (TREE_CODE (allocator) != INTEGER_CST)
		      allocator = build_outer_var_ref (allocator, ctx);
		    allocator = fold_convert (pointer_sized_int_node,
					      allocator);
		    allocate_ptr = unshare_expr (x);
		    x = build_simple_mem_ref (x);
		    TREE_THIS_NOTRAP (x) = 1;
		  }
	      SET_DECL_VALUE_EXPR (new_var, x);
	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	      goto do_dtor;
	    }
	}
      if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
	  && omp_privatize_by_reference (var))
	{
	  x = build_outer_var_ref (var, ctx);
	  gcc_assert (TREE_CODE (x) == MEM_REF
		      && integer_zerop (TREE_OPERAND (x, 1)));
	  x = TREE_OPERAND (x, 0);
	  x = lang_hooks.decls.omp_clause_copy_ctor
		(c, unshare_expr (new_var), x);
	  gimplify_and_add (x, ilist);
	  goto do_dtor;
	}
    do_firstprivate:
      lower_private_allocate (var, new_var, allocator, allocate_ptr,
			      ilist, ctx, false, NULL_TREE);
      x = build_outer_var_ref (var, ctx);
      if (is_simd)
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && gimple_omp_for_combined_into_p (ctx->stmt))
	    {
	      tree t = OMP_CLAUSE_LINEAR_STEP (c);
	      tree stept = TREE_TYPE (t);
	      tree ct = omp_find_clause (clauses,
					 OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (ct);
	      tree l = OMP_CLAUSE_DECL (ct);
	      tree n1 = fd->loop.n1;
	      tree step = fd->loop.step;
	      tree itype = TREE_TYPE (l);
	      if (POINTER_TYPE_P (itype))
		itype = signed_type_for (itype);
	      l = fold_build2 (MINUS_EXPR, itype, l, n1);
	      if (TYPE_UNSIGNED (itype)
		  && fd->loop.cond_code == GT_EXPR)
		l = fold_build2 (TRUNC_DIV_EXPR, itype,
				 fold_build1 (NEGATE_EXPR, itype, l),
				 fold_build1 (NEGATE_EXPR,
					      itype, step));
	      else
		l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
	      t = fold_build2 (MULT_EXPR, stept,
			       fold_convert (stept, l), t);

	      if (OMP_CLAUSE_LINEAR_ARRAY (c))
		{
		  if (omp_privatize_by_reference (var))
		    {
		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
		      tree new_vard = TREE_OPERAND (new_var, 0);
		      gcc_assert (DECL_P (new_vard));
		      tree type = TREE_TYPE (TREE_TYPE (new_vard));
		      nx = TYPE_SIZE_UNIT (type);
		      if (TREE_CONSTANT (nx))
			{
			  nx = create_tmp_var_raw (type,
						   get_name (var));
			  gimple_add_tmp_var (nx);
			  TREE_ADDRESSABLE (nx) = 1;
			  nx = build_fold_addr_expr_loc (clause_loc,
							 nx);
			  nx = fold_convert_loc (clause_loc,
						 TREE_TYPE (new_vard),
						 nx);
			  gimplify_assign (new_vard, nx, ilist);
			}
		    }
		  x = lang_hooks.decls.omp_clause_linear_ctor
			(c, new_var, x, t);
		  gimplify_and_add (x, ilist);
		  goto do_dtor;
		}

	      if (POINTER_TYPE_P (TREE_TYPE (x)))
		x = fold_build2 (POINTER_PLUS_EXPR,
				 TREE_TYPE (x), x, t);
	      else
		x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
	    }

	  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
	       || TREE_ADDRESSABLE (new_var)
	       || omp_privatize_by_reference (var))
	      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
					       ivar, lvar))
	    {
	      if (omp_privatize_by_reference (var))
		{
		  gcc_assert (TREE_CODE (new_var) == MEM_REF);
		  tree new_vard = TREE_OPERAND (new_var, 0);
		  gcc_assert (DECL_P (new_vard));
		  SET_DECL_VALUE_EXPR (new_vard,
				       build_fold_addr_expr (lvar));
		  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
		{
		  tree iv = create_tmp_var (TREE_TYPE (new_var));
		  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
		  gimplify_and_add (x, ilist);
		  gimple_stmt_iterator gsi
		    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
		  gassign *g
		    = gimple_build_assign (unshare_expr (lvar), iv);
		  gsi_insert_before_without_update (&gsi, g,
						    GSI_SAME_STMT);
		  tree t = OMP_CLAUSE_LINEAR_STEP (c);
		  enum tree_code code = PLUS_EXPR;
		  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
		    code = POINTER_PLUS_EXPR;
		  g = gimple_build_assign (iv, code, iv, t);
		  gsi_insert_before_without_update (&gsi, g,
						    GSI_SAME_STMT);
		  break;
		}
	      x = lang_hooks.decls.omp_clause_copy_ctor
		    (c, unshare_expr (ivar), x);
	      gimplify_and_add (x, &llist[0]);
	      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
	      if (x)
		gimplify_and_add (x, &llist[1]);
	      break;
	    }
	  if (omp_privatize_by_reference (var))
	    {
	      gcc_assert (TREE_CODE (new_var) == MEM_REF);
	      tree new_vard = TREE_OPERAND (new_var, 0);
	      gcc_assert (DECL_P (new_vard));
	      tree type = TREE_TYPE (TREE_TYPE (new_vard));
	      nx = TYPE_SIZE_UNIT (type);
	      if (TREE_CONSTANT (nx))
		{
		  nx = create_tmp_var_raw (type, get_name (var));
		  gimple_add_tmp_var (nx);
		  TREE_ADDRESSABLE (nx) = 1;
		  nx = build_fold_addr_expr_loc (clause_loc, nx);
		  nx = fold_convert_loc (clause_loc,
					 TREE_TYPE (new_vard), nx);
		  gimplify_assign (new_vard, nx, ilist);
		}
	    }
	}
      x = lang_hooks.decls.omp_clause_copy_ctor
	    (c, unshare_expr (new_var), x);
      gimplify_and_add (x, ilist);
      goto do_dtor;
    case OMP_CLAUSE__LOOPTEMP_:
    case OMP_CLAUSE__REDUCTEMP_:
      gcc_assert (is_taskreg_ctx (ctx));
      x = build_outer_var_ref (var, ctx);
      x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
      gimplify_and_add (x, ilist);
      break;

    case OMP_CLAUSE_COPYIN:
      by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
      append_to_statement_list (x, &copyin_seq);
      copyin_by_ref |= by_ref;
      break;
    case OMP_CLAUSE_REDUCTION:
    case OMP_CLAUSE_IN_REDUCTION:
      /* OpenACC reductions are initialized using the
	 GOACC_REDUCTION internal function.  */
      if (is_gimple_omp_oacc (ctx->stmt))
	break;
      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	  gimple *tseq;
	  tree ptype = TREE_TYPE (placeholder);
	  if (cond)
	    {
	      x = error_mark_node;
	      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
		  && !task_reduction_needs_orig_p)
		x = var;
	      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		{
		  tree pptype = build_pointer_type (ptype);
		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		    x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				size_int (task_reduction_cnt_full
					  + task_reduction_cntorig - 1),
				NULL_TREE, NULL_TREE);
		  else
		    {
		      unsigned int idx
			= *ctx->task_reduction_map->get (c);
		      x = task_reduction_read (ilist, tskred_temp,
					       pptype, 7 + 3 * idx);
		    }
		  x = fold_convert (pptype, x);
		  x = build_simple_mem_ref (x);
		}
	    }
	  else
	    {
	      lower_private_allocate (var, new_var, allocator,
				      allocate_ptr, ilist, ctx, false,
				      NULL_TREE);
	      x = build_outer_var_ref (var, ctx);
	      if (omp_privatize_by_reference (var)
		  && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
		x = build_fold_addr_expr_loc (clause_loc, x);
	    }
	  SET_DECL_VALUE_EXPR (placeholder, x);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  tree new_vard = new_var;
	  if (omp_privatize_by_reference (var))
	    {
	      gcc_assert (TREE_CODE (new_var) == MEM_REF);
	      new_vard = TREE_OPERAND (new_var, 0);
	      gcc_assert (DECL_P (new_vard));
	    }
	  tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
	  if (is_simd
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_INSCAN (c))
	    rvarp = &rvar;
	  if (is_simd
	      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
					       ivar, lvar, rvarp,
					       &rvar2))
	    {
	      if (new_vard == new_var)
		{
		  gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
		  SET_DECL_VALUE_EXPR (new_var, ivar);
		}
	      else
		{
		  SET_DECL_VALUE_EXPR (new_vard,
				       build_fold_addr_expr (ivar));
		  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
		}
	      x = lang_hooks.decls.omp_clause_default_ctor
		    (c, unshare_expr (ivar),
		     build_outer_var_ref (var, ctx));
	      if (rvarp && ctx->for_simd_scan_phase)
		{
		  if (x)
		    gimplify_and_add (x, &llist[0]);
		  x = lang_hooks.decls.omp_clause_dtor (c, ivar);
		  if (x)
		    gimplify_and_add (x, &llist[1]);
		  break;
		}
	      else if (rvarp)
		{
		  if (x)
		    {
		      gimplify_and_add (x, &llist[0]);

		      tree ivar2 = unshare_expr (lvar);
		      TREE_OPERAND (ivar2, 1) = sctx.idx;
		      x = lang_hooks.decls.omp_clause_default_ctor
			    (c, ivar2, build_outer_var_ref (var, ctx));
		      gimplify_and_add (x, &llist[0]);

		      if (rvar2)
			{
			  x = lang_hooks.decls.omp_clause_default_ctor
				(c, unshare_expr (rvar2),
				 build_outer_var_ref (var, ctx));
			  gimplify_and_add (x, &llist[0]);
			}

		      /* For types that need construction, add another
			 private var which will be default constructed
			 and optionally initialized with
			 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
			 loop we want to assign this value instead of
			 constructing and destructing it in each
			 iteration.  */
		      tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
		      gimple_add_tmp_var (nv);
		      ctx->cb.decl_map->put (TREE_OPERAND (rvar2
		      x = lang_hooks.decls.omp_clause_default_ctor
			    (c, nv, build_outer_var_ref (var, ctx));
		      gimplify_and_add (x, ilist);

		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
			{
			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			  x = DECL_VALUE_EXPR (new_vard);
			  tree vexpr = nv;
			  if (new_vard != new_var)
			    vexpr = build_fold_addr_expr (nv);
			  SET_DECL_VALUE_EXPR (new_vard, vexpr);
			  lower_omp (&tseq, ctx);
			  SET_DECL_VALUE_EXPR (new_vard, x);
			  gimple_seq_add_seq (ilist, tseq);
			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
			}

		      x = lang_hooks.decls.omp_clause_dtor (c, nv);
		      if (x)
			gimplify_and_add (x, dlist);
		    }

		  tree ref = build_outer_var_ref (var, ctx);
		  x = unshare_expr (ivar);
		  x = lang_hooks.decls.omp_clause_assign_op (c, x,
							     ref);
		  gimplify_and_add (x, &llist[0]);

		  ref = build_outer_var_ref (var, ctx);
		  x = lang_hooks.decls.omp_clause_assign_op (c, ref,
							     rvar);
		  gimplify_and_add (x, &llist[3]);

		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		  if (new_vard == new_var)
		    SET_DECL_VALUE_EXPR (new_var, lvar);
		  else
		    SET_DECL_VALUE_EXPR (new_vard,
					 build_fold_addr_expr (lvar));

		  x = lang_hooks.decls.omp_clause_dtor (c, ivar);
		  if (x)
		    gimplify_and_add (x, &llist[1]);

		  tree ivar2 = unshare_expr (lvar);
		  TREE_OPERAND (ivar2, 1) = sctx.idx;
		  x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
		  if (x)
		    gimplify_and_add (x, &llist[1]);

		  if (rvar2)
		    {
		      x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
		      if (x)
			gimplify_and_add (x, &llist[1]);
		    }
		  break;
		}
	      if (x)
		gimplify_and_add (x, &llist[0]);
	      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		{
		  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		  lower_omp (&tseq, ctx);
		  gimple_seq_add_seq (&llist[0], tseq);
		}
	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
	      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
	      lower_omp (&tseq, ctx);
	      gimple_seq_add_seq (&llist[1], tseq);
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
	      if (new_vard == new_var)
		SET_DECL_VALUE_EXPR (new_var, lvar);
	      else
		SET_DECL_VALUE_EXPR (new_vard,
				     build_fold_addr_expr (lvar));
	      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
	      if (x)
		gimplify_and_add (x, &llist[1]);
	      break;
	    }
	  /* If this is a reference to constant size reduction var
	     with placeholder, we haven't emitted the initializer
	     for it because it is undesirable if SIMD arrays are used.
	     But if they aren't used, we need to emit the deferred
	     initialization now.  */
	  else if (omp_privatize_by_reference (var) && is_simd)
	    handle_simd_reference (clause_loc, new_vard, ilist);

	  tree lab2 = NULL_TREE;
	  if (cond)
	    {
	      if (!is_parallel_ctx (ctx))
		{
		  tree condv = create_tmp_var (boolean_type_node);
		  tree m = build_simple_mem_ref (cond);
		  g = gimple_build_assign (condv, m);
		  gimple_seq_add_stmt (ilist, g);
		  tree lab1
		    = create_artificial_label (UNKNOWN_LOCATION);
		  lab2 = create_artificial_label (UNKNOWN_LOCATION);
		  g = gimple_build_cond (NE_EXPR, condv,
					 boolean_false_node,
					 lab2, lab1);
		  gimple_seq_add_stmt (ilist, g);
		  gimple_seq_add_stmt (ilist,
				       gimple_build_label (lab1));
		}
	      g = gimple_build_assign (build_simple_mem_ref (cond),
				       boolean_true_node);
	      gimple_seq_add_stmt (ilist, g);
	    }
	  x = lang_hooks.decls.omp_clause_default_ctor
		(c, unshare_expr (new_var),
		 cond ? NULL_TREE
		 : build_outer_var_ref (var, ctx));
	  if (x)
	    gimplify_and_add (x, ilist);

	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_INSCAN (c))
	    {
	      if (ctx->for_simd_scan_phase)
		goto do_dtor;
	      if (x || (!is_simd
			&& OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
		{
		  tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
		  gimple_add_tmp_var (nv);
		  ctx->cb.decl_map->put (new_vard, nv);
		  x = lang_hooks.decls.omp_clause_default_ctor
			(c, nv, build_outer_var_ref (var, ctx));
		  if (x)
		    gimplify_and_add (x, ilist);
		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		    {
		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		      tree vexpr = nv;
		      if (new_vard != new_var)
			vexpr = build_fold_addr_expr (nv);
		      SET_DECL_VALUE_EXPR (new_vard, vexpr);
		      DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
		      lower_omp (&tseq, ctx);
		      SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
		      DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
		      gimple_seq_add_seq (ilist, tseq);
		    }
		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		  if (is_simd && ctx->scan_exclusive)
		    {
		      tree nv2
			= create_tmp_var_raw (TREE_TYPE (new_var));
		      gimple_add_tmp_var (nv2);
		      ctx->cb.decl_map->put (nv, nv2);
		      x = lang_hooks.decls.omp_clause_default_ctor
			    (c, nv2, build_outer_var_ref (var, ctx));
		      gimplify_and_add (x, ilist);
		      x = lang_hooks.decls.omp_clause_dtor (c, nv2);
		      if (x)
			gimplify_and_add (x, dlist);
		    }
		  x = lang_hooks.decls.omp_clause_dtor (c, nv);
		  if (x)
		    gimplify_and_add (x, dlist);
		}
	      else if (is_simd
		       && ctx->scan_exclusive
		       && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
		{
		  tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
		  gimple_add_tmp_var (nv2);
		  ctx->cb.decl_map->put (new_vard, nv2);
		  x = lang_hooks.decls.omp_clause_dtor (c, nv2);
		  if (x)
		    gimplify_and_add (x, dlist);
		}
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
	      goto do_dtor;
	    }

	  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
	    {
	      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
	      if (c_kind == OMP_CLAUSE_IN_REDUCTION
		  && is_omp_target (ctx->stmt))
		{
		  tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  tree oldv = NULL_TREE;
		  gcc_assert (d);
		  if (DECL_HAS_VALUE_EXPR_P (d))
		    oldv = DECL_VALUE_EXPR (d);
		  SET_DECL_VALUE_EXPR (d, new_vard);
		  DECL_HAS_VALUE_EXPR_P (d) = 1;
		  lower_omp (&tseq, ctx);
		  if (oldv)
		    SET_DECL_VALUE_EXPR (d, oldv);
		  else
		    {
		      SET_DECL_VALUE_EXPR (d, NULL_TREE);
		      DECL_HAS_VALUE_EXPR_P (d) = 0;
		    }
		}
	      else
		lower_omp (&tseq, ctx);
	      gimple_seq_add_seq (ilist, tseq);
	    }
	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
	  if (is_simd)
	    {
	      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
	      lower_omp (&tseq, ctx);
	      gimple_seq_add_seq (dlist, tseq);
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	    }
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
	  if (lab2)
	    gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
	  break;
	}
      else
	{
	  x = omp_reduction_init (c, TREE_TYPE (new_var));
	  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
	  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);

	  if (cond)
	    {
	      tree lab2 = NULL_TREE;
	      /* GOMP_taskgroup_reduction_register memsets the whole
		 array to zero.  If the initializer is zero, we don't
		 need to initialize it again, just mark it as ever
		 used unconditionally, i.e. cond = true.  */
	      if (initializer_zerop (x))
		{
		  g = gimple_build_assign (build_simple_mem_ref (cond),
					   boolean_true_node);
		  gimple_seq_add_stmt (ilist, g);
		  break;
		}
	      /* Otherwise, emit
		 if (!cond) { cond = true; new_var = x; }  */
	      if (!is_parallel_ctx (ctx))
		{
		  tree condv = create_tmp_var (boolean_type_node);
		  tree m = build_simple_mem_ref (cond);
		  g = gimple_build_assign (condv, m);
		  gimple_seq_add_stmt (ilist, g);
		  tree lab1
		    = create_artificial_label (UNKNOWN_LOCATION);
		  lab2 = create_artificial_label (UNKNOWN_LOCATION);
		  g = gimple_build_cond (NE_EXPR, condv,
					 boolean_false_node,
					 lab2, lab1);
		  gimple_seq_add_stmt (ilist, g);
		  gimple_seq_add_stmt (ilist,
				       gimple_build_label (lab1));
		}
	      g = gimple_build_assign (build_simple_mem_ref (cond),
				       boolean_true_node);
	      gimple_seq_add_stmt (ilist, g);
	      gimplify_assign (new_var, x, ilist);
	      if (lab2)
		gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
	      break;
	    }

	  /* reduction(-:var) sums up the partial results, so it
	     acts identically to reduction(+:var).  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  bool is_truth_op
	    = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
	  tree new_vard = new_var;
	  if (is_simd && omp_privatize_by_reference (var))
	    {
	      gcc_assert (TREE_CODE (new_var) == MEM_REF);
	      new_vard = TREE_OPERAND (new_var, 0);
	      gcc_assert (DECL_P (new_vard));
	    }
	  tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
	  if (is_simd
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_INSCAN (c))
	    rvarp = &rvar;
	  if (is_simd
	      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
					       ivar, lvar, rvarp,
					       &rvar2))
	    {
	      if (new_vard != new_var)
		{
		  SET_DECL_VALUE_EXPR (new_vard,
				       build_fold_addr_expr (lvar));
		  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
		}

	      tree ref = build_outer_var_ref (var, ctx);

	      if (rvarp)
		{
		  if (ctx->for_simd_scan_phase)
		    break;
		  gimplify_assign (ivar, ref, &llist[0]);
		  ref = build_outer_var_ref (var, ctx);
		  gimplify_assign (ref, rvar, &llist[3]);
		  break;
		}

	      gimplify_assign (unshare_expr (ivar), x, &llist[0]);

	      if (sctx.is_simt)
		{
		  if (!simt_lane)
		    simt_lane = create_tmp_var (unsigned_type_node);
		  x = build_call_expr_internal_loc
			(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
			 TREE_TYPE (ivar), 2, ivar, simt_lane);
		  x = build2 (code, TREE_TYPE (ivar), ivar, x);
		  gimplify_assign (ivar, x, &llist[2]);
		}
	      else
		{
		  tree ivar2 = ivar;
		  tree ref2 = ref;
		  if (is_truth_op)
		    {
		      tree zero = build_zero_cst (TREE_TYPE (ivar));
		      ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
					       boolean_type_node, ivar,
					       zero);
		      ref2 = fold_build2_loc (clause_loc, NE_EXPR,
					      boolean_type_node, ref,
					      zero);
		    }
		  x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
		  if (is_truth_op)
		    x = fold_convert (TREE_TYPE (ref), x);
		  ref = build_outer_var_ref (var, ctx);
		  gimplify_assign (ref, x, &llist[1]);
		}
	    }
	  else
	    {
	      lower_private_allocate (var, new_var, allocator,
				      allocate_ptr, ilist, ctx,
				      false, NULL_TREE);
	      if (omp_privatize_by_reference (var) && is_simd)
		handle_simd_reference (clause_loc, new_vard, ilist);
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_INSCAN (c))
		break;
	      gimplify_assign (new_var, x, ilist);
	      if (is_simd)
		{
		  tree ref = build_outer_var_ref (var, ctx);
		  tree new_var2 = new_var;
		  tree ref2 = ref;
		  if (is_truth_op)
		    {
		      tree zero = build_zero_cst (TREE_TYPE (new_var));
		      new_var2
			= fold_build2_loc (clause_loc, NE_EXPR,
					   boolean_type_node, new_var,
					   zero);
		      ref2 = fold_build2_loc (clause_loc, NE_EXPR,
					      boolean_type_node, ref,
					      zero);
		    }
		  x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
		  if (is_truth_op)
		    x = fold_convert (TREE_TYPE (new_var), x);
		  ref = build_outer_var_ref (var, ctx);
		  gimplify_assign (ref, x, dlist);
		}
	    }
	}
      break;
      tree clobber = build_clobber (TREE_TYPE (tskred_avar));
      gimple_seq_add_stmt (ilist,
			   gimple_build_assign (tskred_avar, clobber));
    }

  if (known_eq (sctx.max_vf, 1U))
    {
      sctx.is_simt = false;
      if (ctx->lastprivate_conditional_map)
	{
	  if (gimple_omp_for_combined_into_p (ctx->stmt))
	    {
	      /* Signal to lower_omp_1 that it should use parent context.  */
	      ctx->combined_into_simd_safelen1 = true;
	      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		    && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
		  {
		    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
		    omp_context *outer = ctx->outer;
		    if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
		      outer = outer->outer;
		    tree *v = ctx->lastprivate_conditional_map->get (o);
		    tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
		    tree *pv = outer->lastprivate_conditional_map->get (po);
		    *v = *pv;
		  }
	    }
	  else
	    {
	      /* When not vectorized, treat lastprivate(conditional:) like
		 normal lastprivate, as there will be just one simd lane
		 writing the privatized variable.  */
	      delete ctx->lastprivate_conditional_map;
	      ctx->lastprivate_conditional_map = NULL;
	    }
	}
    }

  if (nonconst_simd_if)
    {
      if (sctx.lane == NULL_TREE)
	{
	  sctx.idx = create_tmp_var (unsigned_type_node);
	  sctx.lane = create_tmp_var (unsigned_type_node);
	}
      /* FIXME: For now.  */
      sctx.is_simt = false;
    }

  if (sctx.lane || sctx.is_simt)
    {
      uid = create_tmp_var (ptr_type_node, "simduid");
      /* Don't want uninit warnings on simduid, it is always uninitialized,
	 but we use it not for the value, but for the DECL_UID only.  */
      suppress_warning (uid, OPT_Wuninitialized);
      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
      OMP_CLAUSE__SIMDUID__DECL (c) = uid;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
      gimple_omp_for_set_clauses (ctx->stmt, c);
    }
  /* Emit calls denoting privatized variables and initializing a pointer to
     structure that holds private variables as fields after ompdevlow pass.  */
  if (sctx.is_simt)
    {
      sctx.simt_eargs[0] = uid;
      gimple *g
	= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER,
					  sctx.simt_eargs);
      gimple_call_set_lhs (g, uid);
      gimple_seq_add_stmt (ilist, g);
      sctx.simt_eargs.release ();

      simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
      gimple_call_set_lhs (g, simtrec);
      gimple_seq_add_stmt (ilist, g);
    }
  if (sctx.lane)
    {
      gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
					      2 + (nonconst_simd_if != NULL),
					      uid, integer_zero_node,
					      nonconst_simd_if);
      gimple_call_set_lhs (g, sctx.lane);
      gimple_stmt_iterator gsi
	= gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
      gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (sctx.lane, INTEGER_CST,
			       build_int_cst (unsigned_type_node, 0));
      gimple_seq_add_stmt (ilist, g);

      if (sctx.lastlane)
	{
	  g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
					  2, uid, sctx.lane);
	  gimple_call_set_lhs (g, sctx.lastlane);
	  gimple_seq_add_stmt (dlist, g);
	  gimple_seq_add_seq (dlist, llist[3]);
	}
      /* Emit reductions across SIMT lanes in log_2 (simt_vf) steps.  */
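      /* A sketch of the sequence built below, with OP standing for the
	 reduction operation (illustrative only; simt_lane and simt_vf
	 are the temporaries created in the code that follows):

	     simt_lane = 1;
	     goto header;
	   body:
	     <llist[2]: ivar = ivar OP GOMP_SIMT_XCHG_BFLY (ivar, simt_lane)>
	     simt_lane = simt_lane << 1;
	   header:
	     if (simt_lane < simt_vf) goto body; else goto end;
	   end:

	 Doubling simt_lane on each iteration yields a butterfly exchange,
	 so the values of all lanes are combined in log2 (simt_vf) steps.  */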
      if (llist[2])
	{
	  tree simt_vf = create_tmp_var (unsigned_type_node);
	  g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
	  gimple_call_set_lhs (g, simt_vf);
	  gimple_seq_add_stmt (dlist, g);

	  tree t = build_int_cst (unsigned_type_node, 1);
	  g = gimple_build_assign (simt_lane, INTEGER_CST, t);
	  gimple_seq_add_stmt (dlist, g);

	  t = build_int_cst (unsigned_type_node, 0);
	  g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
	  gimple_seq_add_stmt (dlist, g);

	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree header = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (dlist, gimple_build_goto (header));
	  gimple_seq_add_stmt (dlist, gimple_build_label (body));

	  gimple_seq_add_seq (dlist, llist[2]);

	  g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane,
				   integer_one_node);
	  gimple_seq_add_stmt (dlist, g);

	  gimple_seq_add_stmt (dlist, gimple_build_label (header));
	  g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
	  gimple_seq_add_stmt (dlist, g);

	  gimple_seq_add_stmt (dlist, gimple_build_label (end));
	}
      for (int i = 0; i < 2; i++)
	if (llist[i])
	  {
	    tree vf = create_tmp_var (unsigned_type_node);
	    g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
	    gimple_call_set_lhs (g, vf);
	    gimple_seq *seq = i == 0 ? ilist : dlist;
	    gimple_seq_add_stmt (seq, g);
	    tree t = build_int_cst (unsigned_type_node, 0);
	    g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
	    gimple_seq_add_stmt (seq, g);
	    tree body = create_artificial_label (UNKNOWN_LOCATION);
	    tree header = create_artificial_label (UNKNOWN_LOCATION);
	    tree end = create_artificial_label (UNKNOWN_LOCATION);
	    gimple_seq_add_stmt (seq, gimple_build_goto (header));
	    gimple_seq_add_stmt (seq, gimple_build_label (body));
	    gimple_seq_add_seq (seq, llist[i]);
	    t = build_int_cst (unsigned_type_node, 1);
	    g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
	    gimple_seq_add_stmt (seq, g);
	    gimple_seq_add_stmt (seq, gimple_build_label (header));
	    g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
	    gimple_seq_add_stmt (seq, g);
	    gimple_seq_add_stmt (seq, gimple_build_label (end));
	  }
      if (sctx.is_simt)
	{
	  gimple_seq_add_seq (dlist, sctx.simt_dlist);
	  gcall *g
	    = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
	  gimple_seq_add_stmt (dlist, g);
	}
    }

  /* The copyin sequence is not to be executed by the main thread, since
     that would result in self-copies.  Perhaps not visible to scalars,
     but it certainly is to C++ operator=.  */
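  /* Concretely, the conditional built below is equivalent to this sketch:

       if (__builtin_omp_get_thread_num () != 0)
	 {
	   <copyin_seq: copy threadprivate variables from the master copy>
	 }

     so the master thread skips the copies entirely.  */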
  if (copyin_seq)
    {
      x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
			   0);
      x = build2 (NE_EXPR, boolean_type_node, x,
		  build_int_cst (TREE_TYPE (x), 0));
      x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
      gimplify_and_add (x, ilist);
    }

  /* If any copyin variable is passed by reference, we must ensure the
     master thread doesn't modify it before it is copied over in all
     threads.  Similarly for variables in both firstprivate and
     lastprivate clauses we need to ensure the lastprivate copying
     happens after firstprivate copying in all threads.  And similarly
     for UDRs if initializer expression refers to omp_orig.  */
  if (copyin_by_ref || lastprivate_firstprivate
      || (reduction_omp_orig_ref
	  && !ctx->scan_inclusive
	  && !ctx->scan_exclusive))
    {
      /* Don't add any barrier for #pragma omp simd or
	 #pragma omp distribute.  */
      if (!is_task_ctx (ctx)
	  && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
	      || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
	gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
    }

  /* If max_vf is non-zero, then we can use only a vectorization factor
     up to the max_vf we chose.  So stick it into the safelen clause.  */
  if (maybe_ne (sctx.max_vf, 0U))
    {
      tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				OMP_CLAUSE_SAFELEN);
      poly_uint64 safe_len;
      if (c == NULL_TREE
	  || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
	      && maybe_gt (safe_len, sctx.max_vf)))
	{
	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
						       sctx.max_vf);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
	  gimple_omp_for_set_clauses (ctx->stmt, c);
	}
    }
}
/* Create temporary variables for lastprivate(conditional:) implementation
   in context CTX with CLAUSES.  */
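
/* As a reminder of what is being implemented here, a typical use is
   (sketch):

     #pragma omp for lastprivate(conditional: x)
     for (i = 0; i < n; i++)
       if (p[i])
	 x = i;

   Every store to X in the body is paired with a store of an iteration
   counter into the _condtemp_ temporary created below; the copy-out code
   then takes the value from the thread/lane with the largest counter,
   i.e. from the lexically last conditional assignment.  */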
static void
lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
{
  tree iter_type = NULL_TREE;
  tree cond_ptr = NULL_TREE;
  tree iter_var = NULL_TREE;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
  tree next = *clauses;
  for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	&& OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
      {
	if (is_simd)
	  {
	    tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
	    gcc_assert (cc);
	    if (iter_type == NULL_TREE)
	      {
		iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
		iter_var = create_tmp_var_raw (iter_type);
		DECL_CONTEXT (iter_var) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
		DECL_CHAIN (iter_var) = ctx->block_vars;
		ctx->block_vars = iter_var;
		tree c3
		  = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
		OMP_CLAUSE_DECL (c3) = iter_var;
		OMP_CLAUSE_CHAIN (c3) = *clauses;
		*clauses = c3;
		ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	      }
	    next = OMP_CLAUSE_CHAIN (cc);
	    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	    tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
	    ctx->lastprivate_conditional_map->put (o, v);
	  }
	else
	  {
	    if (iter_type == NULL)
	      {
		if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
		  {
		    struct omp_for_data fd;
		    omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
					  NULL);
		    iter_type = unsigned_type_for (fd.iter_type);
		  }
		else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		  iter_type = unsigned_type_node;
		tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
		if (c2)
		  {
		    cond_ptr
		      = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
		    OMP_CLAUSE_DECL (c2) = cond_ptr;
		  }
		else
		  {
		    cond_ptr
		      = create_tmp_var_raw (build_pointer_type (iter_type));
		    DECL_CONTEXT (cond_ptr) = current_function_decl;
		    DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
		    DECL_CHAIN (cond_ptr) = ctx->block_vars;
		    ctx->block_vars = cond_ptr;
		    c2 = build_omp_clause (UNKNOWN_LOCATION,
					   OMP_CLAUSE__CONDTEMP_);
		    OMP_CLAUSE_DECL (c2) = cond_ptr;
		    OMP_CLAUSE_CHAIN (c2) = *clauses;
		    *clauses = c2;
		  }
		iter_var = create_tmp_var_raw (iter_type);
		DECL_CONTEXT (iter_var) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
		DECL_CHAIN (iter_var) = ctx->block_vars;
		ctx->block_vars = iter_var;
		tree c3
		  = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
		OMP_CLAUSE_DECL (c3) = iter_var;
		OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
		OMP_CLAUSE_CHAIN (c2) = c3;
		ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	      }
	    tree v = create_tmp_var_raw (iter_type);
	    DECL_CONTEXT (v) = current_function_decl;
	    DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
	    DECL_CHAIN (v) = ctx->block_vars;
	    ctx->block_vars = v;
	    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	    ctx->lastprivate_conditional_map->put (o, v);
	  }
      }
}
/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  BODY_P is the sequence to insert early initialization
   if needed, STMT_LIST is where the non-conditional lastprivate handling
   goes into and CSTMT_LIST is a sequence that needs to be run in a critical
   section.  */
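
/* The copy-out emitted below is guarded by PREDICATE, roughly (sketch):

     if (PREDICATE)
       {
	 label_true:
	 <outer var = private var, for each lastprivate/linear clause>
       }
     label:

   For SIMT targets the predicate is first combined across lanes with the
   GOMP_SIMT_VOTE_ANY internal function, as can be seen further down.  */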
static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
			   gimple_seq *stmt_list, gimple_seq *cstmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
  unsigned HOST_WIDE_INT conditional_off = 0;
  gimple_seq post_stmt_list = NULL;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }

  bool maybe_simt = false;
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    {
      maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
      simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
      if (simduid)
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
    }

  if (predicate)
    {
      gcond *stmt;
      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	{
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	}
      else
	{
	  arm1 = predicate;
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	}
      if (maybe_simt)
	{
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
						 1, simtcond);
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (g, c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
				    label_true, label);
	}
      else
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }

  tree cond_ptr = NULL_TREE;
  for (c = clauses; c;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);
      gimple_seq *this_stmt_list = stmt_list;
      tree lab2 = NULL_TREE;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	  && ctx->lastprivate_conditional_map
	  && !ctx->combined_into_simd_safelen1)
	{
	  gcc_assert (body_p);
	  if (cond_ptr == NULL_TREE)
	    {
	      cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
	      cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
	    }
	  tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  tree v = *ctx->lastprivate_conditional_map->get (o);
	  gimplify_assign (v, build_zero_cst (type), body_p);
	  this_stmt_list = cstmt_list;
	  tree mem;
	  if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
	    {
	      mem = build2 (MEM_REF, type, cond_ptr,
			    build_int_cst (TREE_TYPE (cond_ptr),
					   conditional_off));
	      conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
	    }
	  else
	    mem = build4 (ARRAY_REF, type, cond_ptr,
			  size_int (conditional_off++), NULL_TREE, NULL_TREE);
	  tree mem2 = copy_node (mem);
	  gimple_seq seq = NULL;
	  mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (this_stmt_list, seq);
	  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
	  lab2 = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
	  gimple_seq_add_stmt (this_stmt_list, g);
	  gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
	  gimplify_assign (mem2, v, this_stmt_list);
	}
      else if (predicate
	       && ctx->combined_into_simd_safelen1
	       && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	       && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	       && ctx->lastprivate_conditional_map)
	this_stmt_list = &post_stmt_list;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	    {
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx->outer);
	    }
	  else
	    {
	      new_var = lookup_decl (var, ctx);
	      /* Avoid uninitialized warnings for lastprivate and
		 for linear iterators.  */
	      if (predicate
		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
		suppress_warning (new_var, OPT_Wuninitialized);
	    }

	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node);
		      gcall *g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (this_stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		  TREE_THIS_NOTRAP (new_var) = 1;
		}
	    }
	  else if (maybe_simt)
	    {
	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
			  ? DECL_VALUE_EXPR (new_var)
			  : new_var);
	      if (simtlast == NULL)
		{
		  simtlast = create_tmp_var (unsigned_type_node);
		  gcall *g = gimple_build_call_internal
		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		  gimple_call_set_lhs (g, simtlast);
		  gimple_seq_add_stmt (this_stmt_list, g);
		}
	      x = build_call_expr_internal_loc
		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		 TREE_TYPE (val), 2, val, simtlast);
	      new_var = unshare_expr (new_var);
	      gimplify_assign (new_var, x, this_stmt_list);
	      new_var = unshare_expr (new_var);
	    }

	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
	    }

	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
	      && is_taskloop_ctx (ctx))
	    {
	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
							  ctx->outer->outer);
	      if (is_global_var (ovar))
	    }
	  x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
	  if (omp_privatize_by_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, this_stmt_list);

	  if (lab2)
	    gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
	}

      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
  gimple_seq_add_seq (stmt_list, post_stmt_list);
}
/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
   (which might be a placeholder).  INNER is true if this is an inner
   axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
   join markers.  Generate the before-loop forking sequence in
   FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
   general form of these sequences is

     GOACC_REDUCTION_SETUP
     GOACC_FORK
     GOACC_REDUCTION_INIT
     ...
     GOACC_REDUCTION_FINI
     GOACC_JOIN
     GOACC_REDUCTION_TEARDOWN.  */
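
/* For instance, for 'reduction (+:sum)' on some compute LEVEL the emitted
   skeleton is roughly (a sketch; v1, v2, v3 are the temporaries created
   below):

     v1 = GOACC_REDUCTION (SETUP, ref_to_res, sum, level, +, offset);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, +, offset);
     <loop body accumulating into the private copy>
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, +, offset);
     GOACC_JOIN
     sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, +, offset);

   The IFN_GOACC_REDUCTION calls are only resolved to target-specific code
   later, once the offload device is known.  */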
static void
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *private_marker, gcall *join,
		       gimple_seq *fork_seq, gimple_seq *join_seq,
		       omp_context *ctx)
{
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	/* No 'reduction' clauses on OpenACC 'kernels'.  */
	gcc_checking_assert (!is_oacc_kernels (ctx));
	/* Likewise, on OpenACC 'kernels' decomposed parts.  */
	gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	  rcode = PLUS_EXPR;
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	if (!var)
	  var = orig;

	incoming = outgoing = var;

	if (!inner)
	  {
	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
	      {
		enum gimple_code type = gimple_code (probe->stmt);
		tree cls;

		switch (type)
		  {
		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);
		    break;

		  case GIMPLE_OMP_TARGET:
		    /* No 'reduction' clauses inside OpenACC 'kernels'
		       regions.  */
		    gcc_checking_assert (!is_oacc_kernels (probe));

		    if (!is_gimple_omp_offloaded (probe->stmt))
		      goto do_lookup;

		    cls = gimple_omp_target_clauses (probe->stmt);
		    break;

		  default:
		    goto do_lookup;
		  }

		outer = probe;
		for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		    {
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		    }
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))
		    {
		      is_private = true;
		      goto do_lookup;
		    }
	      }

	  do_lookup:
	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
	      {
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_privatize_by_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		outgoing = var;
		incoming = omp_reduction_init_op (loc, rcode, type);
	      }
	    else
	      {
		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		tree t = NULL_TREE;
		omp_context *c = ctx->outer;
		while (c && !t)
		  {
		    t = maybe_lookup_decl (orig, c);
		    c = c->outer;
		  }
		incoming = outgoing = (t ? t : orig);
	      }

	  has_outer_reduction:;
	  }

	if (!ref_to_res)
	  ref_to_res = integer_zero_node;

	if (omp_privatize_by_reference (orig))
	  {
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	    if (!inner)
	      {
		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
	      }

	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);
	  }
	else
	  v1 = v2 = v3 = var;

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);

	if (!init_code)
	  {
	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);
	  }

	tree setup_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  incoming, level, op, off);
	tree init_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  v1, level, op, off);
	tree fini_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  v2, level, op, off);
	tree teardown_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, v3, level, op, off);

	gimplify_assign (v1, setup_call, &before_fork);
	gimplify_assign (v2, init_call, &after_fork);
	gimplify_assign (v3, fini_call, &before_join);
	gimplify_assign (outgoing, teardown_call, &after_join);
      }

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
  if (private_marker)
    gimple_seq_add_stmt (fork_seq, private_marker);
  if (fork)
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
  if (join)
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
}
/* Generate code to implement the REDUCTION clauses, append it
   to STMT_SEQP.  CLIST if non-NULL is a pointer to a sequence
   that should be emitted also inside of the critical section,
   in that case clear *CLIST afterwards, otherwise leave it as is
   and let the caller emit it itself.  */
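
/* For example, a single 'reduction (+:sum)' clause is merged with a
   relaxed atomic update, roughly equivalent to (sketch):

     #pragma omp atomic update
     sum = sum + sum_private;

   With two or more reduction clauses, or with UDRs or array sections
   where OMP_ATOMIC cannot be used, all the merges are instead emitted
   between GOMP_atomic_start () and GOMP_atomic_end () calls.  */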
static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
			 gimple_seq *clist, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    return;

  /* inscan reductions are handled elsewhere.  */
  if (ctx->scan_inclusive || ctx->scan_exclusive)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	{
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer or referenced based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c)
	  && omp_privatize_by_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      bool is_truth_op
	= (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
      if (count == 1)
	{
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  tree new_var2 = new_var;
	  tree ref2 = ref;
	  if (is_truth_op)
	    {
	      tree zero = build_zero_cst (TREE_TYPE (new_var));
	      new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, new_var, zero);
	      ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
				      ref, zero);
	    }
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
			       new_var2);
	  if (is_truth_op)
	    x = fold_convert (TREE_TYPE (new_var), x);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_privatize_by_reference (var)
			  && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_privatize_by_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      tree out2 = out;
	      tree priv2 = priv;
	      if (is_truth_op)
		{
		  tree zero = build_zero_cst (TREE_TYPE (out));
		  out2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, out, zero);
		  priv2 = fold_build2_loc (clause_loc, NE_EXPR,
					   boolean_type_node, priv, zero);
		}
	      x = build2 (code, TREE_TYPE (out2), out2, priv2);
	      if (is_truth_op)
		x = fold_convert (TREE_TYPE (out), x);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_privatize_by_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  tree new_var2 = new_var;
	  tree ref2 = ref;
	  if (is_truth_op)
	    {
	      tree zero = build_zero_cst (TREE_TYPE (new_var));
	      new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, new_var, zero);
	      ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
				      ref, zero);
	    }
	  x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
	  if (is_truth_op)
	    x = fold_convert (TREE_TYPE (new_var), x);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  if (clist)
    {
      gimple_seq_add_seq (stmt_seqp, *clist);
      *clist = NULL;
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
/* Generate code to implement the COPYPRIVATE clauses.  */
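
/* COPYPRIVATE on 'omp single' broadcasts values out of the thread that
   executed the single region: that thread stores the address of each
   copyprivate variable into the communication record (the SLIST side
   below), and all other threads then copy through that address (the
   RLIST side), roughly (sketch):

     single thread:  .omp_data_o.x = &x;
     other threads:  x = *<received pointer to x>;  */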
static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
			   omp_context *ctx)
{
  tree c;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
	{
	  x = build_fold_addr_expr_loc (clause_loc, new_var);
	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
	}
      gimplify_assign (ref, x, slist);

      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
	{
	  ref = fold_convert_loc (clause_loc,
				  build_pointer_type (TREE_TYPE (new_var)),
				  ref);
	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
	}
      if (omp_privatize_by_reference (var))
	{
	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
	  ref = build_simple_mem_ref_loc (clause_loc, ref);
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	}
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  */
7953 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
7957 int ignored_looptemp
= 0;
7958 bool is_taskloop
= false;
7960 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7961 by GOMP_taskloop. */
7962 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
7964 ignored_looptemp
= 2;
7968 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7970 tree val
, ref
, x
, var
;
7971 bool by_ref
, do_in
= false, do_out
= false;
7972 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7974 switch (OMP_CLAUSE_CODE (c
))
7976 case OMP_CLAUSE_PRIVATE
:
7977 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
7980 case OMP_CLAUSE_FIRSTPRIVATE
:
7981 case OMP_CLAUSE_COPYIN
:
7982 case OMP_CLAUSE_LASTPRIVATE
:
7983 case OMP_CLAUSE_IN_REDUCTION
:
7984 case OMP_CLAUSE__REDUCTEMP_
:
7986 case OMP_CLAUSE_REDUCTION
:
7987 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
7990 case OMP_CLAUSE_SHARED
:
7991 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7994 case OMP_CLAUSE__LOOPTEMP_
:
7995 if (ignored_looptemp
)
8005 val
= OMP_CLAUSE_DECL (c
);
8006 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
8007 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
8008 && TREE_CODE (val
) == MEM_REF
)
8010 val
= TREE_OPERAND (val
, 0);
8011 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
8012 val
= TREE_OPERAND (val
, 0);
8013 if (TREE_CODE (val
) == INDIRECT_REF
8014 || TREE_CODE (val
) == ADDR_EXPR
)
8015 val
= TREE_OPERAND (val
, 0);
8016 if (is_variable_sized (val
))
8020 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8021 outer taskloop region. */
8022 omp_context
*ctx_for_o
= ctx
;
8024 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
8025 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
8026 ctx_for_o
= ctx
->outer
;
8028 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
8030 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
8031 && is_global_var (var
)
8032 && (val
== OMP_CLAUSE_DECL (c
)
8033 || !is_task_ctx (ctx
)
8034 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
8035 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
8036 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
8037 != POINTER_TYPE
)))))
8040 t
= omp_member_access_dummy_var (var
);
8043 var
= DECL_VALUE_EXPR (var
);
8044 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
8046 var
= unshare_and_remap (var
, t
, o
);
8048 var
= unshare_expr (var
);
8051 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
8053 /* Handle taskloop firstprivate/lastprivate, where the
8054 lastprivate on GIMPLE_OMP_TASK is represented as
8055 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8056 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
8057 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
8058 if (use_pointer_for_field (val
, ctx
))
8059 var
= build_fold_addr_expr (var
);
8060 gimplify_assign (x
, var
, ilist
);
8061 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
8065 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
8066 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
8067 || val
== OMP_CLAUSE_DECL (c
))
8068 && is_variable_sized (val
))
8070 by_ref
= use_pointer_for_field (val
, NULL
);
8072 switch (OMP_CLAUSE_CODE (c
))
8074 case OMP_CLAUSE_FIRSTPRIVATE
:
8075 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
8077 && is_task_ctx (ctx
))
8078 suppress_warning (var
);
8082 case OMP_CLAUSE_PRIVATE
:
8083 case OMP_CLAUSE_COPYIN
:
8084 case OMP_CLAUSE__LOOPTEMP_
:
8085 case OMP_CLAUSE__REDUCTEMP_
:
8089 case OMP_CLAUSE_LASTPRIVATE
:
8090 if (by_ref
|| omp_privatize_by_reference (val
))
8092 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
8099 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
8104 case OMP_CLAUSE_REDUCTION
:
8105 case OMP_CLAUSE_IN_REDUCTION
:
8107 if (val
== OMP_CLAUSE_DECL (c
))
8109 if (is_task_ctx (ctx
))
8110 by_ref
= use_pointer_for_field (val
, ctx
);
8112 do_out
= !(by_ref
|| omp_privatize_by_reference (val
));
8115 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
8124 ref
= build_sender_ref (val
, ctx
);
8125 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
8126 gimplify_assign (ref
, x
, ilist
);
8127 if (is_task_ctx (ctx
))
8128 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
8133 ref
= build_sender_ref (val
, ctx
);
8134 gimplify_assign (var
, ref
, olist
);
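/* Sketch of the effect: for "#pragma omp task firstprivate (x)" this
   emits roughly ".omp_data_o.x = x;" into ILIST before the runtime call
   that spawns the child, and for a lastprivate scalar on a worksharing
   construct "x = .omp_data_o.x;" into OLIST after it, using the sender
   record fields laid out during scanning.  */
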
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar
	  || !DECL_HAS_VALUE_EXPR_P (nvar)
	  || (ctx->allocate_map
	      && ctx->allocate_map->get (ovar)))
	continue;

      /* If CTX is a nested parallel directive, find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  x = build_sender_ref (ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
		 or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}

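/* E.g. a variable V that scanning remapped in the child function is sent
   as roughly ".omp_data_o.v = v;" (or "= &v;" when passed by pointer) in
   ILIST and copied back with "v = .omp_data_o.v;" in OLIST, unless it is
   read-only or a by-reference RESULT_DECL/PARM_DECL as checked above.  */
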
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */

static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels = MAX (levels, 1);
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels = MAX (levels, 2);
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels = MAX (levels, 3);
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    ;
  else if (is_oacc_kernels (tgt))
    /* This loop handling is not used inside OpenACC 'kernels' regions.  */
    gcc_unreachable ();
  else if (is_oacc_kernels_decomposed_part (tgt))
    ;
  else
    gcc_unreachable ();

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    tag |= OLF_INDEPENDENT;

  /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
     have an explicit 'seq' or 'independent' clause, and no 'auto' clause.  */
  if (tgt && is_oacc_kernels_decomposed_part (tgt))
    {
      gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
      gcc_assert (!(tag & OLF_AUTO));
    }

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning.  */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}

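/* The marker emitted above has roughly the shape

	.data_dep = IFN_UNIQUE (OACC_HEAD_MARK, .data_dep, levels, tag
				[, gang_static]);

   and is paired with the corresponding OACC_TAIL_MARK by the later
   device lowering, which consumes the partitioning tag.  */
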
/* Emit an OpenACC loop head or tail marker to SEQ.  TOFOLLOW, if
   non-NULL, is the partitioning level of the enclosed region.  */

static void
lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
			tree tofollow, gimple_seq *seq)
{
  int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
		     : IFN_UNIQUE_OACC_TAIL_MARK);
  tree marker = build_int_cst (integer_type_node, marker_kind);
  int nargs = 2 + (tofollow != NULL_TREE);
  gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
					    marker, ddvar, tofollow);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);
}

/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  */

static void
lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);

  if (private_marker)
    {
      gimple_set_location (private_marker, loc);
      gimple_call_set_lhs (private_marker, ddvar);
      gimple_call_set_arg (private_marker, 1, ddvar);
    }

  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, (count == 1) ? private_marker : NULL,
			     join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}

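/* Each iteration of the loop above appends one level's fork to HEAD and
   prepends the matching join to TAIL, so for e.g. a gang+worker loop the
   innermost fork/join pair ends up closest to the loop body and the whole
   region nests roughly as fork(gang) fork(worker) ... body ...
   join(worker) join(gang), bracketed by the head/tail markers.  */
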
/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
   catch handler and return it.  This prevents programs from violating the
   structured block semantics with throws.  */

static gimple_seq
maybe_catch_exception (gimple_seq body)
{
  gimple *g;
  tree decl;

  if (!flag_exceptions)
    return body;

  if (lang_hooks.eh_protect_cleanup_actions != NULL)
    decl = lang_hooks.eh_protect_cleanup_actions ();
  else
    decl = builtin_decl_explicit (BUILT_IN_TRAP);

  g = gimple_build_eh_must_not_throw (decl);
  g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
			GIMPLE_TRY_CATCH);

  return gimple_seq_alloc_with_stmt (g);
}

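/* In effect the body becomes roughly

	try { BODY; } catch (...) { <must-not-throw: terminate or trap>; }

   so an exception escaping the structured block aborts execution rather
   than unwinding across the OMP region boundary.  */
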
/* Routines to lower OMP directives into OMP-GIMPLE.  */

/* If ctx is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
   and conditional branch to parallel's cancel_label to handle
   cancellation in the implicit barrier.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
	     && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
      return;
}

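/* The added tail is roughly "if (<barrier result> != 0) goto cancel_label;"
   after the GIMPLE_OMP_RETURN, i.e. when the cancellable implicit barrier
   observes a pending cancellation, control transfers to the enclosing
   parallel's cancellation label instead of falling through.  */
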
/* Find the first task_reduction or reduction clause or return NULL
   if there are none.  */

static inline tree
omp_task_reductions_find_first (tree clauses, enum tree_code code,
				enum omp_clause_code ccode)
{
  while (1)
    {
      clauses = omp_find_clause (clauses, ccode);
      if (clauses == NULL_TREE)
	return NULL_TREE;
      if (ccode != OMP_CLAUSE_REDUCTION
	  || code == OMP_TASKLOOP
	  || OMP_CLAUSE_REDUCTION_TASK (clauses))
	return clauses;
      clauses = OMP_CLAUSE_CHAIN (clauses);
    }
}

static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
				       gimple_seq *, gimple_seq *);

/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
  lower_lastprivate_conditional_clauses (clauses_ptr, ctx);

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  control = create_tmp_var (unsigned_type_node, ".section");
  gimple_omp_sections_set_control (stmt, control);

  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &ilist, &l, &clist, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
			   &clist, ctx);
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
      gimple_seq_add_seq (&olist, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
    }

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  t = gimple_build_omp_continue (control, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}

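/* After lowering, the replacement bind contains roughly

	<ilist>					// data-sharing setup
	GIMPLE_OMP_SECTIONS <clauses>
	GIMPLE_OMP_SECTIONS_SWITCH
	<bind with the lowered section bodies>
	GIMPLE_OMP_CONTINUE (.section, .section)
	<olist> <dlist>
	GIMPLE_OMP_RETURN [nowait]

   which pass_expand_omp later turns into a switch driven by the
   GOMP_sections_start/GOMP_sections_next runtime calls.  */
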
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, without a copyprivate clause:

	if (GOMP_single_start ())
	  BODY;
	[ GOMP_barrier (); ]	-> unless 'nowait' is present.

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
{
  location_t loc = gimple_location (single_stmt);
  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *call, *cond;
  tree lhs, decl;

  decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
  lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
  call = gimple_build_call (decl, 0);
  gimple_call_set_lhs (call, lhs);
  gimple_seq_add_stmt (pre_p, call);

  cond = gimple_build_cond (EQ_EXPR, lhs,
			    fold_convert_loc (loc, TREE_TYPE (lhs),
					      boolean_true_node),
			    tlabel, flabel);
  gimple_seq_add_stmt (pre_p, cond);
  gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
  gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
}

/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

      {
	if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	  {
	    BODY;
	    copyout.a = a;
	    copyout.b = b;
	    copyout.c = c;
	    GOMP_single_copy_end (&copyout);
	  }
	else
	  {
	    a = copyout_p->a;
	    b = copyout_p->b;
	    c = copyout_p->c;
	  }
	GOMP_barrier ();
      }

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			     &copyin_seq, ctx);

  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}

/* Expand code for an OpenMP single directive.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}

/* Lower code for an OMP scope directive.  */

static void
lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *scope_stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;
  gimple_seq tred_dlist = NULL;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;

  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
				      OMP_SCOPE, OMP_CLAUSE_REDUCTION);
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
      gimple_omp_scope_set_clauses (scope_stmt, c);
      lower_omp_task_reductions (ctx, OMP_SCOPE,
				 gimple_omp_scope_clauses (scope_stmt),
				 &bind_body, &tred_dlist);
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
      gimple *stmt = gimple_build_call (fndecl, 1, temp);
      gimple_seq_add_stmt (&bind_body, stmt);
    }

  lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, scope_stmt);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));

  gimple_omp_set_body (scope_stmt, NULL);

  gimple_seq clist = NULL;
  lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
			   &bind_body, &clist, ctx);
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&bind_body, g);
      gimple_seq_add_seq (&bind_body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&bind_body, g);
    }

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  gimple_seq_add_seq (&bind_body_tail, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);

  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}

/* Expand code for an OpenMP master or masked directive.  */

static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, lab = NULL, x, bfn_decl;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tseq;
  tree filter = integer_zero_node;

  push_gimplify_context ();

  if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
    {
      filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
				OMP_CLAUSE_FILTER);
      if (filter)
	filter = fold_convert (integer_type_node,
			       OMP_CLAUSE_FILTER_EXPR (filter));
      else
	filter = integer_zero_node;
    }
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  x = build_call_expr_loc (loc, bfn_decl, 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, filter);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  tseq = NULL;
  gimplify_and_add (x, &tseq);
  gimple_bind_add_seq (bind, tseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_label (lab));

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}

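/* E.g. for "#pragma omp masked filter (tid)" the guard built above is
   roughly "if (omp_get_thread_num () != tid) goto lab; BODY; lab:;",
   with plain 'master' being the FILTER == 0 special case.  */
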
/* Helper function for lower_omp_task_reductions.  For a specific PASS,
   find the next clause that should be processed, or return false if all
   have been processed already.  */

static inline bool
omp_task_reduction_iterate (int pass, enum tree_code code,
			    enum omp_clause_code ccode, tree *c, tree *decl,
			    tree *type, tree *next)
{
  for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
    {
      if (ccode == OMP_CLAUSE_REDUCTION
	  && code != OMP_TASKLOOP
	  && !OMP_CLAUSE_REDUCTION_TASK (*c))
	continue;
      *decl = OMP_CLAUSE_DECL (*c);
      *type = TREE_TYPE (*decl);
      if (TREE_CODE (*decl) == MEM_REF)
	{
	  if (pass != 1)
	    continue;
	}
      else
	{
	  if (omp_privatize_by_reference (*decl))
	    *type = TREE_TYPE (*type);
	  if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
	    continue;
	}
      *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
      return true;
    }
  *decl = NULL_TREE;
  *type = NULL_TREE;
  *next = NULL_TREE;
  return false;
}

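/* The iterator drives the two-pass loops in lower_omp_task_reductions
   below: pass 0 visits reductions whose private copy has constant size,
   pass 1 the variable-sized and MEM_REF (array section) ones, so that
   all fixed-size fields are laid out first in the record.  */
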
/* Lower task_reduction and reduction clauses (the latter unless CODE is
   OMP_TASKGROUP only with task modifier).  Register the mappings of those
   in the START sequence, and reduce and unregister them in the END
   sequence.  */

static void
lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
			   gimple_seq *start, gimple_seq *end)
{
  enum omp_clause_code ccode
    = (code == OMP_TASKGROUP
       ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
  tree cancellable = NULL_TREE;
  clauses = omp_task_reductions_find_first (clauses, code, ccode);
  if (clauses == NULL_TREE)
    return;
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
	if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	    && outer->cancellable)
	  {
	    cancellable = error_mark_node;
	    break;
	  }
	else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
		 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
	  break;
    }
  tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree *last = &TYPE_FIELDS (record_type);
  unsigned cnt = 0;
  if (cancellable)
    {
      tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			       ptr_type_node);
      tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
				integer_type_node);
      *last = field;
      DECL_CHAIN (field) = ifield;
      last = &DECL_CHAIN (ifield);
      DECL_CONTEXT (field) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
      DECL_CONTEXT (ifield) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
    }
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  ++cnt;
	  tree new_type = type;
	  if (ctx->outer)
	    new_type = remap_type (type, &ctx->outer->cb);
	  tree field
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
			  new_type);
	  if (DECL_P (decl) && type == TREE_TYPE (decl))
	    {
	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	    }
	  else
	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
	  DECL_CONTEXT (field) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
	  *last = field;
	  last = &DECL_CHAIN (field);
	  tree bfield
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
			  boolean_type_node);
	  DECL_CONTEXT (bfield) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
	  *last = bfield;
	  last = &DECL_CHAIN (bfield);
	}
    }
  *last = NULL_TREE;
  layout_type (record_type);

  /* Build up an array which registers with the runtime all the reductions
     and deregisters them at the end.  Format documented in libgomp/task.c.  */
  tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
  tree avar = create_tmp_var_raw (atype);
  gimple_add_tmp_var (avar);
  TREE_ADDRESSABLE (avar) = 1;
  tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
		   NULL_TREE, NULL_TREE);
  tree t = build_int_cst (pointer_sized_int_node, cnt);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  gimple_seq seq = NULL;
  tree sz = fold_convert (pointer_sized_int_node,
			  TYPE_SIZE_UNIT (record_type));
  int cachesz = 64;
  sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, cachesz - 1));
  sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
  ctx->task_reductions.create (1 + cnt);
  ctx->task_reduction_map = new hash_map<tree, unsigned>;
  ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
				   ? sz : NULL_TREE);
  sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
  gimple_seq_add_seq (start, seq);
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
	      NULL_TREE, NULL_TREE);
  gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node,
		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, -1);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, 0);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

  /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
     and for each task reduction checks a bool right after the private variable
     within that thread's chunk; if the bool is clear, it hasn't been
     initialized and thus isn't going to be reduced nor destructed, otherwise
     reduce and destruct it.  */
  tree idx = create_tmp_var (size_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
  tree num_thr_sz = create_tmp_var (size_type_node);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = NULL_TREE, lab7 = NULL_TREE;
  gimple *g;
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      /* For worksharing constructs or scope, only perform it in the master
	 thread, with the exception of cancelled implicit barriers - then only
	 handle the current thread.  */
      tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
      tree thr_num = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, thr_num);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	{
	  tree c;
	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  if (code == OMP_FOR)
	    c = gimple_omp_for_clauses (ctx->stmt);
	  else if (code == OMP_SECTIONS)
	    c = gimple_omp_sections_clauses (ctx->stmt);
	  else /* if (code == OMP_SCOPE) */
	    c = gimple_omp_scope_clauses (ctx->stmt);
	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
	  cancellable = c;
	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
				 lab5, lab6);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab5));
	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
	  gimple_seq_add_stmt (end, g);
	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
				   build_one_cst (TREE_TYPE (idx)));
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_goto (lab3));
	  gimple_seq_add_stmt (end, gimple_build_label (lab6));
	}
      g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
      gimple_seq_add_stmt (end, g);
      gimple_seq_add_stmt (end, gimple_build_label (lab4));
    }
  if (code != OMP_PARALLEL)
    {
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
      tree num_thr = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, num_thr);
      gimple_seq_add_stmt (end, g);
      g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	gimple_seq_add_stmt (end, gimple_build_label (lab3));
    }
  else
    {
      tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
      t = fold_convert (size_type_node, t);
      gimplify_assign (num_thr_sz, t, end);
    }
  t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  tree data = create_tmp_var (pointer_sized_int_node);
  gimple_seq_add_stmt (end, gimple_build_assign (data, t));
  if (code == OMP_TASKLOOP)
    {
      lab7 = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, data,
			     build_zero_cst (pointer_sized_int_node),
			     lab1, lab7);
      gimple_seq_add_stmt (end, g);
    }
  gimple_seq_add_stmt (end, gimple_build_label (lab1));
  tree ptr;
  if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
    ptr = create_tmp_var (build_pointer_type (record_type));
  else
    ptr = create_tmp_var (ptr_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));

  tree field = TYPE_FIELDS (record_type);
  cnt = 0;
  if (cancellable)
    field = DECL_CHAIN (DECL_CHAIN (field));
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  tree var = decl, ref;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      tree v = var;
	      if (TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      else if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	      tree orig_var = var;
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (orig_var != var)
		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
	      else if (TREE_CODE (v) == ADDR_EXPR)
		t = build_fold_addr_expr (t);
	      else if (TREE_CODE (v) == INDIRECT_REF)
		t = build_fold_indirect_ref (t);
	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
		}
	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
				 fold_convert (size_type_node,
					       TREE_OPERAND (decl, 1)));
	    }
	  else
	    {
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (!omp_privatize_by_reference (decl))
		t = build_fold_addr_expr (t);
	    }
	  t = fold_convert (pointer_sized_int_node, t);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
	  t = unshare_expr (byte_position (field));
	  t = fold_convert (pointer_sized_int_node, t);
	  ctx->task_reduction_map->put (c, cnt);
	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
					   ? t : NULL_TREE);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

	  tree bfield = DECL_CHAIN (field);
	  tree cond;
	  if (code == OMP_PARALLEL
	      || code == OMP_FOR
	      || code == OMP_SECTIONS
	      || code == OMP_SCOPE)
	    /* In parallel, worksharing or scope all threads unconditionally
	       initialize all their task reduction private variables.  */
	    cond = boolean_true_node;
	  else if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
			     unshare_expr (byte_position (bfield)));
	      seq = NULL;
	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
			     build_int_cst (pbool, 0));
	    }
	  else
	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
	  tree condv = create_tmp_var (boolean_type_node);
	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
				 lab3, lab4);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab3));
	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
	    {
	      /* If this reduction doesn't need destruction and parallel
		 has been cancelled, there is nothing to do for this
		 reduction, so jump around the merge operation.  */
	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	      g = gimple_build_cond (NE_EXPR, cancellable,
				     build_zero_cst (TREE_TYPE (cancellable)),
				     lab4, lab5);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (lab5));
	    }

	  tree new_var;
	  if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
				unshare_expr (byte_position (field)));
	      seq = NULL;
	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (field));
	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
				build_int_cst (pbool, 0));
	    }
	  else
	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
			      build_simple_mem_ref (ptr), field, NULL_TREE);

	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	  if (TREE_CODE (decl) != MEM_REF
	      && omp_privatize_by_reference (decl))
	    ref = build_simple_mem_ref (ref);
	  /* reduction(-:var) sums up the partial results, so it acts
	     identically to reduction(+:var).  */
	  if (rcode == MINUS_EXPR)
	    rcode = PLUS_EXPR;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree type = TREE_TYPE (new_var);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      tree i = create_tmp_var (TREE_TYPE (v));
	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      if (DECL_P (v))
		{
		  v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  tree vv = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (vv, v, start);
		  v = vv;
		}
	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	      new_var = build_fold_addr_expr (new_var);
	      new_var = fold_convert (ptype, new_var);
	      ref = fold_convert (ptype, ref);
	      tree m = create_tmp_var (ptype);
	      gimplify_assign (m, new_var, end);
	      new_var = m;
	      m = create_tmp_var (ptype);
	      gimplify_assign (m, ref, end);
	      ref = m;
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (end, gimple_build_label (body));
	      tree priv = build_simple_mem_ref (new_var);
	      tree out = build_simple_mem_ref (ref);
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  tree lab6 = NULL_TREE;
		  if (cancellable)
		    {
		      /* If this reduction needs destruction and parallel
			 has been cancelled, jump around the merge operation
			 to the destruction.  */
		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
					     lab6, lab5);
		      gimple_seq_add_stmt (end, g);
		      gimple_seq_add_stmt (end, gimple_build_label (lab5));
		    }
		  SET_DECL_VALUE_EXPR (placeholder, out);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
		  gimple_seq_add_seq (end,
				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    {
		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
		    }
		  if (cancellable)
		    gimple_seq_add_stmt (end, gimple_build_label (lab6));
		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
		  if (x)
		    {
		      gimple_seq tseq = NULL;
		      gimplify_stmt (&x, &tseq);
		      gimple_seq_add_seq (end, tseq);
		    }
		}
	      else
		{
		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
		  out = unshare_expr (out);
		  gimplify_assign (out, x, end);
		}
	      gimple *g
		= gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (endl));
	    }
	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree oldv = NULL_TREE;
	      tree lab6 = NULL_TREE;
	      if (cancellable)
		{
		  /* If this reduction needs destruction and parallel
		     has been cancelled, jump around the merge operation
		     to the destruction.  */
		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
					 lab6, lab5);
		  gimple_seq_add_stmt (end, g);
		  gimple_seq_add_stmt (end, gimple_build_label (lab5));
		}
	      if (omp_privatize_by_reference (decl)
		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
						 TREE_TYPE (ref)))
		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      tree refv = create_tmp_var (TREE_TYPE (ref));
	      gimplify_assign (refv, ref, end);
	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
	      SET_DECL_VALUE_EXPR (placeholder, ref);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      tree d = maybe_lookup_decl (decl, ctx);
	      gcc_assert (d);
	      if (DECL_HAS_VALUE_EXPR_P (d))
		oldv = DECL_VALUE_EXPR (d);
	      if (omp_privatize_by_reference (var))
		{
		  tree v = fold_convert (TREE_TYPE (d),
					 build_fold_addr_expr (new_var));
		  SET_DECL_VALUE_EXPR (d, v);
		}
	      else
		SET_DECL_VALUE_EXPR (d, new_var);
	      DECL_HAS_VALUE_EXPR_P (d) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      if (oldv)
		SET_DECL_VALUE_EXPR (d, oldv);
	      else
		{
		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
		  DECL_HAS_VALUE_EXPR_P (d) = 0;
		}
	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      if (cancellable)
		gimple_seq_add_stmt (end, gimple_build_label (lab6));
	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;
		  gimplify_stmt (&x, &tseq);
		  gimple_seq_add_seq (end, tseq);
		}
	    }
	  else
	    {
	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
	      ref = unshare_expr (ref);
	      gimplify_assign (ref, x, end);
	    }
	  gimple_seq_add_stmt (end, gimple_build_label (lab4));
	  ++cnt;
	  field = DECL_CHAIN (bfield);
	}
    }

  if (code == OMP_TASKGROUP)
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
      gimple_seq_add_stmt (start, g);
    }
  else
    {
      tree c;
      if (code == OMP_FOR)
	c = gimple_omp_for_clauses (ctx->stmt);
      else if (code == OMP_SECTIONS)
	c = gimple_omp_sections_clauses (ctx->stmt);
      else if (code == OMP_SCOPE)
	c = gimple_omp_scope_clauses (ctx->stmt);
      else
	c = gimple_omp_taskreg_clauses (ctx->stmt);
      c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
			build_fold_addr_expr (avar));
      gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
    }

  gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
  gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
						 size_one_node));
  g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
  gimple_seq_add_stmt (end, g);
  gimple_seq_add_stmt (end, gimple_build_label (lab2));
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      enum built_in_function bfn
	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
      t = builtin_decl_explicit (bfn);
      tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
      tree arg;
      if (cancellable)
	{
	  arg = create_tmp_var (c_bool_type);
	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
							 cancellable));
	}
      else
	arg = build_int_cst (c_bool_type, 0);
      g = gimple_build_call (t, 1, arg);
    }
  else
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
    }
  gimple_seq_add_stmt (end, g);
  if (lab7)
    gimple_seq_add_stmt (end, gimple_build_label (lab7));
  t = build_constructor (atype, NULL);
  TREE_THIS_VOLATILE (t) = 1;
  gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
}

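/* Sketch of the array built above (libgomp/task.c documents the
   authoritative format): element 0 holds the reduction count, element 1
   the per-thread chunk size padded to a cache line, element 2 the
   requested alignment (the runtime later stores the base address of the
   allocated block there, which is what the END loop reads into DATA),
   elements 3 and 4 are initialized to -1 and 0 for runtime bookkeeping,
   and each reduction I contributes entries starting at 7+3*I: the address
   of the original variable and the byte offset of its private copy within
   the chunk, with the third slot left to the runtime.  */
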
/* Expand code for an OpenMP taskgroup directive.  */

static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  gcall *x;
  gbind *bind;
  gimple_seq dseq = NULL;
  tree block = make_node (BLOCK);

  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  push_gimplify_context ();

  x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
			 0);
  gimple_bind_add_stmt (bind, x);

  lower_omp_task_reductions (ctx, OMP_TASKGROUP,
			     gimple_omp_taskgroup_clauses (stmt),
			     gimple_bind_body_ptr (bind), &dseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
  gimple_bind_add_seq (bind, dseq);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
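}

/* The resulting taskgroup shape is roughly

	GOMP_taskgroup_start ();
	<task reduction registration, if any>
	BODY;
	GIMPLE_OMP_RETURN	// later expanded to GOMP_taskgroup_end ()
	<task reduction unregistration>
   */
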
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
  */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp ordered threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}

9870 /* Expand code for an OpenMP ordered directive. */
9873 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9876 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
9877 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
9880 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9882 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9885 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
9886 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9887 OMP_CLAUSE_THREADS
);
9889 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9892 /* FIXME: This is needs to be moved to the expansion to verify various
9893 conditions only testable on cfg with dominators computed, and also
9894 all the depend clauses to be merged still might need to be available
9895 for the runtime checks. */
9897 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
9901 push_gimplify_context ();
9903 block
= make_node (BLOCK
);
9904 bind
= gimple_build_bind (NULL
, NULL
, block
);
9905 gsi_replace (gsi_p
, bind
, true);
9906 gimple_bind_add_stmt (bind
, stmt
);
9910 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
9911 build_int_cst (NULL_TREE
, threads
));
9912 cfun
->has_simduid_loops
= true;
9915 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
9917 gimple_bind_add_stmt (bind
, x
);
9919 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
9922 counter
= create_tmp_var (integer_type_node
);
9923 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
9924 gimple_call_set_lhs (g
, counter
);
9925 gimple_bind_add_stmt (bind
, g
);
9927 body
= create_artificial_label (UNKNOWN_LOCATION
);
9928 test
= create_artificial_label (UNKNOWN_LOCATION
);
9929 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
9931 tree simt_pred
= create_tmp_var (integer_type_node
);
9932 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
9933 gimple_call_set_lhs (g
, simt_pred
);
9934 gimple_bind_add_stmt (bind
, g
);
9936 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
9937 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
9938 gimple_bind_add_stmt (bind
, g
);
9940 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
9942 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9943 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9944 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9945 gimple_omp_set_body (stmt
, NULL
);
9949 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
9950 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
9951 gimple_bind_add_stmt (bind
, g
);
9953 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
9954 tree nonneg
= create_tmp_var (integer_type_node
);
9955 gimple_seq tseq
= NULL
;
9956 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
9957 gimple_bind_add_seq (bind
, tseq
);
9959 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
9960 gimple_call_set_lhs (g
, nonneg
);
9961 gimple_bind_add_stmt (bind
, g
);
9963 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
9964 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
9965 gimple_bind_add_stmt (bind
, g
);
9967 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
9970 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
9971 build_int_cst (NULL_TREE
, threads
));
9973 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
9975 gimple_bind_add_stmt (bind
, x
);
9977 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9979 pop_gimplify_context (bind
);
9981 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9982 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
9986 /* Expand code for an OpenMP scan directive and the structured block
9987 before the scan directive. */
9990 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9992 gimple
*stmt
= gsi_stmt (*gsi_p
);
9994 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
9995 tree lane
= NULL_TREE
;
9996 gimple_seq before
= NULL
;
9997 omp_context
*octx
= ctx
->outer
;
9999 if (octx
->scan_exclusive
&& !has_clauses
)
10001 gimple_stmt_iterator gsi2
= *gsi_p
;
10003 gimple
*stmt2
= gsi_stmt (gsi2
);
10004 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10005 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10006 the one with exclusive clause(s), comes first. */
10008 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
10009 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
10011 gsi_remove (gsi_p
, false);
10012 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
10013 ctx
= maybe_lookup_ctx (stmt2
);
10015 lower_omp_scan (gsi_p
, ctx
);
10020 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
10021 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
10022 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
10023 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
10024 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
10025 && !gimple_omp_for_combined_p (octx
->stmt
));
10026 bool is_for_simd
= is_simd
&& gimple_omp_for_combined_into_p (octx
->stmt
);
10027 if (is_for_simd
&& octx
->for_simd_scan_phase
)
10030 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
10031 OMP_CLAUSE__SIMDUID_
))
10033 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
10034 lane
= create_tmp_var (unsigned_type_node
);
10035 tree t
= build_int_cst (integer_type_node
,
10037 : octx
->scan_inclusive
? 2 : 3);
10039 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
10040 gimple_call_set_lhs (g
, lane
);
10041 gimple_seq_add_stmt (&before
, g
);
10044 if (is_simd
|| is_for
)
10046 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
10047 c
; c
= OMP_CLAUSE_CHAIN (c
))
10048 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10049 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
10051 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10052 tree var
= OMP_CLAUSE_DECL (c
);
10053 tree new_var
= lookup_decl (var
, octx
);
10054 tree val
= new_var
;
10055 tree var2
= NULL_TREE
;
10056 tree var3
= NULL_TREE
;
10057 tree var4
= NULL_TREE
;
10058 tree lane0
= NULL_TREE
;
10059 tree new_vard
= new_var
;
10060 if (omp_privatize_by_reference (var
))
10062 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
10065 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
10067 val
= DECL_VALUE_EXPR (new_vard
);
10068 if (new_vard
!= new_var
)
10070 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
10071 val
= TREE_OPERAND (val
, 0);
10073 if (TREE_CODE (val
) == ARRAY_REF
10074 && VAR_P (TREE_OPERAND (val
, 0)))
10076 tree v
= TREE_OPERAND (val
, 0);
10077 if (lookup_attribute ("omp simd array",
10078 DECL_ATTRIBUTES (v
)))
10080 val
= unshare_expr (val
);
10081 lane0
= TREE_OPERAND (val
, 1);
10082 TREE_OPERAND (val
, 1) = lane
;
10083 var2
= lookup_decl (v
, octx
);
10084 if (octx
->scan_exclusive
)
10085 var4
= lookup_decl (var2
, octx
);
10087 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10088 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
10091 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
10092 var2
, lane
, NULL_TREE
, NULL_TREE
);
10093 TREE_THIS_NOTRAP (var2
) = 1;
10094 if (octx
->scan_exclusive
)
10096 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
10097 var4
, lane
, NULL_TREE
,
10099 TREE_THIS_NOTRAP (var4
) = 1;
10110 var2
= build_outer_var_ref (var
, octx
);
10111 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10113 var3
= maybe_lookup_decl (new_vard
, octx
);
10114 if (var3
== new_vard
|| var3
== NULL_TREE
)
10116 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
10118 var4
= maybe_lookup_decl (var3
, octx
);
10119 if (var4
== var3
|| var4
== NULL_TREE
)
10121 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
10132 && octx
->scan_exclusive
10134 && var4
== NULL_TREE
)
10135 var4
= create_tmp_var (TREE_TYPE (val
));
10137 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10139 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
10144 /* If we've added a separate identity element
10145 variable, copy it over into val. */
10146 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10148 gimplify_and_add (x
, &before
);
10150 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
10152 /* Otherwise, assign to it the identity element. */
10153 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
10155 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10156 tree ref
= build_outer_var_ref (var
, octx
);
10157 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10158 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10161 if (new_vard
!= new_var
)
10162 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10163 SET_DECL_VALUE_EXPR (new_vard
, val
);
10165 SET_DECL_VALUE_EXPR (placeholder
, ref
);
10166 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10167 lower_omp (&tseq
, octx
);
10169 SET_DECL_VALUE_EXPR (new_vard
, x
);
10170 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10171 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10172 gimple_seq_add_seq (&before
, tseq
);
10174 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
10180 if (octx
->scan_exclusive
)
10182 tree v4
= unshare_expr (var4
);
10183 tree v2
= unshare_expr (var2
);
10184 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
10185 gimplify_and_add (x
, &before
);
10187 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10188 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10189 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10191 if (x
&& new_vard
!= new_var
)
10192 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
10194 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
10195 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10196 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10197 lower_omp (&tseq
, octx
);
10198 gimple_seq_add_seq (&before
, tseq
);
10199 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
10201 SET_DECL_VALUE_EXPR (new_vard
, x
);
10202 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10203 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10204 if (octx
->scan_inclusive
)
10206 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10208 gimplify_and_add (x
, &before
);
10210 else if (lane0
== NULL_TREE
)
10212 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10214 gimplify_and_add (x
, &before
);
10222 /* input phase. Set val to initializer before
10224 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
10225 gimplify_assign (val
, x
, &before
);
10230 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
10231 if (code
== MINUS_EXPR
)
10234 tree x
= build2 (code
, TREE_TYPE (var2
),
10235 unshare_expr (var2
), unshare_expr (val
));
10236 if (octx
->scan_inclusive
)
10238 gimplify_assign (unshare_expr (var2
), x
, &before
);
10239 gimplify_assign (val
, var2
, &before
);
10243 gimplify_assign (unshare_expr (var4
),
10244 unshare_expr (var2
), &before
);
10245 gimplify_assign (var2
, x
, &before
);
10246 if (lane0
== NULL_TREE
)
10247 gimplify_assign (val
, var4
, &before
);
10251 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
10253 tree vexpr
= unshare_expr (var4
);
10254 TREE_OPERAND (vexpr
, 1) = lane0
;
10255 if (new_vard
!= new_var
)
10256 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
10257 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
10261 if (is_simd
&& !is_for_simd
)
10263 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
10264 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
10265 gsi_replace (gsi_p
, gimple_build_nop (), true);
10268 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
10271 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (stmt
));
10272 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
10277 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10278 substitution of a couple of function calls. But in the NAMED case,
10279 requires that languages coordinate a symbol name. It is therefore
10280 best put here in common code. */
10282 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
10285 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10288 tree name
, lock
, unlock
;
10289 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
10291 location_t loc
= gimple_location (stmt
);
10294 name
= gimple_omp_critical_name (stmt
);
10299 if (!critical_name_mutexes
)
10300 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
10302 tree
*n
= critical_name_mutexes
->get (name
);
10307 decl
= create_tmp_var_raw (ptr_type_node
);
10309 new_str
= ACONCAT ((".gomp_critical_user_",
10310 IDENTIFIER_POINTER (name
), NULL
));
10311 DECL_NAME (decl
) = get_identifier (new_str
);
10312 TREE_PUBLIC (decl
) = 1;
10313 TREE_STATIC (decl
) = 1;
10314 DECL_COMMON (decl
) = 1;
10315 DECL_ARTIFICIAL (decl
) = 1;
10316 DECL_IGNORED_P (decl
) = 1;
10318 varpool_node::finalize_decl (decl
);
10320 critical_name_mutexes
->put (name
, decl
);
10325 /* If '#pragma omp critical' is inside offloaded region or
10326 inside function marked as offloadable, the symbol must be
10327 marked as offloadable too. */
10329 if (cgraph_node::get (current_function_decl
)->offloadable
)
10330 varpool_node::get_create (decl
)->offloadable
= 1;
10332 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
10333 if (is_gimple_omp_offloaded (octx
->stmt
))
10335 varpool_node::get_create (decl
)->offloadable
= 1;
10339 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
10340 lock
= build_call_expr_loc (loc
, lock
, 1,
10341 build_fold_addr_expr_loc (loc
, decl
));
10343 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
10344 unlock
= build_call_expr_loc (loc
, unlock
, 1,
10345 build_fold_addr_expr_loc (loc
, decl
));
10349 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
10350 lock
= build_call_expr_loc (loc
, lock
, 0);
10352 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
10353 unlock
= build_call_expr_loc (loc
, unlock
, 0);
10356 push_gimplify_context ();
10358 block
= make_node (BLOCK
);
10359 bind
= gimple_build_bind (NULL
, NULL
, block
);
10360 gsi_replace (gsi_p
, bind
, true);
10361 gimple_bind_add_stmt (bind
, stmt
);
10363 tbody
= gimple_bind_body (bind
);
10364 gimplify_and_add (lock
, &tbody
);
10365 gimple_bind_set_body (bind
, tbody
);
10367 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10368 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
10369 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
10370 gimple_omp_set_body (stmt
, NULL
);
10372 tbody
= gimple_bind_body (bind
);
10373 gimplify_and_add (unlock
, &tbody
);
10374 gimple_bind_set_body (bind
, tbody
);
10376 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
10378 pop_gimplify_context (bind
);
10379 gimple_bind_append_vars (bind
, ctx
->block_vars
);
10380 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
10383 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10384 for a lastprivate clause. Given a loop control predicate of (V
10385 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10386 is appended to *DLIST, iterator initialization is appended to
10387 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10388 to be emitted in a critical section. */
10391 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
10392 gimple_seq
*dlist
, gimple_seq
*clist
,
10393 struct omp_context
*ctx
)
10395 tree clauses
, cond
, vinit
;
10396 enum tree_code cond_code
;
10399 cond_code
= fd
->loop
.cond_code
;
10400 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
10402 /* When possible, use a strict equality expression. This can let VRP
10403 type optimizations deduce the value and remove a copy. */
10404 if (tree_fits_shwi_p (fd
->loop
.step
))
10406 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
10407 if (step
== 1 || step
== -1)
10408 cond_code
= EQ_EXPR
;
10411 tree n2
= fd
->loop
.n2
;
10412 if (fd
->collapse
> 1
10413 && TREE_CODE (n2
) != INTEGER_CST
10414 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
10416 struct omp_context
*taskreg_ctx
= NULL
;
10417 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
10419 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
10420 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
10421 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
10423 if (gimple_omp_for_combined_into_p (gfor
))
10425 gcc_assert (ctx
->outer
->outer
10426 && is_parallel_ctx (ctx
->outer
->outer
));
10427 taskreg_ctx
= ctx
->outer
->outer
;
10431 struct omp_for_data outer_fd
;
10432 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
10433 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
10436 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
10437 taskreg_ctx
= ctx
->outer
->outer
;
10439 else if (is_taskreg_ctx (ctx
->outer
))
10440 taskreg_ctx
= ctx
->outer
;
10444 tree taskreg_clauses
10445 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
10446 tree innerc
= omp_find_clause (taskreg_clauses
,
10447 OMP_CLAUSE__LOOPTEMP_
);
10448 gcc_assert (innerc
);
10449 int count
= fd
->collapse
;
10451 && fd
->last_nonrect
== fd
->first_nonrect
+ 1)
10452 if (tree v
= gimple_omp_for_index (fd
->for_stmt
, fd
->last_nonrect
))
10453 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
10455 for (i
= 0; i
< count
; i
++)
10457 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10458 OMP_CLAUSE__LOOPTEMP_
);
10459 gcc_assert (innerc
);
10461 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10462 OMP_CLAUSE__LOOPTEMP_
);
10464 n2
= fold_convert (TREE_TYPE (n2
),
10465 lookup_decl (OMP_CLAUSE_DECL (innerc
),
10469 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
10471 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
10473 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
10474 if (!gimple_seq_empty_p (stmts
))
10476 gimple_seq_add_seq (&stmts
, *dlist
);
10479 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10480 vinit
= fd
->loop
.n1
;
10481 if (cond_code
== EQ_EXPR
10482 && tree_fits_shwi_p (fd
->loop
.n2
)
10483 && ! integer_zerop (fd
->loop
.n2
))
10484 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
10486 vinit
= unshare_expr (vinit
);
10488 /* Initialize the iterator variable, so that threads that don't execute
10489 any iterations don't execute the lastprivate clauses by accident. */
10490 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
10494 /* OpenACC privatization.
10496 Or, in other words, *sharing* at the respective OpenACC level of
10499 From a correctness perspective, a non-addressable variable can't be accessed
10500 outside the current thread, so it can go in a (faster than shared memory)
10501 register -- though that register may need to be broadcast in some
10502 circumstances. A variable can only meaningfully be "shared" across workers
10503 or vector lanes if its address is taken, e.g. by a call to an atomic
10506 From an optimisation perspective, the answer might be fuzzier: maybe
10507 sometimes, using shared memory directly would be faster than
10511 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags
,
10512 const location_t loc
, const tree c
,
10515 const dump_user_location_t d_u_loc
10516 = dump_user_location_t::from_location_t (loc
);
10517 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10519 # pragma GCC diagnostic push
10520 # pragma GCC diagnostic ignored "-Wformat"
10522 dump_printf_loc (l_dump_flags
, d_u_loc
,
10523 "variable %<%T%> ", decl
);
10525 # pragma GCC diagnostic pop
10528 dump_printf (l_dump_flags
,
10530 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
10532 dump_printf (l_dump_flags
,
10533 "declared in block ");
10537 oacc_privatization_candidate_p (const location_t loc
, const tree c
,
10540 dump_flags_t l_dump_flags
= get_openacc_privatization_dump_flags ();
10542 /* There is some differentiation depending on block vs. clause. */
10547 if (res
&& !VAR_P (decl
))
10551 if (dump_enabled_p ())
10553 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10554 dump_printf (l_dump_flags
,
10555 "potentially has improper OpenACC privatization level: %qs\n",
10556 get_tree_code_name (TREE_CODE (decl
)));
10560 if (res
&& block
&& TREE_STATIC (decl
))
10564 if (dump_enabled_p ())
10566 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10567 dump_printf (l_dump_flags
,
10568 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10573 if (res
&& block
&& DECL_EXTERNAL (decl
))
10577 if (dump_enabled_p ())
10579 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10580 dump_printf (l_dump_flags
,
10581 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10586 if (res
&& !TREE_ADDRESSABLE (decl
))
10590 if (dump_enabled_p ())
10592 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10593 dump_printf (l_dump_flags
,
10594 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10595 "not addressable");
10601 if (dump_enabled_p ())
10603 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10604 dump_printf (l_dump_flags
,
10605 "is candidate for adjusting OpenACC privatization level\n");
10609 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10611 print_generic_decl (dump_file
, decl
, dump_flags
);
10612 fprintf (dump_file
, "\n");
10618 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10622 oacc_privatization_scan_clause_chain (omp_context
*ctx
, tree clauses
)
10624 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10625 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
)
10627 tree decl
= OMP_CLAUSE_DECL (c
);
10629 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c
), c
, decl
))
10632 gcc_checking_assert (!ctx
->oacc_privatization_candidates
.contains (decl
));
10633 ctx
->oacc_privatization_candidates
.safe_push (decl
);
10637 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10641 oacc_privatization_scan_decl_chain (omp_context
*ctx
, tree decls
)
10643 for (tree decl
= decls
; decl
; decl
= DECL_CHAIN (decl
))
10645 if (!oacc_privatization_candidate_p (gimple_location (ctx
->stmt
), NULL
, decl
))
10648 gcc_checking_assert (!ctx
->oacc_privatization_candidates
.contains (decl
));
10649 ctx
->oacc_privatization_candidates
.safe_push (decl
);
10653 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10656 omp_find_scan (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
10657 struct walk_stmt_info
*wi
)
10659 gimple
*stmt
= gsi_stmt (*gsi_p
);
10661 *handled_ops_p
= true;
10662 switch (gimple_code (stmt
))
10666 case GIMPLE_OMP_FOR
:
10667 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
10668 && gimple_omp_for_combined_into_p (stmt
))
10669 *handled_ops_p
= false;
10672 case GIMPLE_OMP_SCAN
:
10673 *(gimple_stmt_iterator
*) (wi
->info
) = *gsi_p
;
10674 return integer_zero_node
;
10681 /* Helper function for lower_omp_for, add transformations for a worksharing
10682 loop with scan directives inside of it.
10683 For worksharing loop not combined with simd, transform:
10684 #pragma omp for reduction(inscan,+:r) private(i)
10685 for (i = 0; i < n; i = i + 1)
10690 #pragma omp scan inclusive(r)
10696 into two worksharing loops + code to merge results:
10698 num_threads = omp_get_num_threads ();
10699 thread_num = omp_get_thread_num ();
10700 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10705 // For UDRs this is UDR init, or if ctors are needed, copy from
10706 // var3 that has been constructed to contain the neutral element.
10710 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10711 // a shared array with num_threads elements and rprivb to a local array
10712 // number of elements equal to the number of (contiguous) iterations the
10713 // current thread will perform. controlb and controlp variables are
10714 // temporaries to handle deallocation of rprivb at the end of second
10716 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10717 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10718 for (i = 0; i < n; i = i + 1)
10721 // For UDRs this is UDR init or copy from var3.
10723 // This is the input phase from user code.
10727 // For UDRs this is UDR merge.
10729 // Rather than handing it over to the user, save to local thread's
10731 rprivb[ivar] = var2;
10732 // For exclusive scan, the above two statements are swapped.
10736 // And remember the final value from this thread's into the shared
10738 rpriva[(sizetype) thread_num] = var2;
10739 // If more than one thread, compute using Work-Efficient prefix sum
10740 // the inclusive parallel scan of the rpriva array.
10741 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10746 num_threadsu = (unsigned int) num_threads;
10747 thread_numup1 = (unsigned int) thread_num + 1;
10750 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10754 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10759 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10760 mul = REALPART_EXPR <cplx>;
10761 ovf = IMAGPART_EXPR <cplx>;
10762 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10765 andvm1 = andv + 4294967295;
10767 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10769 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10770 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10771 rpriva[l] = rpriva[l - k] + rpriva[l];
10773 if (down == 0) goto <D.2121>; else goto <D.2122>;
10781 if (k != 0) goto <D.2108>; else goto <D.2103>;
10783 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10785 // For UDRs this is UDR init or copy from var3.
10789 var2 = rpriva[thread_num - 1];
10792 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10793 reduction(inscan,+:r) private(i)
10794 for (i = 0; i < n; i = i + 1)
10797 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10798 r = var2 + rprivb[ivar];
10801 // This is the scan phase from user code.
10803 // Plus a bump of the iterator.
10809 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
10810 struct omp_for_data
*fd
, omp_context
*ctx
)
10812 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
10813 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
10815 gimple_seq body
= gimple_omp_body (stmt
);
10816 gimple_stmt_iterator input1_gsi
= gsi_none ();
10817 struct walk_stmt_info wi
;
10818 memset (&wi
, 0, sizeof (wi
));
10819 wi
.val_only
= true;
10820 wi
.info
= (void *) &input1_gsi
;
10821 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
10822 gcc_assert (!gsi_end_p (input1_gsi
));
10824 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
10825 gimple_stmt_iterator gsi
= input1_gsi
;
10827 gimple_stmt_iterator scan1_gsi
= gsi
;
10828 gimple
*scan_stmt1
= gsi_stmt (gsi
);
10829 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
10831 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
10832 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
10833 gimple_omp_set_body (input_stmt1
, NULL
);
10834 gimple_omp_set_body (scan_stmt1
, NULL
);
10835 gimple_omp_set_body (stmt
, NULL
);
10837 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
10838 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
10839 gimple_omp_set_body (stmt
, body
);
10840 gimple_omp_set_body (input_stmt1
, input_body
);
10842 gimple_stmt_iterator input2_gsi
= gsi_none ();
10843 memset (&wi
, 0, sizeof (wi
));
10844 wi
.val_only
= true;
10845 wi
.info
= (void *) &input2_gsi
;
10846 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
10847 gcc_assert (!gsi_end_p (input2_gsi
));
10849 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
10852 gimple_stmt_iterator scan2_gsi
= gsi
;
10853 gimple
*scan_stmt2
= gsi_stmt (gsi
);
10854 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
10855 gimple_omp_set_body (scan_stmt2
, scan_body
);
10857 gimple_stmt_iterator input3_gsi
= gsi_none ();
10858 gimple_stmt_iterator scan3_gsi
= gsi_none ();
10859 gimple_stmt_iterator input4_gsi
= gsi_none ();
10860 gimple_stmt_iterator scan4_gsi
= gsi_none ();
10861 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
10862 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
10863 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
10866 memset (&wi
, 0, sizeof (wi
));
10867 wi
.val_only
= true;
10868 wi
.info
= (void *) &input3_gsi
;
10869 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
10870 gcc_assert (!gsi_end_p (input3_gsi
));
10872 input_stmt3
= gsi_stmt (input3_gsi
);
10876 scan_stmt3
= gsi_stmt (gsi
);
10877 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
10879 memset (&wi
, 0, sizeof (wi
));
10880 wi
.val_only
= true;
10881 wi
.info
= (void *) &input4_gsi
;
10882 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
10883 gcc_assert (!gsi_end_p (input4_gsi
));
10885 input_stmt4
= gsi_stmt (input4_gsi
);
10889 scan_stmt4
= gsi_stmt (gsi
);
10890 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
10892 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
10893 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
10896 tree num_threads
= create_tmp_var (integer_type_node
);
10897 tree thread_num
= create_tmp_var (integer_type_node
);
10898 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
10899 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
10900 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
10901 gimple_call_set_lhs (g
, num_threads
);
10902 gimple_seq_add_stmt (body_p
, g
);
10903 g
= gimple_build_call (threadnum_decl
, 0);
10904 gimple_call_set_lhs (g
, thread_num
);
10905 gimple_seq_add_stmt (body_p
, g
);
10907 tree ivar
= create_tmp_var (sizetype
);
10908 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
10909 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
10910 tree k
= create_tmp_var (unsigned_type_node
);
10911 tree l
= create_tmp_var (unsigned_type_node
);
10913 gimple_seq clist
= NULL
, mdlist
= NULL
;
10914 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
10915 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
10916 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
10917 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
10918 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10919 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10920 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
10922 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10923 tree var
= OMP_CLAUSE_DECL (c
);
10924 tree new_var
= lookup_decl (var
, ctx
);
10925 tree var3
= NULL_TREE
;
10926 tree new_vard
= new_var
;
10927 if (omp_privatize_by_reference (var
))
10928 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
10929 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10931 var3
= maybe_lookup_decl (new_vard
, ctx
);
10932 if (var3
== new_vard
)
10936 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
10937 tree rpriva
= create_tmp_var (ptype
);
10938 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
10939 OMP_CLAUSE_DECL (nc
) = rpriva
;
10941 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10943 tree rprivb
= create_tmp_var (ptype
);
10944 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
10945 OMP_CLAUSE_DECL (nc
) = rprivb
;
10946 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
10948 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10950 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
10951 if (new_vard
!= new_var
)
10952 TREE_ADDRESSABLE (var2
) = 1;
10953 gimple_add_tmp_var (var2
);
10955 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
10956 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10957 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10958 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10959 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10961 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
10962 thread_num
, integer_minus_one_node
);
10963 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
10964 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10965 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10966 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10967 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10969 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
10970 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10971 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10972 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10973 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10975 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
10976 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
10977 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10978 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10979 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10980 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10982 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
10983 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10984 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
10985 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10987 tree var4
= is_for_simd
? new_var
: var2
;
10988 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
10991 var5
= lookup_decl (var
, input_simd_ctx
);
10992 var6
= lookup_decl (var
, scan_simd_ctx
);
10993 if (new_vard
!= new_var
)
10995 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
10996 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
10999 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
11001 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
11004 x
= lang_hooks
.decls
.omp_clause_default_ctor
11005 (c
, var2
, build_outer_var_ref (var
, ctx
));
11007 gimplify_and_add (x
, &clist
);
11009 x
= build_outer_var_ref (var
, ctx
);
11010 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
11012 gimplify_and_add (x
, &thr01_list
);
11014 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
11015 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
11018 x
= unshare_expr (var4
);
11019 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11020 gimplify_and_add (x
, &thrn1_list
);
11021 x
= unshare_expr (var4
);
11022 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11023 gimplify_and_add (x
, &thr02_list
);
11025 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
11027 /* Otherwise, assign to it the identity element. */
11028 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11029 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11032 if (new_vard
!= new_var
)
11033 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11034 SET_DECL_VALUE_EXPR (new_vard
, val
);
11035 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11037 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
11038 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11039 lower_omp (&tseq
, ctx
);
11040 gimple_seq_add_seq (&thrn1_list
, tseq
);
11041 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11042 lower_omp (&tseq
, ctx
);
11043 gimple_seq_add_seq (&thr02_list
, tseq
);
11044 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11045 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11046 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
11048 SET_DECL_VALUE_EXPR (new_vard
, y
);
11051 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11052 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11056 x
= unshare_expr (var4
);
11057 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
11058 gimplify_and_add (x
, &thrn2_list
);
11062 x
= unshare_expr (rprivb_ref
);
11063 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
11064 gimplify_and_add (x
, &scan1_list
);
11068 if (ctx
->scan_exclusive
)
11070 x
= unshare_expr (rprivb_ref
);
11071 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11072 gimplify_and_add (x
, &scan1_list
);
11075 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11076 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11077 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11078 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11079 lower_omp (&tseq
, ctx
);
11080 gimple_seq_add_seq (&scan1_list
, tseq
);
11082 if (ctx
->scan_inclusive
)
11084 x
= unshare_expr (rprivb_ref
);
11085 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11086 gimplify_and_add (x
, &scan1_list
);
11090 x
= unshare_expr (rpriva_ref
);
11091 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
11092 unshare_expr (var4
));
11093 gimplify_and_add (x
, &mdlist
);
11095 x
= unshare_expr (is_for_simd
? var6
: new_var
);
11096 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
11097 gimplify_and_add (x
, &input2_list
);
11100 if (new_vard
!= new_var
)
11101 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11103 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11104 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11105 SET_DECL_VALUE_EXPR (new_vard
, val
);
11106 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11109 SET_DECL_VALUE_EXPR (placeholder
, var6
);
11110 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11113 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11114 lower_omp (&tseq
, ctx
);
11116 SET_DECL_VALUE_EXPR (new_vard
, y
);
11119 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11120 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11124 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
11125 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11126 lower_omp (&tseq
, ctx
);
11128 gimple_seq_add_seq (&input2_list
, tseq
);
11130 x
= build_outer_var_ref (var
, ctx
);
11131 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
11132 gimplify_and_add (x
, &last_list
);
11134 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
11135 gimplify_and_add (x
, &reduc_list
);
11136 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11137 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11139 if (new_vard
!= new_var
)
11140 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11141 SET_DECL_VALUE_EXPR (new_vard
, val
);
11142 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11143 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11144 lower_omp (&tseq
, ctx
);
11145 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
11146 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11147 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11149 SET_DECL_VALUE_EXPR (new_vard
, y
);
11152 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11153 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11155 gimple_seq_add_seq (&reduc_list
, tseq
);
11156 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
11157 gimplify_and_add (x
, &reduc_list
);
11159 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
11161 gimplify_and_add (x
, dlist
);
11165 x
= build_outer_var_ref (var
, ctx
);
11166 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
11168 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
11169 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
11171 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
11173 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
11175 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
11176 if (code
== MINUS_EXPR
)
11180 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
11183 if (ctx
->scan_exclusive
)
11184 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11186 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
11187 gimplify_assign (var2
, x
, &scan1_list
);
11188 if (ctx
->scan_inclusive
)
11189 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11193 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
11196 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
11197 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
11199 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
11202 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
11203 unshare_expr (rprival_ref
));
11204 gimplify_assign (rprival_ref
, x
, &reduc_list
);
11208 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11209 gimple_seq_add_stmt (&scan1_list
, g
);
11210 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11211 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11212 ? scan_stmt4
: scan_stmt2
), g
);
11214 tree controlb
= create_tmp_var (boolean_type_node
);
11215 tree controlp
= create_tmp_var (ptr_type_node
);
11216 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11217 OMP_CLAUSE_DECL (nc
) = controlb
;
11218 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11220 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11221 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11222 OMP_CLAUSE_DECL (nc
) = controlp
;
11223 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11225 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11226 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11227 OMP_CLAUSE_DECL (nc
) = controlb
;
11228 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11230 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11231 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11232 OMP_CLAUSE_DECL (nc
) = controlp
;
11233 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11235 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11237 *cp1
= gimple_omp_for_clauses (stmt
);
11238 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
11239 *cp2
= gimple_omp_for_clauses (new_stmt
);
11240 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
11244 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
11245 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
11247 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
11249 gsi_remove (&input3_gsi
, true);
11250 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
11252 gsi_remove (&scan3_gsi
, true);
11253 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
11255 gsi_remove (&input4_gsi
, true);
11256 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
11258 gsi_remove (&scan4_gsi
, true);
11262 gimple_omp_set_body (scan_stmt1
, scan1_list
);
11263 gimple_omp_set_body (input_stmt2
, input2_list
);
11266 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
11268 gsi_remove (&input1_gsi
, true);
11269 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
11271 gsi_remove (&scan1_gsi
, true);
11272 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
11274 gsi_remove (&input2_gsi
, true);
11275 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
11277 gsi_remove (&scan2_gsi
, true);
11279 gimple_seq_add_seq (body_p
, clist
);
11281 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11282 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11283 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11284 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11285 gimple_seq_add_stmt (body_p
, g
);
11286 g
= gimple_build_label (lab1
);
11287 gimple_seq_add_stmt (body_p
, g
);
11288 gimple_seq_add_seq (body_p
, thr01_list
);
11289 g
= gimple_build_goto (lab3
);
11290 gimple_seq_add_stmt (body_p
, g
);
11291 g
= gimple_build_label (lab2
);
11292 gimple_seq_add_stmt (body_p
, g
);
11293 gimple_seq_add_seq (body_p
, thrn1_list
);
11294 g
= gimple_build_label (lab3
);
11295 gimple_seq_add_stmt (body_p
, g
);
11297 g
= gimple_build_assign (ivar
, size_zero_node
);
11298 gimple_seq_add_stmt (body_p
, g
);
11300 gimple_seq_add_stmt (body_p
, stmt
);
11301 gimple_seq_add_seq (body_p
, body
);
11302 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
11305 g
= gimple_build_omp_return (true);
11306 gimple_seq_add_stmt (body_p
, g
);
11307 gimple_seq_add_seq (body_p
, mdlist
);
11309 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11310 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11311 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
11312 gimple_seq_add_stmt (body_p
, g
);
11313 g
= gimple_build_label (lab1
);
11314 gimple_seq_add_stmt (body_p
, g
);
11316 g
= omp_build_barrier (NULL
);
11317 gimple_seq_add_stmt (body_p
, g
);
11319 tree down
= create_tmp_var (unsigned_type_node
);
11320 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
11321 gimple_seq_add_stmt (body_p
, g
);
11323 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
11324 gimple_seq_add_stmt (body_p
, g
);
11326 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
11327 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
11328 gimple_seq_add_stmt (body_p
, g
);
11330 tree thread_numu
= create_tmp_var (unsigned_type_node
);
11331 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
11332 gimple_seq_add_stmt (body_p
, g
);
11334 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
11335 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
11336 build_int_cst (unsigned_type_node
, 1));
11337 gimple_seq_add_stmt (body_p
, g
);
11339 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11340 g
= gimple_build_label (lab3
);
11341 gimple_seq_add_stmt (body_p
, g
);
11343 tree twok
= create_tmp_var (unsigned_type_node
);
11344 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11345 gimple_seq_add_stmt (body_p
, g
);
11347 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
11348 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
11349 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
11350 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
11351 gimple_seq_add_stmt (body_p
, g
);
11352 g
= gimple_build_label (lab4
);
11353 gimple_seq_add_stmt (body_p
, g
);
11354 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
11355 gimple_seq_add_stmt (body_p
, g
);
11356 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11357 gimple_seq_add_stmt (body_p
, g
);
11359 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
11360 gimple_seq_add_stmt (body_p
, g
);
11361 g
= gimple_build_label (lab6
);
11362 gimple_seq_add_stmt (body_p
, g
);
11364 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11365 gimple_seq_add_stmt (body_p
, g
);
11367 g
= gimple_build_label (lab5
);
11368 gimple_seq_add_stmt (body_p
, g
);
11370 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11371 gimple_seq_add_stmt (body_p
, g
);
11373 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
11374 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
11375 gimple_call_set_lhs (g
, cplx
);
11376 gimple_seq_add_stmt (body_p
, g
);
11377 tree mul
= create_tmp_var (unsigned_type_node
);
11378 g
= gimple_build_assign (mul
, REALPART_EXPR
,
11379 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
11380 gimple_seq_add_stmt (body_p
, g
);
11381 tree ovf
= create_tmp_var (unsigned_type_node
);
11382 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
11383 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
11384 gimple_seq_add_stmt (body_p
, g
);
11386 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
11387 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
11388 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
11390 gimple_seq_add_stmt (body_p
, g
);
11391 g
= gimple_build_label (lab7
);
11392 gimple_seq_add_stmt (body_p
, g
);
11394 tree andv
= create_tmp_var (unsigned_type_node
);
11395 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
11396 gimple_seq_add_stmt (body_p
, g
);
11397 tree andvm1
= create_tmp_var (unsigned_type_node
);
11398 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
11399 build_minus_one_cst (unsigned_type_node
));
11400 gimple_seq_add_stmt (body_p
, g
);
11402 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
11403 gimple_seq_add_stmt (body_p
, g
);
11405 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
11406 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
11407 gimple_seq_add_stmt (body_p
, g
);
11408 g
= gimple_build_label (lab9
);
11409 gimple_seq_add_stmt (body_p
, g
);
11410 gimple_seq_add_seq (body_p
, reduc_list
);
11411 g
= gimple_build_label (lab8
);
11412 gimple_seq_add_stmt (body_p
, g
);
11414 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
11415 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
11416 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
11417 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
11419 gimple_seq_add_stmt (body_p
, g
);
11420 g
= gimple_build_label (lab10
);
11421 gimple_seq_add_stmt (body_p
, g
);
11422 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
11423 gimple_seq_add_stmt (body_p
, g
);
11424 g
= gimple_build_goto (lab12
);
11425 gimple_seq_add_stmt (body_p
, g
);
11426 g
= gimple_build_label (lab11
);
11427 gimple_seq_add_stmt (body_p
, g
);
11428 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11429 gimple_seq_add_stmt (body_p
, g
);
11430 g
= gimple_build_label (lab12
);
11431 gimple_seq_add_stmt (body_p
, g
);
11433 g
= omp_build_barrier (NULL
);
11434 gimple_seq_add_stmt (body_p
, g
);
11436 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
11438 gimple_seq_add_stmt (body_p
, g
);
11440 g
= gimple_build_label (lab2
);
11441 gimple_seq_add_stmt (body_p
, g
);
11443 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11444 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11445 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11446 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11447 gimple_seq_add_stmt (body_p
, g
);
11448 g
= gimple_build_label (lab1
);
11449 gimple_seq_add_stmt (body_p
, g
);
11450 gimple_seq_add_seq (body_p
, thr02_list
);
11451 g
= gimple_build_goto (lab3
);
11452 gimple_seq_add_stmt (body_p
, g
);
11453 g
= gimple_build_label (lab2
);
11454 gimple_seq_add_stmt (body_p
, g
);
11455 gimple_seq_add_seq (body_p
, thrn2_list
);
11456 g
= gimple_build_label (lab3
);
11457 gimple_seq_add_stmt (body_p
, g
);
11459 g
= gimple_build_assign (ivar
, size_zero_node
);
11460 gimple_seq_add_stmt (body_p
, g
);
11461 gimple_seq_add_stmt (body_p
, new_stmt
);
11462 gimple_seq_add_seq (body_p
, new_body
);
11464 gimple_seq new_dlist
= NULL
;
11465 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11466 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11467 tree num_threadsm1
= create_tmp_var (integer_type_node
);
11468 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
11469 integer_minus_one_node
);
11470 gimple_seq_add_stmt (&new_dlist
, g
);
11471 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
11472 gimple_seq_add_stmt (&new_dlist
, g
);
11473 g
= gimple_build_label (lab1
);
11474 gimple_seq_add_stmt (&new_dlist
, g
);
11475 gimple_seq_add_seq (&new_dlist
, last_list
);
11476 g
= gimple_build_label (lab2
);
11477 gimple_seq_add_stmt (&new_dlist
, g
);
11478 gimple_seq_add_seq (&new_dlist
, *dlist
);
11479 *dlist
= new_dlist
;
11482 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11483 the addresses of variables to be made private at the surrounding
11484 parallelism level. Such functions appear in the gimple code stream in two
11485 forms, e.g. for a partitioned loop:
11487 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11488 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11489 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11490 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11492 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11493 not as part of a HEAD_MARK sequence:
11495 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11497 For such stand-alone appearances, the 3rd argument is always 0, denoting
11498 gang partitioning. */
11501 lower_oacc_private_marker (omp_context
*ctx
)
11503 if (ctx
->oacc_privatization_candidates
.length () == 0)
11506 auto_vec
<tree
, 5> args
;
11508 args
.quick_push (build_int_cst (integer_type_node
, IFN_UNIQUE_OACC_PRIVATE
));
11509 args
.quick_push (integer_zero_node
);
11510 args
.quick_push (integer_minus_one_node
);
11514 FOR_EACH_VEC_ELT (ctx
->oacc_privatization_candidates
, i
, decl
)
11516 for (omp_context
*thisctx
= ctx
; thisctx
; thisctx
= thisctx
->outer
)
11518 tree inner_decl
= maybe_lookup_decl (decl
, thisctx
);
11525 gcc_checking_assert (decl
);
11527 tree addr
= build_fold_addr_expr (decl
);
11528 args
.safe_push (addr
);
11531 return gimple_build_call_internal_vec (IFN_UNIQUE
, args
);

/* Lower code for an OMP loop directive.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  if (is_gimple_omp_oacc (ctx->stmt))
    oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      if (is_gimple_omp_oacc (ctx->stmt))
	oacc_privatization_scan_decl_chain (ctx, vars);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and its block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      size_t count2 = 0;
      tree type2 = NULL_TREE;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      if (fd.collapse > 1
	  && fd.non_rect
	  && fd.last_nonrect == fd.first_nonrect + 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      v = gimple_omp_for_index (stmt, fd.first_nonrect);
	      type2 = TREE_TYPE (v);
	      count2 = 3;
	    }
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count + count2; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (i >= count ? type2 : type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
					 ctx);

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  gcall *private_marker = NULL;
  if (is_gimple_omp_oacc (ctx->stmt)
      && !gimple_seq_empty_p (omp_for_body))
    private_marker = lower_oacc_private_marker (ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt), private_marker,
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);

  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  if ((ctx->scan_inclusive || ctx->scan_exclusive)
      && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
  else
    {
      gimple_seq_add_stmt (&body, stmt);
      gimple_seq_add_seq (&body, gimple_omp_body (stmt));
    }

  gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							 fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);

  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
      gimple_seq_add_seq (&body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
    }

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  if (rclauses)
    {
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  /* Region exit marker goes at the end of the loop body.  */
  gimple *g = gimple_build_omp_return (fd.have_nowait);
  gimple_seq_add_stmt (&body, g);

  gimple_seq_add_seq (&body, tred_dlist);

  maybe_add_implicit_barrier_cancel (ctx, g, &body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}

/* Callback for walk_stmts.  Check if the current statement only contains
   GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */

static tree
check_combined_parallel (gimple_stmt_iterator *gsi_p,
			 bool *handled_ops_p,
			 struct walk_stmt_info *wi)
{
  int *info = (int *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_DEBUG:
      break;
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
      *info = *info == 0 ? 1 : -1;
      break;
    default:
      *info = -1;
      break;
    }
  return NULL;
}
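
/* For intuition (a hedged illustration, the walk itself is what decides):
   *info ends up as 1 exactly when the parallel body is a single
   workshare, e.g.

     #pragma omp parallel
     #pragma omp for
     for (i = 0; i < n; i++)
       a[i] = i;

   which lets lower_omp_taskreg mark the statement as combined so that
   expansion can use the combined parallel+workshare runtime entry
   points.  */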

struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  omp_context *ctx;
};

static tree
task_copyfn_copy_decl (tree var, copy_body_data *cb)
{
  struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;

  if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
    return create_tmp_var (TREE_TYPE (var));

  return var;
}

static tree
task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
{
  tree name, new_fields = NULL, type, f;

  type = lang_hooks.types.make_type (RECORD_TYPE);
  name = DECL_NAME (TYPE_NAME (orig_type));
  name = build_decl (gimple_location (tcctx->ctx->stmt),
		     TYPE_DECL, name, type);
  TYPE_NAME (type) = name;

  for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
    {
      tree new_f = copy_node (f);
      DECL_CONTEXT (new_f) = type;
      TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
      TREE_CHAIN (new_f) = new_fields;
      walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		 &tcctx->cb, NULL);
      new_fields = new_f;
      tcctx->cb.decl_map->put (f, new_f);
    }
  TYPE_FIELDS (type) = nreverse (new_fields);
  layout_type (type);
  return type;
}
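
/* An illustrative example of when the remap matters: for

     void foo (int n)
     {
       int vla[n];
       #pragma omp task firstprivate (vla)
       use (vla);
     }

   the field for 'vla' has variably modified type, so the record must be
   re-laid-out here with its size expressions remapped into the child
   function ('use' is just a placeholder name).  */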

/* Create task copyfn.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    tcctx.cb.decl_map = NULL;

  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	decl = OMP_CLAUSE_DECL (c);
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
	if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL)
		|| omp_privatize_by_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  {
	    if (ctx->allocate_map)
	      if (tree *allocatorp = ctx->allocate_map->get (decl))
		{
		  tree allocator = *allocatorp;
		  HOST_WIDE_INT ialign = 0;
		  if (TREE_CODE (allocator) == TREE_LIST)
		    {
		      ialign = tree_to_uhwi (TREE_VALUE (allocator));
		      allocator = TREE_PURPOSE (allocator);
		    }
		  if (TREE_CODE (allocator) != INTEGER_CST)
		    {
		      n = splay_tree_lookup (ctx->sfield_map,
					     (splay_tree_key) allocator);
		      allocator = (tree) n->value;
		      if (tcctx.cb.decl_map)
			allocator = *tcctx.cb.decl_map->get (allocator);
		      tree a = build_simple_mem_ref_loc (loc, sarg);
		      allocator = omp_build_component_ref (a, allocator);
		    }
		  allocator = fold_convert (pointer_sized_int_node, allocator);
		  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
		  tree align = build_int_cst (size_type_node,
					      MAX (ialign,
						   DECL_ALIGN_UNIT (decl)));
		  tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
		  tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
						  allocator);
		  ptr = fold_convert (TREE_TYPE (dst), ptr);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
		  append_to_statement_list (t, &list);
		  dst = build_simple_mem_ref_loc (loc, dst);
		}
	    t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  }
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
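
/* A hedged sketch of why this function exists: for

     #pragma omp task firstprivate (s)

   where 's' needs non-trivial copying (a VLA, or a C++ type with a copy
   constructor), GOMP_task cannot simply memcpy the captured block, so it
   invokes the copy function generated above with pointers to the
   destination and source records.  */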
static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  gcc_unreachable ();
	}
  if (cnt[1] || cnt[3])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  tree type = build_array_type_nelts (ptr_type_node, total + idx);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  for (i = 0; i < 4; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  tree clobber = build_clobber (type);
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
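
/* Illustrative layout only (derived from the code above, not a normative
   description of the libgomp ABI): with

     #pragma omp task depend(in: x) depend(mutexinoutset: y)

   cnt[1] is nonzero, so idx == 5 and the array becomes roughly

     { 0, 2, 0, 1, 1, &y, &x }

   i.e. a zero marker, the total address count, the counts for out/inout,
   mutexinoutset and in, and then the dependence addresses grouped by
   kind; the runtime then receives the array's address through the
   artificial OMP_CLAUSE_DEPEND_LAST clause built above.  */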

/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (dep_bind, dep_ilist);
	  gimple_bind_add_stmt (dep_bind, stmt);
	  gimple_bind_add_seq (dep_bind, dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			      OMP_CLAUSE__REDUCTEMP_)))
    {
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
							: OMP_PARALLEL,
				 gimple_omp_taskreg_clauses (ctx->stmt),
				 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) != GIMPLE_OMP_TASK)
    lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
  gimple_omp_set_body (stmt, new_body);

  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
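
/* For intuition, a hedged sketch of the marshalling arranged above: for

     #pragma omp parallel shared (b) firstprivate (a)

   the ilist stores 'a' (by value) and '&b' into the artificial
   .omp_data_o record before the GIMPLE_OMP_PARALLEL, and new_body starts
   by loading the receiver pointer so that the child function reads them
   back through it.  */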
12516 /* Lower the GIMPLE_OMP_TARGET in the current statement
12517 in GSI_P. CTX holds context information for the directive. */
12520 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12523 tree child_fn
, t
, c
;
12524 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
12525 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
12526 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
12527 location_t loc
= gimple_location (stmt
);
12528 bool offloaded
, data_region
;
12529 unsigned int map_cnt
= 0;
12530 tree in_reduction_clauses
= NULL_TREE
;
12532 offloaded
= is_gimple_omp_offloaded (stmt
);
12533 switch (gimple_omp_target_kind (stmt
))
12535 case GF_OMP_TARGET_KIND_REGION
:
12537 q
= &in_reduction_clauses
;
12538 for (p
= gimple_omp_target_clauses_ptr (stmt
); *p
; )
12539 if (OMP_CLAUSE_CODE (*p
) == OMP_CLAUSE_IN_REDUCTION
)
12542 q
= &OMP_CLAUSE_CHAIN (*q
);
12543 *p
= OMP_CLAUSE_CHAIN (*p
);
12546 p
= &OMP_CLAUSE_CHAIN (*p
);
12548 *p
= in_reduction_clauses
;
12550 case GF_OMP_TARGET_KIND_UPDATE
:
12551 case GF_OMP_TARGET_KIND_ENTER_DATA
:
12552 case GF_OMP_TARGET_KIND_EXIT_DATA
:
12553 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
12554 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
12555 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
12556 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
12557 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
12558 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
12559 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
12560 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
12561 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
12562 data_region
= false;
12564 case GF_OMP_TARGET_KIND_DATA
:
12565 case GF_OMP_TARGET_KIND_OACC_DATA
:
12566 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
12567 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
12568 data_region
= true;
12571 gcc_unreachable ();
12574 clauses
= gimple_omp_target_clauses (stmt
);
12576 gimple_seq dep_ilist
= NULL
;
12577 gimple_seq dep_olist
= NULL
;
12578 bool has_depend
= omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
) != NULL_TREE
;
12579 if (has_depend
|| in_reduction_clauses
)
12581 push_gimplify_context ();
12582 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12584 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
12585 &dep_ilist
, &dep_olist
);
12586 if (in_reduction_clauses
)
12587 lower_rec_input_clauses (in_reduction_clauses
, &dep_ilist
, &dep_olist
,
12595 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
12596 tgt_body
= gimple_bind_body (tgt_bind
);
12598 else if (data_region
)
12599 tgt_body
= gimple_omp_body (stmt
);
12600 child_fn
= ctx
->cb
.dst_fn
;
12602 push_gimplify_context ();
12605 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12606 switch (OMP_CLAUSE_CODE (c
))
12612 case OMP_CLAUSE_MAP
:
12614 /* First check what we're prepared to handle in the following. */
12615 switch (OMP_CLAUSE_MAP_KIND (c
))
12617 case GOMP_MAP_ALLOC
:
12619 case GOMP_MAP_FROM
:
12620 case GOMP_MAP_TOFROM
:
12621 case GOMP_MAP_POINTER
:
12622 case GOMP_MAP_TO_PSET
:
12623 case GOMP_MAP_DELETE
:
12624 case GOMP_MAP_RELEASE
:
12625 case GOMP_MAP_ALWAYS_TO
:
12626 case GOMP_MAP_ALWAYS_FROM
:
12627 case GOMP_MAP_ALWAYS_TOFROM
:
12628 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
12629 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
12630 case GOMP_MAP_STRUCT
:
12631 case GOMP_MAP_ALWAYS_POINTER
:
12632 case GOMP_MAP_ATTACH
:
12633 case GOMP_MAP_DETACH
:
12635 case GOMP_MAP_IF_PRESENT
:
12636 case GOMP_MAP_FORCE_ALLOC
:
12637 case GOMP_MAP_FORCE_TO
:
12638 case GOMP_MAP_FORCE_FROM
:
12639 case GOMP_MAP_FORCE_TOFROM
:
12640 case GOMP_MAP_FORCE_PRESENT
:
12641 case GOMP_MAP_FORCE_DEVICEPTR
:
12642 case GOMP_MAP_DEVICE_RESIDENT
:
12643 case GOMP_MAP_LINK
:
12644 case GOMP_MAP_FORCE_DETACH
:
12645 gcc_assert (is_gimple_omp_oacc (stmt
));
12648 gcc_unreachable ();
12652 case OMP_CLAUSE_TO
:
12653 case OMP_CLAUSE_FROM
:
12655 var
= OMP_CLAUSE_DECL (c
);
12658 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
12659 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12660 && (OMP_CLAUSE_MAP_KIND (c
)
12661 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
12666 if (DECL_SIZE (var
)
12667 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12669 tree var2
= DECL_VALUE_EXPR (var
);
12670 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12671 var2
= TREE_OPERAND (var2
, 0);
12672 gcc_assert (DECL_P (var2
));
12677 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12678 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12679 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
12681 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12683 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
12684 && varpool_node::get_create (var
)->offloadable
)
12687 tree type
= build_pointer_type (TREE_TYPE (var
));
12688 tree new_var
= lookup_decl (var
, ctx
);
12689 x
= create_tmp_var_raw (type
, get_name (new_var
));
12690 gimple_add_tmp_var (x
);
12691 x
= build_simple_mem_ref (x
);
12692 SET_DECL_VALUE_EXPR (new_var
, x
);
12693 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12698 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12699 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12700 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
12701 && is_omp_target (stmt
))
12703 gcc_assert (maybe_lookup_field (c
, ctx
));
12708 if (!maybe_lookup_field (var
, ctx
))
12711 /* Don't remap compute constructs' reduction variables, because the
12712 intermediate result must be local to each gang. */
12713 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12714 && is_gimple_omp_oacc (ctx
->stmt
)
12715 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
12717 x
= build_receiver_ref (var
, true, ctx
);
12718 tree new_var
= lookup_decl (var
, ctx
);
12720 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12721 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
12722 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12723 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12724 x
= build_simple_mem_ref (x
);
12725 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12727 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
12728 if (omp_privatize_by_reference (new_var
)
12729 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
12730 || DECL_BY_REFERENCE (var
)))
12732 /* Create a local object to hold the instance
12734 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
12735 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
12736 tree inst
= create_tmp_var (type
, id
);
12737 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
12738 x
= build_fold_addr_expr (inst
);
12740 gimplify_assign (new_var
, x
, &fplist
);
12742 else if (DECL_P (new_var
))
12744 SET_DECL_VALUE_EXPR (new_var
, x
);
12745 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12748 gcc_unreachable ();
12753 case OMP_CLAUSE_FIRSTPRIVATE
:
12754 gcc_checking_assert (offloaded
);
12755 if (is_gimple_omp_oacc (ctx
->stmt
))
12757 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12758 gcc_checking_assert (!is_oacc_kernels (ctx
));
12759 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12760 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12762 goto oacc_firstprivate
;
12765 var
= OMP_CLAUSE_DECL (c
);
12766 if (!omp_privatize_by_reference (var
)
12767 && !is_gimple_reg_type (TREE_TYPE (var
)))
12769 tree new_var
= lookup_decl (var
, ctx
);
12770 if (is_variable_sized (var
))
12772 tree pvar
= DECL_VALUE_EXPR (var
);
12773 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12774 pvar
= TREE_OPERAND (pvar
, 0);
12775 gcc_assert (DECL_P (pvar
));
12776 tree new_pvar
= lookup_decl (pvar
, ctx
);
12777 x
= build_fold_indirect_ref (new_pvar
);
12778 TREE_THIS_NOTRAP (x
) = 1;
12781 x
= build_receiver_ref (var
, true, ctx
);
12782 SET_DECL_VALUE_EXPR (new_var
, x
);
12783 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12787 case OMP_CLAUSE_PRIVATE
:
12788 gcc_checking_assert (offloaded
);
12789 if (is_gimple_omp_oacc (ctx
->stmt
))
12791 /* No 'private' clauses on OpenACC 'kernels'. */
12792 gcc_checking_assert (!is_oacc_kernels (ctx
));
12793 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12794 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12798 var
= OMP_CLAUSE_DECL (c
);
12799 if (is_variable_sized (var
))
12801 tree new_var
= lookup_decl (var
, ctx
);
12802 tree pvar
= DECL_VALUE_EXPR (var
);
12803 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12804 pvar
= TREE_OPERAND (pvar
, 0);
12805 gcc_assert (DECL_P (pvar
));
12806 tree new_pvar
= lookup_decl (pvar
, ctx
);
12807 x
= build_fold_indirect_ref (new_pvar
);
12808 TREE_THIS_NOTRAP (x
) = 1;
12809 SET_DECL_VALUE_EXPR (new_var
, x
);
12810 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12814 case OMP_CLAUSE_USE_DEVICE_PTR
:
12815 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12816 case OMP_CLAUSE_IS_DEVICE_PTR
:
12817 var
= OMP_CLAUSE_DECL (c
);
12819 if (is_variable_sized (var
))
12821 tree new_var
= lookup_decl (var
, ctx
);
12822 tree pvar
= DECL_VALUE_EXPR (var
);
12823 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12824 pvar
= TREE_OPERAND (pvar
, 0);
12825 gcc_assert (DECL_P (pvar
));
12826 tree new_pvar
= lookup_decl (pvar
, ctx
);
12827 x
= build_fold_indirect_ref (new_pvar
);
12828 TREE_THIS_NOTRAP (x
) = 1;
12829 SET_DECL_VALUE_EXPR (new_var
, x
);
12830 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12832 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12833 && !omp_privatize_by_reference (var
)
12834 && !omp_is_allocatable_or_ptr (var
)
12835 && !lang_hooks
.decls
.omp_array_data (var
, true))
12836 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12838 tree new_var
= lookup_decl (var
, ctx
);
12839 tree type
= build_pointer_type (TREE_TYPE (var
));
12840 x
= create_tmp_var_raw (type
, get_name (new_var
));
12841 gimple_add_tmp_var (x
);
12842 x
= build_simple_mem_ref (x
);
12843 SET_DECL_VALUE_EXPR (new_var
, x
);
12844 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12848 tree new_var
= lookup_decl (var
, ctx
);
12849 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
12850 gimple_add_tmp_var (x
);
12851 SET_DECL_VALUE_EXPR (new_var
, x
);
12852 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12859 target_nesting_level
++;
12860 lower_omp (&tgt_body
, ctx
);
12861 target_nesting_level
--;
12863 else if (data_region
)
12864 lower_omp (&tgt_body
, ctx
);
12868 /* Declare all the variables created by mapping and the variables
12869 declared in the scope of the target body. */
12870 record_vars_into (ctx
->block_vars
, child_fn
);
12871 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
12872 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
12877 if (ctx
->record_type
)
12880 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
12881 DECL_NAMELESS (ctx
->sender_decl
) = 1;
12882 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
12883 t
= make_tree_vec (3);
12884 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
12885 TREE_VEC_ELT (t
, 1)
12886 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
12887 ".omp_data_sizes");
12888 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
12889 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
12890 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
12891 tree tkind_type
= short_unsigned_type_node
;
12892 int talign_shift
= 8;
12893 TREE_VEC_ELT (t
, 2)
12894 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
12895 ".omp_data_kinds");
12896 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
12897 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
12898 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
12899 gimple_omp_target_set_data_arg (stmt
, t
);
12901 vec
<constructor_elt
, va_gc
> *vsize
;
12902 vec
<constructor_elt
, va_gc
> *vkind
;
12903 vec_alloc (vsize
, map_cnt
);
12904 vec_alloc (vkind
, map_cnt
);
12905 unsigned int map_idx
= 0;
12907 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12908 switch (OMP_CLAUSE_CODE (c
))
12910 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
12911 unsigned int talign
;
12916 case OMP_CLAUSE_MAP
:
12917 case OMP_CLAUSE_TO
:
12918 case OMP_CLAUSE_FROM
:
12919 oacc_firstprivate_map
:
12921 ovar
= OMP_CLAUSE_DECL (c
);
12922 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12923 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12924 || (OMP_CLAUSE_MAP_KIND (c
)
12925 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
12927 if (!DECL_P (ovar
))
12929 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12930 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
12932 nc
= OMP_CLAUSE_CHAIN (c
);
12933 gcc_checking_assert (OMP_CLAUSE_DECL (nc
)
12934 == get_base_address (ovar
));
12935 ovar
= OMP_CLAUSE_DECL (nc
);
12939 tree x
= build_sender_ref (ovar
, ctx
);
12941 if (in_reduction_clauses
12942 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12943 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
12945 v
= unshare_expr (v
);
12947 while (handled_component_p (*p
)
12948 || TREE_CODE (*p
) == INDIRECT_REF
12949 || TREE_CODE (*p
) == ADDR_EXPR
12950 || TREE_CODE (*p
) == MEM_REF
12951 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
12952 p
= &TREE_OPERAND (*p
, 0);
12954 if (is_variable_sized (d
))
12956 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
12957 d
= DECL_VALUE_EXPR (d
);
12958 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
12959 d
= TREE_OPERAND (d
, 0);
12960 gcc_assert (DECL_P (d
));
12963 = (splay_tree_key
) &DECL_CONTEXT (d
);
12964 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
12969 *p
= build_fold_indirect_ref (nd
);
12971 v
= build_fold_addr_expr_with_type (v
, ptr_type_node
);
12972 gimplify_assign (x
, v
, &ilist
);
12978 if (DECL_SIZE (ovar
)
12979 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
12981 tree ovar2
= DECL_VALUE_EXPR (ovar
);
12982 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
12983 ovar2
= TREE_OPERAND (ovar2
, 0);
12984 gcc_assert (DECL_P (ovar2
));
12987 if (!maybe_lookup_field (ovar
, ctx
)
12988 && !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12989 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12990 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)))
12994 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
12995 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
12996 talign
= DECL_ALIGN_UNIT (ovar
);
13001 if (in_reduction_clauses
13002 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13003 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
13006 if (is_variable_sized (d
))
13008 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
13009 d
= DECL_VALUE_EXPR (d
);
13010 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
13011 d
= TREE_OPERAND (d
, 0);
13012 gcc_assert (DECL_P (d
));
13015 = (splay_tree_key
) &DECL_CONTEXT (d
);
13016 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
13021 var
= build_fold_indirect_ref (nd
);
13024 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13027 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13028 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13029 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
13030 && is_omp_target (stmt
))
13032 x
= build_sender_ref (c
, ctx
);
13033 gimplify_assign (x
, build_fold_addr_expr (var
), &ilist
);
13037 x
= build_sender_ref (ovar
, ctx
);
13039 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13040 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
13041 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
13042 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
13044 gcc_assert (offloaded
);
13046 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
13047 mark_addressable (avar
);
13048 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
13049 talign
= DECL_ALIGN_UNIT (avar
);
13050 avar
= build_fold_addr_expr (avar
);
13051 gimplify_assign (x
, avar
, &ilist
);
13053 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13055 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
13056 if (!omp_privatize_by_reference (var
))
13058 if (is_gimple_reg (var
)
13059 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13060 suppress_warning (var
);
13061 var
= build_fold_addr_expr (var
);
13064 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13065 gimplify_assign (x
, var
, &ilist
);
13067 else if (is_gimple_reg (var
))
13069 gcc_assert (offloaded
);
13070 tree avar
= create_tmp_var (TREE_TYPE (var
));
13071 mark_addressable (avar
);
13072 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
13073 if (GOMP_MAP_COPY_TO_P (map_kind
)
13074 || map_kind
== GOMP_MAP_POINTER
13075 || map_kind
== GOMP_MAP_TO_PSET
13076 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13078 /* If we need to initialize a temporary
13079 with VAR because it is not addressable, and
13080 the variable hasn't been initialized yet, then
13081 we'll get a warning for the store to avar.
13082 Don't warn in that case, the mapping might
13084 suppress_warning (var
, OPT_Wuninitialized
);
13085 gimplify_assign (avar
, var
, &ilist
);
13087 avar
= build_fold_addr_expr (avar
);
13088 gimplify_assign (x
, avar
, &ilist
);
13089 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
13090 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13091 && !TYPE_READONLY (TREE_TYPE (var
)))
13093 x
= unshare_expr (x
);
13094 x
= build_simple_mem_ref (x
);
13095 gimplify_assign (var
, x
, &olist
);
13100 /* While MAP is handled explicitly by the FE,
13101 for 'target update', only the identified is passed. */
13102 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
13103 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
13104 && (omp_is_allocatable_or_ptr (var
)
13105 && omp_check_optional_argument (var
, false)))
13106 var
= build_fold_indirect_ref (var
);
13107 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
13108 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
13109 || (!omp_is_allocatable_or_ptr (var
)
13110 && !omp_check_optional_argument (var
, false)))
13111 var
= build_fold_addr_expr (var
);
13112 gimplify_assign (x
, var
, &ilist
);
13116 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13118 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13119 s
= TREE_TYPE (ovar
);
13120 if (TREE_CODE (s
) == REFERENCE_TYPE
13121 || omp_check_optional_argument (ovar
, false))
13123 s
= TYPE_SIZE_UNIT (s
);
13126 s
= OMP_CLAUSE_SIZE (c
);
13127 if (s
== NULL_TREE
)
13128 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13129 s
= fold_convert (size_type_node
, s
);
13130 purpose
= size_int (map_idx
++);
13131 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13132 if (TREE_CODE (s
) != INTEGER_CST
)
13133 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13135 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
13136 switch (OMP_CLAUSE_CODE (c
))
13138 case OMP_CLAUSE_MAP
:
13139 tkind
= OMP_CLAUSE_MAP_KIND (c
);
13140 tkind_zero
= tkind
;
13141 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
13144 case GOMP_MAP_ALLOC
:
13145 case GOMP_MAP_IF_PRESENT
:
13147 case GOMP_MAP_FROM
:
13148 case GOMP_MAP_TOFROM
:
13149 case GOMP_MAP_ALWAYS_TO
:
13150 case GOMP_MAP_ALWAYS_FROM
:
13151 case GOMP_MAP_ALWAYS_TOFROM
:
13152 case GOMP_MAP_RELEASE
:
13153 case GOMP_MAP_FORCE_TO
:
13154 case GOMP_MAP_FORCE_FROM
:
13155 case GOMP_MAP_FORCE_TOFROM
:
13156 case GOMP_MAP_FORCE_PRESENT
:
13157 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
13159 case GOMP_MAP_DELETE
:
13160 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
13164 if (tkind_zero
!= tkind
)
13166 if (integer_zerop (s
))
13167 tkind
= tkind_zero
;
13168 else if (integer_nonzerop (s
))
13169 tkind_zero
= tkind
;
13172 case OMP_CLAUSE_FIRSTPRIVATE
:
13173 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13174 tkind
= GOMP_MAP_TO
;
13175 tkind_zero
= tkind
;
13177 case OMP_CLAUSE_TO
:
13178 tkind
= GOMP_MAP_TO
;
13179 tkind_zero
= tkind
;
13181 case OMP_CLAUSE_FROM
:
13182 tkind
= GOMP_MAP_FROM
;
13183 tkind_zero
= tkind
;
13186 gcc_unreachable ();
13188 gcc_checking_assert (tkind
13189 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13190 gcc_checking_assert (tkind_zero
13191 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13192 talign
= ceil_log2 (talign
);
13193 tkind
|= talign
<< talign_shift
;
13194 tkind_zero
|= talign
<< talign_shift
;
13195 gcc_checking_assert (tkind
13196 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13197 gcc_checking_assert (tkind_zero
13198 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13199 if (tkind
== tkind_zero
)
13200 x
= build_int_cstu (tkind_type
, tkind
);
13203 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
13204 x
= build3 (COND_EXPR
, tkind_type
,
13205 fold_build2 (EQ_EXPR
, boolean_type_node
,
13206 unshare_expr (s
), size_zero_node
),
13207 build_int_cstu (tkind_type
, tkind_zero
),
13208 build_int_cstu (tkind_type
, tkind
));
13210 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
13215 case OMP_CLAUSE_FIRSTPRIVATE
:
13216 if (is_gimple_omp_oacc (ctx
->stmt
))
13217 goto oacc_firstprivate_map
;
13218 ovar
= OMP_CLAUSE_DECL (c
);
13219 if (omp_privatize_by_reference (ovar
))
13220 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13222 talign
= DECL_ALIGN_UNIT (ovar
);
13223 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13224 x
= build_sender_ref (ovar
, ctx
);
13225 tkind
= GOMP_MAP_FIRSTPRIVATE
;
13226 type
= TREE_TYPE (ovar
);
13227 if (omp_privatize_by_reference (ovar
))
13228 type
= TREE_TYPE (type
);
13229 if ((INTEGRAL_TYPE_P (type
)
13230 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
13231 || TREE_CODE (type
) == POINTER_TYPE
)
13233 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13235 if (omp_privatize_by_reference (var
))
13236 t
= build_simple_mem_ref (var
);
13237 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13238 suppress_warning (var
);
13239 if (TREE_CODE (type
) != POINTER_TYPE
)
13240 t
= fold_convert (pointer_sized_int_node
, t
);
13241 t
= fold_convert (TREE_TYPE (x
), t
);
13242 gimplify_assign (x
, t
, &ilist
);
13244 else if (omp_privatize_by_reference (var
))
13245 gimplify_assign (x
, var
, &ilist
);
13246 else if (is_gimple_reg (var
))
13248 tree avar
= create_tmp_var (TREE_TYPE (var
));
13249 mark_addressable (avar
);
13250 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13251 suppress_warning (var
);
13252 gimplify_assign (avar
, var
, &ilist
);
13253 avar
= build_fold_addr_expr (avar
);
13254 gimplify_assign (x
, avar
, &ilist
);
13258 var
= build_fold_addr_expr (var
);
13259 gimplify_assign (x
, var
, &ilist
);
13261 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
13263 else if (omp_privatize_by_reference (ovar
))
13264 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13266 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13267 s
= fold_convert (size_type_node
, s
);
13268 purpose
= size_int (map_idx
++);
13269 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13270 if (TREE_CODE (s
) != INTEGER_CST
)
13271 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13273 gcc_checking_assert (tkind
13274 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13275 talign
= ceil_log2 (talign
);
13276 tkind
|= talign
<< talign_shift
;
13277 gcc_checking_assert (tkind
13278 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13279 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13280 build_int_cstu (tkind_type
, tkind
));
13283 case OMP_CLAUSE_USE_DEVICE_PTR
:
13284 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13285 case OMP_CLAUSE_IS_DEVICE_PTR
:
13286 ovar
= OMP_CLAUSE_DECL (c
);
13287 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13289 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13291 tkind
= (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13292 ? GOMP_MAP_USE_DEVICE_PTR
: GOMP_MAP_FIRSTPRIVATE_INT
);
13293 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
13295 else if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
13297 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
13298 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
13302 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13303 x
= build_sender_ref (ovar
, ctx
);
13306 if (is_gimple_omp_oacc (ctx
->stmt
))
13308 gcc_assert (tkind
== GOMP_MAP_USE_DEVICE_PTR
);
13310 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
))
13311 tkind
= GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT
;
13314 type
= TREE_TYPE (ovar
);
13315 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13316 var
= lang_hooks
.decls
.omp_array_data (ovar
, false);
13317 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13318 && !omp_privatize_by_reference (ovar
)
13319 && !omp_is_allocatable_or_ptr (ovar
))
13320 || TREE_CODE (type
) == ARRAY_TYPE
)
13321 var
= build_fold_addr_expr (var
);
13324 if (omp_privatize_by_reference (ovar
)
13325 || omp_check_optional_argument (ovar
, false)
13326 || omp_is_allocatable_or_ptr (ovar
))
13328 type
= TREE_TYPE (type
);
13329 if (POINTER_TYPE_P (type
)
13330 && TREE_CODE (type
) != ARRAY_TYPE
13331 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13332 && !omp_is_allocatable_or_ptr (ovar
))
13333 || (omp_privatize_by_reference (ovar
)
13334 && omp_is_allocatable_or_ptr (ovar
))))
13335 var
= build_simple_mem_ref (var
);
13336 var
= fold_convert (TREE_TYPE (x
), var
);
13340 present
= omp_check_optional_argument (ovar
, true);
13343 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13344 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13345 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13346 tree new_x
= unshare_expr (x
);
13347 gimplify_expr (&present
, &ilist
, NULL
, is_gimple_val
,
13349 gcond
*cond
= gimple_build_cond_from_tree (present
,
13352 gimple_seq_add_stmt (&ilist
, cond
);
13353 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_label
));
13354 gimplify_assign (new_x
, null_pointer_node
, &ilist
);
13355 gimple_seq_add_stmt (&ilist
, gimple_build_goto (opt_arg_label
));
13356 gimple_seq_add_stmt (&ilist
,
13357 gimple_build_label (notnull_label
));
13358 gimplify_assign (x
, var
, &ilist
);
13359 gimple_seq_add_stmt (&ilist
,
13360 gimple_build_label (opt_arg_label
));
13363 gimplify_assign (x
, var
, &ilist
);
13365 purpose
= size_int (map_idx
++);
13366 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13367 gcc_checking_assert (tkind
13368 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13369 gcc_checking_assert (tkind
13370 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13371 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13372 build_int_cstu (tkind_type
, tkind
));
13376 gcc_assert (map_idx
== map_cnt
);
13378 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
13379 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
13380 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
13381 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
13382 for (int i
= 1; i
<= 2; i
++)
13383 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
13385 gimple_seq initlist
= NULL
;
13386 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
13387 TREE_VEC_ELT (t
, i
)),
13388 &initlist
, true, NULL_TREE
);
13389 gimple_seq_add_seq (&ilist
, initlist
);
13391 tree clobber
= build_clobber (TREE_TYPE (TREE_VEC_ELT (t
, i
)));
13392 gimple_seq_add_stmt (&olist
,
13393 gimple_build_assign (TREE_VEC_ELT (t
, i
),
13396 else if (omp_maybe_offloaded_ctx (ctx
->outer
))
13398 tree id
= get_identifier ("omp declare target");
13399 tree decl
= TREE_VEC_ELT (t
, i
);
13400 DECL_ATTRIBUTES (decl
)
13401 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
13402 varpool_node
*node
= varpool_node::get (decl
);
13405 node
->offloadable
= 1;
13406 if (ENABLE_OFFLOADING
)
13408 g
->have_offload
= true;
13409 vec_safe_push (offload_vars
, t
);
	  tree clobber = build_clobber (ctx->record_type);
	  gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							    clobber));
	}

      /* Once all the expansions are done, sequence all the different
	 fragments inside gimple_omp_body.  */

      if (offloaded
	  && ctx->record_type)
	{
	  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
	  /* fixup_child_record_type might have changed receiver_decl's type.  */
	  t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
	  gimple_seq_add_stmt (&new_body,
			       gimple_build_assign (ctx->receiver_decl, t));
	}
      gimple_seq_add_seq (&new_body, fplist);
      if (offloaded || data_region)
	{
	  tree prev = NULL_TREE;
	  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      default:
		break;
	      case OMP_CLAUSE_FIRSTPRIVATE:
		if (is_gimple_omp_oacc (ctx->stmt))
		  break;
		var = OMP_CLAUSE_DECL (c);
		if (omp_privatize_by_reference (var)
		    || is_gimple_reg_type (TREE_TYPE (var)))
		  {
		    tree new_var = lookup_decl (var, ctx);
		    tree type;
		    type = TREE_TYPE (var);
		    if (omp_privatize_by_reference (var))
		      type = TREE_TYPE (type);
		    if ((INTEGRAL_TYPE_P (type)
			 && TYPE_PRECISION (type) <= POINTER_SIZE)
			|| TREE_CODE (type) == POINTER_TYPE)
		      {
			x = build_receiver_ref (var, false, ctx);
			if (TREE_CODE (type) != POINTER_TYPE)
			  x = fold_convert (pointer_sized_int_node, x);
			x = fold_convert (type, x);
			gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				       fb_rvalue);
			if (omp_privatize_by_reference (var))
			  {
			    tree v = create_tmp_var_raw (type, get_name (var));
			    gimple_add_tmp_var (v);
			    TREE_ADDRESSABLE (v) = 1;
			    gimple_seq_add_stmt (&new_body,
						 gimple_build_assign (v, x));
			    x = build_fold_addr_expr (v);
			  }
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (new_var, x));
		      }
		    else
		      {
			bool by_ref = !omp_privatize_by_reference (var);
			x = build_receiver_ref (var, by_ref, ctx);
			gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				       fb_rvalue);
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (new_var, x));
		      }
		  }
		else if (is_variable_sized (var))
		  {
		    tree pvar = DECL_VALUE_EXPR (var);
		    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		    pvar = TREE_OPERAND (pvar, 0);
		    gcc_assert (DECL_P (pvar));
		    tree new_var = lookup_decl (pvar, ctx);
		    x = build_receiver_ref (var, false, ctx);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
		break;
	      case OMP_CLAUSE_PRIVATE:
		if (is_gimple_omp_oacc (ctx->stmt))
		  break;
		var = OMP_CLAUSE_DECL (c);
		if (omp_privatize_by_reference (var))
		  {
		    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		    tree new_var = lookup_decl (var, ctx);
		    x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		    if (TREE_CONSTANT (x))
		      {
			x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
						get_name (var));
			gimple_add_tmp_var (x);
			TREE_ADDRESSABLE (x) = 1;
			x = build_fold_addr_expr_loc (clause_loc, x);
		      }
		    else
		      break;

		    x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
		break;
	      case OMP_CLAUSE_USE_DEVICE_PTR:
	      case OMP_CLAUSE_USE_DEVICE_ADDR:
	      case OMP_CLAUSE_IS_DEVICE_PTR:
		gimple_seq assign_body;
		bool is_array_data;
		bool do_optional_check;
		assign_body = NULL;
		do_optional_check = false;
		var = OMP_CLAUSE_DECL (c);
		is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;

		if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
		  x = build_sender_ref (is_array_data
					? (splay_tree_key) &DECL_NAME (var)
					: (splay_tree_key) &DECL_UID (var), ctx);
		else
		  x = build_receiver_ref (var, false, ctx);

		if (is_array_data)
		  {
		    bool is_ref = omp_privatize_by_reference (var);
		    do_optional_check = true;
		    /* First, we copy the descriptor data from the host; then
		       we update its data to point to the target address.  */
		    new_var = lookup_decl (var, ctx);
		    new_var = DECL_VALUE_EXPR (new_var);
		    tree v = new_var;

		    if (is_ref)
		      {
			var = build_fold_indirect_ref (var);
			gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
				       fb_rvalue);
			v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			gimple_seq_add_stmt (&assign_body,
					     gimple_build_assign (v, var));
			tree rhs = build_fold_addr_expr (v);
			gimple_seq_add_stmt (&assign_body,
					     gimple_build_assign (new_var, rhs));
		      }
		    else
		      gimple_seq_add_stmt (&assign_body,
					   gimple_build_assign (new_var, var));

		    tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
		    gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		    gimple_seq_add_stmt (&assign_body,
					 gimple_build_assign (v2, x));
		  }
		else if (is_variable_sized (var))
		  {
		    tree pvar = DECL_VALUE_EXPR (var);
		    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		    pvar = TREE_OPERAND (pvar, 0);
		    gcc_assert (DECL_P (pvar));
		    new_var = lookup_decl (pvar, ctx);
		    gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		    gimple_seq_add_stmt (&assign_body,
					 gimple_build_assign (new_var, x));
		  }
		else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
			  && !omp_privatize_by_reference (var)
			  && !omp_is_allocatable_or_ptr (var))
			 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
		  {
		    new_var = lookup_decl (var, ctx);
		    new_var = DECL_VALUE_EXPR (new_var);
		    gcc_assert (TREE_CODE (new_var) == MEM_REF);
		    new_var = TREE_OPERAND (new_var, 0);
		    gcc_assert (DECL_P (new_var));
		    gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		    gimple_seq_add_stmt (&assign_body,
					 gimple_build_assign (new_var, x));
		  }
		else
		  {
		    tree type = TREE_TYPE (var);
		    new_var = lookup_decl (var, ctx);
		    if (omp_privatize_by_reference (var))
		      type = TREE_TYPE (type);
		    if (POINTER_TYPE_P (type)
			&& TREE_CODE (type) != ARRAY_TYPE
			&& (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
			    || (omp_privatize_by_reference (var)
				&& omp_is_allocatable_or_ptr (var))))
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			x = fold_convert (type, x);
			gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
				       fb_rvalue);
			gimple_seq_add_stmt (&assign_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
			do_optional_check = true;
		      }
		    new_var = DECL_VALUE_EXPR (new_var);
		    x = fold_convert (TREE_TYPE (new_var), x);
		    gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		    gimple_seq_add_stmt (&assign_body,
					 gimple_build_assign (new_var, x));
		  }

		present = (do_optional_check
			   ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
			   : NULL_TREE);
		if (present)
		  {
		    tree null_label = create_artificial_label (UNKNOWN_LOCATION);
		    tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
		    tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
		    glabel *null_glabel = gimple_build_label (null_label);
		    glabel *notnull_glabel = gimple_build_label (notnull_label);
		    ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    gimplify_expr (&present, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    gcond *cond = gimple_build_cond_from_tree (present,
							       notnull_label,
							       null_label);
		    gimple_seq_add_stmt (&new_body, cond);
		    gimple_seq_add_stmt (&new_body, null_glabel);
		    gimplify_assign (new_var, null_pointer_node, &new_body);
		    gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
		    gimple_seq_add_stmt (&new_body, notnull_glabel);
		    gimple_seq_add_seq (&new_body, assign_body);
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_label (opt_arg_label));
		  }
		else
		  gimple_seq_add_seq (&new_body, assign_body);
		break;
	      }
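	  /* A sketch of the optional-argument guard built above (artificial
	     label names; not literal compiler output): for a Fortran
	     OPTIONAL dummy argument, the receiver side becomes roughly

	       if (present) goto <notnull>; else goto <null>;
	       <null>:    new_var = 0B; goto <done>;
	       <notnull>: ... ASSIGN_BODY: new_var = <lowered address> ...
	       <done>:

	     so an absent argument is seen as a null pointer inside the
	     region.  */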
	  /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
	     so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
	     are already handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs
	     or references to VLAs.  */
	  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      default:
		break;
	      case OMP_CLAUSE_MAP:
		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		    || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		  {
		    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		    poly_int64 offset = 0;
		    var = OMP_CLAUSE_DECL (c);
		    if (DECL_P (var)
			&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
			&& is_global_var (maybe_lookup_decl_in_outer_ctx (var,
									  ctx))
			&& varpool_node::get_create (var)->offloadable)
		      break;
		    if (TREE_CODE (var) == INDIRECT_REF
			&& TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
		      var = TREE_OPERAND (var, 0);
		    if (TREE_CODE (var) == COMPONENT_REF)
		      {
			var = get_addr_base_and_unit_offset (var, &offset);
			gcc_assert (var != NULL_TREE && DECL_P (var));
		      }
		    else if (DECL_SIZE (var)
			     && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
		      {
			tree var2 = DECL_VALUE_EXPR (var);
			gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
			var2 = TREE_OPERAND (var2, 0);
			gcc_assert (DECL_P (var2));
			var = var2;
		      }
		    tree new_var = lookup_decl (var, ctx), x;
		    tree type = TREE_TYPE (new_var);
		    bool is_ref;
		    if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
			&& (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			    == COMPONENT_REF))
		      {
			type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
			new_var = build2 (MEM_REF, type,
					  build_fold_addr_expr (new_var),
					  build_int_cst (build_pointer_type (type),
							 offset));
		      }
		    else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
		      {
			type = TREE_TYPE (OMP_CLAUSE_DECL (c));
			is_ref = TREE_CODE (type) == REFERENCE_TYPE;
			new_var = build2 (MEM_REF, type,
					  build_fold_addr_expr (new_var),
					  build_int_cst (build_pointer_type (type),
							 offset));
		      }
		    else
		      is_ref = omp_privatize_by_reference (var);
		    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		      is_ref = false;
		    bool ref_to_array = false;
		    if (is_ref)
		      {
			type = TREE_TYPE (type);
			if (TREE_CODE (type) == ARRAY_TYPE)
			  {
			    type = build_pointer_type (type);
			    ref_to_array = true;
			  }
		      }
		    else if (TREE_CODE (type) == ARRAY_TYPE)
		      {
			tree decl2 = DECL_VALUE_EXPR (new_var);
			gcc_assert (TREE_CODE (decl2) == MEM_REF);
			decl2 = TREE_OPERAND (decl2, 0);
			gcc_assert (DECL_P (decl2));
			new_var = decl2;
			type = TREE_TYPE (new_var);
		      }
		    x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
		    x = fold_convert_loc (clause_loc, type, x);
		    if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
		      {
			tree bias = OMP_CLAUSE_SIZE (c);
			if (DECL_P (bias))
			  bias = lookup_decl (bias, ctx);
			bias = fold_convert_loc (clause_loc, sizetype, bias);
			bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
						bias);
			x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					     TREE_TYPE (x), x, bias);
		      }
		    x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		    if (is_ref && !ref_to_array)
		      {
			tree t = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (t);
			TREE_ADDRESSABLE (t) = 1;
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (t, x));
			x = build_fold_addr_expr_loc (clause_loc, t);
		      }
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
		else if (OMP_CLAUSE_CHAIN (c)
			 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
			    == OMP_CLAUSE_MAP
			 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			     == GOMP_MAP_FIRSTPRIVATE_POINTER
			     || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
				 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
		  prev = c;
		break;
	      case OMP_CLAUSE_PRIVATE:
		var = OMP_CLAUSE_DECL (c);
		if (is_variable_sized (var))
		  {
		    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		    tree new_var = lookup_decl (var, ctx);
		    tree pvar = DECL_VALUE_EXPR (var);
		    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		    pvar = TREE_OPERAND (pvar, 0);
		    gcc_assert (DECL_P (pvar));
		    tree new_pvar = lookup_decl (pvar, ctx);
		    tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		    tree al = size_int (DECL_ALIGN (var));
		    tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		    x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_pvar, x));
		  }
		else if (omp_privatize_by_reference (var)
			 && !is_gimple_omp_oacc (ctx->stmt))
		  {
		    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		    tree new_var = lookup_decl (var, ctx);
		    tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		    if (TREE_CONSTANT (x))
		      {
			x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
						get_name (var));
			gimple_add_tmp_var (x);
			TREE_ADDRESSABLE (x) = 1;
			x = build_fold_addr_expr_loc (clause_loc, x);
		      }
		    else
		      {
			tree atmp
			  = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
			tree rtype = TREE_TYPE (TREE_TYPE (new_var));
			tree al = size_int (TYPE_ALIGN (rtype));
			x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		      }

		    x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
		break;
	      }
	}
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      if (offloaded && is_gimple_omp_oacc (ctx->stmt))
	{
	  /* If there are reductions on the offloaded region itself, treat
	     them as a dummy GANG loop.  */
	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);

	  gcall *private_marker = lower_oacc_private_marker (ctx);

	  if (private_marker)
	    gimple_call_set_arg (private_marker, 2, level);

	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
				 false, NULL, private_marker, NULL, &fork_seq,
				 &join_seq, ctx);
	}

      gimple_seq_add_seq (&new_body, fork_seq);
      gimple_seq_add_seq (&new_body, tgt_body);
      gimple_seq_add_seq (&new_body, join_seq);

      new_body = maybe_catch_exception (new_body);
      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }

  bind = gimple_build_bind (NULL, NULL,
			    tgt_bind ? gimple_bind_block (tgt_bind)
				     : NULL_TREE);
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   NULL, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  location_t loc = gimple_location (teams_stmt);
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
  gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
  gimple_set_location (call, loc);
  gimple_seq_add_stmt (&bind_body, call);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
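/* As an illustration (a sketch, not literal compiler output): assuming a
   directive such as

     #pragma omp teams num_teams (8) thread_limit (16)

   the lowered body starts with the runtime call

     __builtin_GOMP_teams (8, 16);

   followed by the original teams body; an omitted clause is passed as 0,
   leaving the choice to the runtime.  */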
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  omp_context *ctx;
  vec<tree> *decls;
};

/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}

/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
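/* Note: the DECLS vector holds saved pairs pushed by
   lower_omp_regimplify_operands_p (first the original DECL_VALUE_EXPR,
   then the decl itself), so the loop above pops the decl first and then
   its saved value expression, restoring every temporarily adjusted
   DECL_VALUE_EXPR once gimple_regimplify_operands has run.  */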
static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      if (ctx && is_gimple_omp_oacc (ctx->stmt))
	{
	  tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
	  oacc_privatization_scan_decl_chain (ctx, vars);
	}
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCOPE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scope (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      break;
	    if (DECL_FUNCTION_CODE (fndecl)
		== BUILT_IN_GOMP_CANCELLATION_POINT)
	      {
		stmt = gimple_build_nop ();
		gsi_replace (gsi_p, stmt, false);
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      /* FALLTHRU */
    case GIMPLE_ASSIGN:
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (tree *v = up->lastprivate_conditional_map->get (lhs))
	    {
	      tree clauses;
	      if (up->combined_into_simd_safelen1)
		{
		  up = up->outer;
		  if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		    up = up->outer;
		}
	      if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		clauses = gimple_omp_for_clauses (up->stmt);
	      else
		clauses = gimple_omp_sections_clauses (up->stmt);
	      tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
	      if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				     OMP_CLAUSE__CONDTEMP_);
	      gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
	      gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
	      gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    }
	}
      /* FALLTHRU */

    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
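/* As an illustration of the cancellation lowering above (a sketch with an
   artificial temporary name, not literal compiler output): inside a
   cancellable parallel region, a call

     __builtin_GOMP_barrier ();

   becomes

     D.1 = __builtin_GOMP_barrier_cancel ();
     if (D.1 != 0) goto <cancel_label>; else goto <fallthru_label>;

   so a thread observing a pending cancellation branches to the region's
   cancel label.  */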
static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  splay_tree_delete (all_contexts);
  all_contexts = NULL;

  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If the current function is a method, remove artificial dummy VAR_DECLs
     created for non-static data member privatization: they aren't needed for
     debuginfo or anything else, they have already been replaced everywhere in
     the IL, and they cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));

  return 0;
}
namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
      || (label_ctx && is_gimple_omp_oacc (label_ctx)))
    {
      gcc_checking_assert (kind == NULL);
      kind = "OpenACC";
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    /* Otherwise, be vague and lazy, but efficient.  */
    error ("invalid branch to/from %s structured block", kind);

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
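/* As an illustration (a hypothetical test case; wording matches the error
   calls above): with -fopenmp,

     #pragma omp parallel
     {
       goto out;
     }
   out:;

   is diagnosed with "invalid branch to/from OpenMP structured block",
   since the goto and its destination label belong to different
   structured-block contexts.  */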
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);

  return 0;
}
namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}

#include "gt-omp-low.h"