/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "stringpool.h"
#include "attribs.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
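
/* As an illustrative sketch (not emitted verbatim by the pass; the field
   layout and generated names vary, e.g. with use_pointer_for_field):

     #pragma omp parallel shared(x)
       x++;

   is scanned into an .omp_data_s record with a field for X, and the body
   is outlined into a child function along the lines of

     struct .omp_data_s { int *x; };
     void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
     { (*.omp_data_i->x)++; }

   which pass_expand_omp ultimately invokes through the libgomp entry
   point GOMP_parallel.  */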
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;
};
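
/* Illustration (a sketch, not normative): for

     #pragma omp parallel        <-- ctx P, depth 1, outer == NULL
       #pragma omp task          <-- ctx T, depth 2, outer == P

   one omp_context is created per construct and chained through OUTER,
   so variable lookups can walk from the innermost construct outward.  */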
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;
static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
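
/* WALK_SUBSTMTS is meant to be spliced into the switch of a
   walk_gimple_seq callback (see e.g. omp_find_combined_for below), so
   that purely structural containers such as GIMPLE_BIND and GIMPLE_TRY
   are descended into instead of being treated as handled.  */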
/* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
   construct.  */

static bool
is_oacc_parallel_or_serial (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
}
/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
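
/* Illustrative case: when privatizing a non-static data member M inside
   a C++ member function, the front end creates an artificial VAR_DECL
   whose DECL_VALUE_EXPR is a COMPONENT_REF like this->M; the loop above
   walks such a reference back to the artificial "this" PARM_DECL.  */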
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
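
/* Example (illustrative): unshare_and_remap (a + b, a, c) yields a
   freshly unshared tree equivalent to c + b; the input tree X is not
   modified in place.  */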
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}


/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}


/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" form
   allows for the variable form to not have been entered, otherwise we
   assert that the variable must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
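
/* Rough summary (illustrative, not exhaustive): a non-addressable
   scalar like "int x" shared on a parallel can use copy-in/copy-out
   (result false), whereas aggregates, atomics, addressable variables
   and anything shared with a task must be communicated through a
   pointer field (result true).  */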
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just because task needs to take
     it's address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((task_shared_vars
	   && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}
static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
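
/* Illustrative shapes of the result: .omp_data_i->x for a by-value
   field, and *.omp_data_i->x when BY_REF, where .omp_data_i stands for
   CTX->RECEIVER_DECL in the child function.  */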
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */
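
/* The MASK bits below are not formally documented; as inferred from the
   code: 1 = field in the receiver record (FIELD_MAP), 2 = field in the
   task "sender" record (SFIELD_MAP), 4 = extra level of indirection for
   array types, 8 = key the maps by &DECL_UID (VAR), 16 = Fortran
   array-descriptor data, keyed by &DECL_NAME (VAR).  */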
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  else if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);
/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy a omp_context data structures.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
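
/* Net effect (illustrative): the child's .omp_data_i parameter gets
   type "struct .omp_data_s * restrict" -- with the record additionally
   const-qualified for offloaded regions -- so the optimizers may assume
   the incoming data block is not aliased through other pointers.  */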
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;
	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;
	case OMP_CLAUSE_REDUCTION:
	  if (is_oacc_parallel_or_serial (ctx) || is_oacc_kernels (ctx))
	    ctx->local_reduction_clauses
	      = tree_cons (NULL, c, ctx->local_reduction_clauses);
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_is_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;
	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_ALLOCATE:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;
	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;
	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
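
/* Worked example (a sketch; the exact fields depend on
   use_pointer_for_field): for "#pragma omp parallel shared(a)
   firstprivate(b)", the SHARED clause installs a record field for A
   (mask 3) plus a remapped local in the child, and FIRSTPRIVATE does
   the same for B so the encountering thread can seed the copy through
   the sender record before the region starts.  */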
/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
				       task_copy ? "_omp_cpyfn" : "_omp_fn");
}
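
/* For a function "foo" this yields identifiers such as foo._omp_fn.0,
   foo._omp_fn.1, ... and foo._omp_cpyfn.N for task copy functions; the
   numbering comes from clone_function_name_numbered.  */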
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE)
	      || (msk == GF_OMP_FOR_KIND_FOR
		  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				      OMP_CLAUSE_LASTPRIVATE)))
	    {
	      tree temp = create_tmp_var (type);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__LOOPTEMP_);
	      insert_decl_map (&outer_ctx->cb, temp, temp);
	      OMP_CLAUSE_DECL (c) = temp;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      if (fd.non_rect
	  && fd.last_nonrect == fd.first_nonrect + 1)
	if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
	      tree type2 = TREE_TYPE (v);
	      for (i = 0; i < 3; i++)
		{
		  tree temp = create_tmp_var (type2);
		  tree c = build_omp_clause (UNKNOWN_LOCATION,
					     OMP_CLAUSE__LOOPTEMP_);
		  insert_decl_map (&outer_ctx->cb, temp, temp);
		  OMP_CLAUSE_DECL (c) = temp;
		  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		  gimple_omp_taskreg_set_clauses (stmt, c);
		}
	    }
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
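
/* Illustrative count (following the logic above): a combined
   "parallel for collapse(2)" with a non-constant outer bound needs
   COUNT = 2 (istart/iend) + 1 (the count2 temporary), plus one extra
   _looptemp_ when lastprivate is present; each temporary becomes an
   OMP_CLAUSE__LOOPTEMP_ chained onto the parallel.  */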

/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
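
/* Illustration (sketch): an empty body is normally replaced by a nop
   above, but

	#pragma omp parallel copyin (tp)
	;

   must be kept, because copying the threadprivate TP from the master
   thread into the other threads is a side effect even without a body.  */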

/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}

/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_context returns non-NULL for some
   tree, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
	{
	  if (DECL_HAS_VALUE_EXPR_P (t))
	    t = unshare_expr (DECL_VALUE_EXPR (t));
	  *tp = t;
	}
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  There are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
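
/* Illustration (a sketch, field names invented): for a taskloop the
   reordering above guarantees that the two _looptemp_ fields come
   first, since GOMP_taskloop stores the start and end iteration
   values into the first two fields of the argument block, with an
   optional _reductemp_ field following and any VLA-sized fields
   moved last:

	struct .omp_data_s { itype start; itype end;
			     void *reductions;  // only with task reductions
			     ... fixed-size fields ...;
			     ... VLA-sized fields ... };  */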

/* Find the enclosing offload context.  */

static omp_context *
enclosing_target_ctx (omp_context *ctx)
{
  for (; ctx; ctx = ctx->outer)
    if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
      break;

  return ctx;
}

/* Return true if ctx is part of an oacc kernels region.  */

static bool
ctx_in_oacc_kernels_region (omp_context *ctx)
{
  for (; ctx != NULL; ctx = ctx->outer)
    {
      gimple *stmt = ctx->stmt;
      if (gimple_code (stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
	return true;
    }

  return false;
}

/* Check the parallelism clauses inside a kernels regions.
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
  if (!stmt)
    {
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
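
/* For illustration, a kernels-region nest the check above rejects
   (sketch):

	#pragma acc kernels
	#pragma acc loop gang
	for (i = 0; i < n; i++)
	  #pragma acc loop gang      // error: inner loop uses same OpenACC
	  for (j = 0; j < m; j++)    // parallelism as containing loop
	    ...
*/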

/* Scan a GIMPLE_OMP_FOR.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      if (!tgt || is_oacc_parallel_or_serial (tgt))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    char const *check = NULL;

	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		check = "gang";
		break;

	      case OMP_CLAUSE_WORKER:
		check = "worker";
		break;

	      case OMP_CLAUSE_VECTOR:
		check = "vector";
		break;

	      default:
		break;
	      }

	    if (check && OMP_CLAUSE_OPERAND (c, 0))
	      error_at (gimple_location (stmt),
			"argument not permitted on %qs clause in"
			" OpenACC %<parallel%> or %<serial%>", check);
	  }

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	  check_oacc_kernel_gwv (stmt, ctx);
	}

      /* Collect all variables named in reductions on this loop.  Ensure
	 that, if this loop has a reduction on some variable v, and there is
	 a reduction on v somewhere in an outer context, then there is a
	 reduction on v on all intervening loops as well.  */
      tree local_reduction_clauses = NULL;
      for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	    local_reduction_clauses
	      = tree_cons (NULL, c, local_reduction_clauses);
	}
      if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
	ctx->outer_reduction_clauses
	  = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
		     ctx->outer->outer_reduction_clauses);
      tree outer_reduction_clauses = ctx->outer_reduction_clauses;
      tree local_iter = local_reduction_clauses;
      for (; local_iter; local_iter = TREE_CHAIN (local_iter))
	{
	  tree local_clause = TREE_VALUE (local_iter);
	  tree local_var = OMP_CLAUSE_DECL (local_clause);
	  tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
	  bool have_outer_reduction = false;
	  tree ctx_iter = outer_reduction_clauses;
	  for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
	    {
	      tree outer_clause = TREE_VALUE (ctx_iter);
	      tree outer_var = OMP_CLAUSE_DECL (outer_clause);
	      tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
	      if (outer_var == local_var && outer_op != local_op)
		{
		  warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
			      "conflicting reduction operations for %qE",
			      local_var);
		  inform (OMP_CLAUSE_LOCATION (outer_clause),
			  "location of the previous reduction for %qE",
			  outer_var);
		}
	      if (outer_var == local_var)
		{
		  have_outer_reduction = true;
		  break;
		}
	    }
	  if (have_outer_reduction)
	    {
	      /* There is a reduction on outer_var both on this loop and on
		 some enclosing loop.  Walk up the context tree until such a
		 loop with a reduction on outer_var is found, and complain
		 about all intervening loops that do not have such a
		 reduction.  */
	      struct omp_context *curr_loop = ctx->outer;
	      bool found = false;
	      while (curr_loop != NULL)
		{
		  tree curr_iter = curr_loop->local_reduction_clauses;
		  for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
		    {
		      tree curr_clause = TREE_VALUE (curr_iter);
		      tree curr_var = OMP_CLAUSE_DECL (curr_clause);
		      if (curr_var == local_var)
			{
			  found = true;
			  break;
			}
		    }
		  if (found)
		    break;
		  warning_at (gimple_location (curr_loop->stmt), 0,
			      "nested loop in reduction needs "
			      "reduction clause for %qE",
			      local_var);
		  curr_loop = curr_loop->outer;
		}
	    }
	}
      ctx->local_reduction_clauses = local_reduction_clauses;
      ctx->outer_reduction_clauses
	= chainon (unshare_expr (ctx->local_reduction_clauses),
		   ctx->outer_reduction_clauses);
    }

  scan_sharing_clauses (clauses, ctx);

  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}

/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
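
/* The result of the duplication above is roughly (GIMPLE sketch):

	cond = IFN_GOMP_USE_SIMT ();
	if (cond != 0) goto lab1; else goto lab2;
	lab1: #pragma omp simd _simt_ ...	(the copy)
	      goto lab3;
	lab2: #pragma omp simd ...		(the original)
	lab3: ;

   so whichever variant matches the offload target survives the later
   passes and the other branch is optimized away.  */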

static tree omp_find_scan (gimple_stmt_iterator *, bool *,
			   struct walk_stmt_info *);
static omp_context *maybe_lookup_ctx (gimple *);

/* Duplicate #pragma omp simd, one for the scan input phase loop and one
   for scan phase loop.  */

static void
scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
		    omp_context *outer_ctx)
{
  /* The only change between inclusive and exclusive scan will be
     within the first simd loop, so just use inclusive in the
     worksharing loop.  */
  outer_ctx->scan_inclusive = true;
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
  OMP_CLAUSE_DECL (c) = integer_zero_node;

  gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
  gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
  gsi_replace (gsi, input_stmt, false);
  gimple_seq input_body = NULL;
  gimple_seq_add_stmt (&input_body, stmt);
  gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);

  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gsi_next (&input1_gsi);
  gimple *scan_stmt1 = gsi_stmt (input1_gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
  c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt1, scan_stmt1);

  gimple_seq input_body1 = gimple_omp_body (input_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);

  gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (scan_body);

  gimple_omp_set_body (input_stmt1, input_body1);
  gimple_omp_set_body (scan_stmt1, NULL);

  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
		       NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi_next (&input2_gsi);
  gimple *scan_stmt2 = gsi_stmt (input2_gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt2, scan_stmt2);

  gimple_omp_set_body (input_stmt2, NULL);

  gimple_omp_set_body (input_stmt, input_body);
  gimple_omp_set_body (scan_stmt, scan_body);

  omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (input_stmt), ctx);

  ctx = new_omp_context (scan_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);

  maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
}
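
/* Source-level illustration (a sketch; F is an invented function):

	#pragma omp simd reduction (inscan, +:r)
	for (...) { r += f (...); #pragma omp scan inclusive (r) use (r); }

   is duplicated into an input phase loop (wrapped in a clause-less
   GIMPLE_OMP_SCAN) and a scan phase loop (wrapped in a GIMPLE_OMP_SCAN
   with an inclusive clause), which omp_find_scan locates above.  */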

/* Scan an OpenMP sections directive.  */

static void
scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;

  ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
}

/* Scan an OpenMP single directive.  */

static void
scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}
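
/* Illustration (sketch): only copyprivate variables produce fields in
   .omp_copy_s, e.g.

	#pragma omp single copyprivate (x)
	x = ...;

   creates a field for X so the thread that executed the single region
   can broadcast its value to the others; without copyprivate the
   record stays empty and is discarded above.  */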

/* Scan a GIMPLE_OMP_TARGET.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }
}
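
/* Sketch for illustration: for "#pragma omp target map(tofrom: a)" a
   field for A ends up in .omp_data_t; the sending side fills in the
   record and, for offloaded kinds, the child function created above
   receives its address through the receiver decl.  */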

/* Scan an OpenMP teams directive.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  if (!gimple_omp_teams_host (stmt))
    {
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  taskreg_contexts.safe_push (ctx);
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}

/* Check nesting restrictions.  */

static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (!(is_gimple_omp (stmt)
	&& is_gimple_omp_oacc (stmt))
      /* Except for atomic codes that we share with OpenMP.  */
      && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
	  && ctx->outer
	  && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	ctx = ctx->outer;
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	{
	  c = NULL_TREE;
	  if (ctx->order_concurrent
	      && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs other than %<parallel%>, %<loop%>"
			" or %<simd%> may not be nested inside a region with"
			" the %<order(concurrent)%> clause");
	      return false;
	    }
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<for simd%> region");
		      return false;
		    }
		  return true;
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
		   || gimple_code (stmt) == GIMPLE_OMP_SCAN)
	    return true;
	  else if (gimple_code (stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	    return true;
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than "
		    "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
		    "not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
		   && omp_find_clause (gimple_omp_for_clauses (stmt),
				       OMP_CLAUSE_BIND) == NULL_TREE))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%>, %<parallel%> or %<loop%> "
			"regions are allowed to be strictly nested inside "
			"%<teams%> region");
	      return false;
	    }
	}
      else if (ctx->order_concurrent
	       && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
	       && (gimple_code (stmt) != GIMPLE_OMP_FOR
		   || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
	       && gimple_code (stmt) != GIMPLE_OMP_SCAN)
	{
	  if (ctx->loop_p)
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a %<loop%> region");
	  else
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a region with "
		      "the %<order(concurrent)%> clause");
	  return false;
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      /* For now, hope this will change and loop bind(parallel) will not
	 be allowed in lots of contexts.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	  && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		  case GF_OMP_TARGET_KIND_OACC_SERIAL:
		    ok = true;
		    break;

		  default:
		    break;
		  }

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "cancel"
	      : "cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (!is_task_ctx (ctx)
		  && (!is_taskloop_ctx (ctx)
		      || ctx->outer == NULL
		      || !is_task_ctx (ctx->outer)))
		bad = "task";
	      else
		{
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			case GIMPLE_OMP_TASK:
			  if (gimple_omp_task_taskloop_p (octx->stmt)
			      && octx->outer
			      && is_taskloop_ctx (octx->outer))
			    {
			      tree clauses
				= gimple_omp_for_clauses (octx->outer->stmt);
			      if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
				break;
			    }
			  continue;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<loop%>, %<critical%>, "
			  "%<ordered%>, %<master%>, explicit %<task%> or "
			  "%<taskloop%> region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
		      "%<master%>, explicit %<task%> or %<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%<master%> region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    {
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }
	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
	      || kind == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree oclause;
	      /* Look for containing ordered(N) loop.  */
	      if (ctx == NULL
		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || (oclause
			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside an %<ordered%> "
			    "loop");
		  return false;
		}
	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside a loop with "
			    "%<ordered%> clause with a parameter");
		  return false;
		}
	    }
	  else
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"invalid depend kind in omp %<ordered%> %<depend%>");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  error_at (gimple_location (stmt),
		    "%<ordered%> %<simd%> must be closely nested inside "
		    "%<simd%> region");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    tree o;
	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED);
	    if (o == NULL)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region without %<depend%> clause may "
			  "not be closely nested inside a loop region with "
			  "an %<ordered%> clause with a parameter");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
	        = dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
	break;
      else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	       || (gimple_omp_target_kind (ctx->stmt)
		   != GF_OMP_TARGET_KIND_REGION))
	{
	  /* Teams construct can appear either strictly nested inside of
	     target construct with no intervening stmts, or can be encountered
	     only by initial task (so must not appear inside any OpenMP
	     construct).  */
	  error_at (gimple_location (stmt),
		    "%<teams%> construct must be closely nested inside of "
		    "%<target%> construct or not nested in any OpenMP "
		    "construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
	      stmt_name = "enter/exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL:
	      ctx_stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		warning_at (gimple_location (stmt), 0,
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
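
/* For illustration, one nesting these checks reject (sketch):

	#pragma omp critical
	{
	  #pragma omp barrier	// error: barrier may not be closely
	}			// nested inside a critical region

   while the same barrier inside an intervening "#pragma omp parallel"
   is fine, because the context walk above stops when it reaches a
   GIMPLE_OMP_PARALLEL.  */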

/* Helper function scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}

/* Return true if FNDECL is a setjmp or a longjmp.  */

static bool
setjmp_or_longjmp_p (const_tree fndecl)
{
  if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
      || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
    return true;

  tree declname = DECL_NAME (fndecl);
  if (!declname
      || (DECL_CONTEXT (fndecl) != NULL_TREE
	  && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
      || !TREE_PUBLIC (fndecl))
    return false;

  const char *name = IDENTIFIER_POINTER (declname);
  return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
}

/* Return true if FNDECL is an omp_* runtime API call.  */

static bool
omp_runtime_api_call (const_tree fndecl)
{
  tree declname = DECL_NAME (fndecl);
  if (!declname
      || (DECL_CONTEXT (fndecl) != NULL_TREE
	  && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
      || !TREE_PUBLIC (fndecl))
    return false;

  const char *name = IDENTIFIER_POINTER (declname);
  if (strncmp (name, "omp_", 4) != 0)
    return false;

  static const char *omp_runtime_apis[] =
    {
      /* This array has 3 sections.  First omp_* calls that don't
	 have any suffixes.  */
      "target_alloc",
      "target_associate_ptr",
      "target_disassociate_ptr",
      "target_free",
      "target_is_present",
      "target_memcpy",
      "target_memcpy_rect",
      NULL,
      /* Now omp_* calls that are available as omp_* and omp_*_.  */
      "capture_affinity",
      "destroy_lock",
      "destroy_nest_lock",
      "display_affinity",
      "get_active_level",
      "get_affinity_format",
      "get_cancellation",
      "get_default_device",
      "get_dynamic",
      "get_initial_device",
      "get_level",
      "get_max_active_levels",
      "get_max_task_priority",
      "get_max_threads",
      "get_nested",
      "get_num_devices",
      "get_num_places",
      "get_num_procs",
      "get_num_teams",
      "get_num_threads",
      "get_partition_num_places",
      "get_place_num",
      "get_proc_bind",
      "get_team_num",
      "get_thread_limit",
      "get_thread_num",
      "get_wtick",
      "get_wtime",
      "in_final",
      "in_parallel",
      "init_lock",
      "init_nest_lock",
      "is_initial_device",
      "pause_resource",
      "pause_resource_all",
      "set_affinity_format",
      "set_lock",
      "set_nest_lock",
      "test_lock",
      "test_nest_lock",
      "unset_lock",
      "unset_nest_lock",
      NULL,
      /* And finally calls available as omp_*, omp_*_ and omp_*_8_.  */
      "get_ancestor_thread_num",
      "get_partition_place_nums",
      "get_place_num_procs",
      "get_place_proc_ids",
      "get_schedule",
      "get_team_size",
      "set_default_device",
      "set_dynamic",
      "set_max_active_levels",
      "set_nested",
      "set_num_threads",
      "set_schedule"
    };

  int mode = 0;
  for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
    {
      if (omp_runtime_apis[i] == NULL)
	{
	  mode++;
	  continue;
	}
      size_t len = strlen (omp_runtime_apis[i]);
      if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
	  && (name[4 + len] == '\0'
	      || (mode > 0
		  && name[4 + len] == '_'
		  && (name[4 + len + 1] == '\0'
		      || (mode > 1
			  && strcmp (name + 4 + len + 1, "8_") == 0)))))
	return true;
    }
  return false;
}
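
/* Matching examples for the table above (illustrative):
   "omp_get_wtime" (exact entry), "omp_set_lock_" (second section,
   Fortran trailing underscore) and "omp_set_num_threads_8_" (third
   section, Fortran integer(kind=8) variant) are all recognized;
   "omp_foo" matches no entry and is rejected.  */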

/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  if (ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	      && setjmp_or_longjmp_p (fndecl))
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside %<simd%> construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	  else if (ctx)
	    {
	      omp_context *octx = ctx;
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
		octx = ctx->outer;
	      if (octx->order_concurrent && omp_runtime_api_call (fndecl))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD in a region with "
			    "%<order(concurrent)%> clause", fndecl);
		}
	    }
	}
    }
  if (remove)
    {
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && gimple_omp_for_combined_into_p (stmt)
	  && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
	{
	  tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
	  tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
	  if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
	    {
	      scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
	      break;
	    }
	}
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ()
	  && gimple_omp_for_collapse (stmt) == 1)
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      if (is_gimple_omp_offloaded (stmt))
	{
	  taskreg_nesting_level++;
	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}

/* Scan all the statements starting at the current statement.  CTX
   contains context information about the OMP directives and
   clauses found during the scan.  */

static void
scan_omp (gimple_seq *body_p, omp_context *ctx)
{
  location_t saved_location;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  saved_location = input_location;
  walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
  input_location = saved_location;
}

/* Re-gimplification and code generation routines.  */

/* Remove omp_member_access_dummy_var variables from gimple_bind_vars
   of BIND if in a method.  */

static void
maybe_remove_omp_member_access_dummy_vars (gbind *bind)
{
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    {
      tree vars = gimple_bind_vars (bind);
      for (tree *pvar = &vars; *pvar; )
	if (omp_member_access_dummy_var (*pvar))
	  *pvar = DECL_CHAIN (*pvar);
	else
	  pvar = &DECL_CHAIN (*pvar);
      gimple_bind_set_vars (bind, vars);
    }
}

/* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
   block and its subblocks.  */

static void
remove_member_access_dummy_vars (tree block)
{
  for (tree *pvar = &BLOCK_VARS (block); *pvar; )
    if (omp_member_access_dummy_var (*pvar))
      *pvar = DECL_CHAIN (*pvar);
    else
      pvar = &DECL_CHAIN (*pvar);

  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    remove_member_access_dummy_vars (block);
}

/* If a context was created for STMT when it was scanned, return it.  */

static omp_context *
maybe_lookup_ctx (gimple *stmt)
{
  splay_tree_node n;
  n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
  return n ? (omp_context *) n->value : NULL;
}

/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context.  Suppose that we have the
   following parallel nesting (variable UIDs showed for clarity):

	iD.1562 = 0;
	#omp parallel shared(iD.1562)		-> outer parallel
	  iD.1562 = iD.1562 + 1;

	  #omp parallel shared (iD.1562)	-> inner parallel
	     iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

	outer parallel		.omp_data_s.1.i -> iD.1562
	inner parallel		.omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it.  So, in
   this case we would have:

	iD.1562 = 0;
	.omp_data_o.1.i = iD.1562;
	#omp parallel shared(iD.1562)		-> outer parallel
	  .omp_data_i.1 = &.omp_data_o.1
	  .omp_data_i.1->i = .omp_data_i.1->i + 1;

	  .omp_data_o.2.i = iD.1562;		-> **
	  #omp parallel shared(iD.1562)		-> inner parallel
	    .omp_data_i.2 = &.omp_data_o.2
	    .omp_data_i.2->i = .omp_data_i.2->i - 1;

	** This is a problem.  The symbol iD.1562 cannot be referenced
	   inside the body of the outer parallel region.  But since we are
	   emitting this copy operation while expanding the inner parallel
	   directive, we need to access the CTX structure of the outer
	   parallel directive to get the correct mapping:

	   .omp_data_o.2.i = .omp_data_i.1->i

   Since there may be other workshare or parallel directives enclosing
   the parallel directive, it may be necessary to walk up the context
   parent chain.  This is not a problem in general because nested
   parallelism happens only rarely.  */

static tree
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  gcc_assert (!ctx->is_nested || t || is_global_var (decl));

  return t ? t : decl;
}

/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
   in outer contexts.  */

static tree
maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t = NULL;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  return t ? t : decl;
}

/* Construct the initialization value for reduction operation OP.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}

/* Construct the initialization value for reduction CLAUSE.  */

tree
omp_reduction_init (tree clause, tree type)
{
  return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
				OMP_CLAUSE_REDUCTION_CODE (clause), type);
}
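
/* Summary of the identities chosen above (illustration):

	+  -  |  ^  ||		0
	*  &&			1
	&			~0 (all ones)
	max			-Inf, or TYPE_MIN_VALUE without infinities
	min			+Inf, or TYPE_MAX_VALUE without infinities  */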
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_modes modes;
  targetm.vectorize.autovectorize_vector_modes (&modes, true);
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	machine_mode alt_vmode;
	for (unsigned int j = 0; j < modes.length (); ++j)
	  if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
	      && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
	    vmode = alt_vmode;

	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type_for_mode (type, vmode);
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

class omplow_simd_context {
public:
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  tree idx;
  tree lane;
  tree lastlane;
  vec<tree, va_heap> simt_eargs;
  gimple_seq simt_dlist;
  poly_uint64_pod max_vf;
  bool is_simt;
};
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
/* Helper function of lower_rec_input_clauses.  For a reference
   in simd reduction, add an underlying variable it will reference.  */

static void
handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
{
  tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
  if (TREE_CONSTANT (z))
    {
      z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
			      get_name (new_vard));
      gimple_add_tmp_var (z);
      TREE_ADDRESSABLE (z) = 1;
      z = build_fold_addr_expr_loc (loc, z);
      gimplify_assign (new_vard, z, ilist);
    }
}
/* Helper function for lower_rec_input_clauses.  Emit into the ILIST
   sequence the code computing (type) (tskred_temp[idx]).  */

static tree
task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
		     unsigned idx)
{
  unsigned HOST_WIDE_INT sz
    = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
  tree r = build2 (MEM_REF, pointer_sized_int_node,
		   tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
					       sz * idx));
  tree v = create_tmp_var (pointer_sized_int_node);
  gimple *g = gimple_build_assign (v, r);
  gimple_seq_add_stmt (ilist, g);
  if (!useless_type_conversion_p (type, pointer_sized_int_node))
    {
      v = create_tmp_var (type);
      g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
      gimple_seq_add_stmt (ilist, g);
    }
  return v;
}
/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
   from the receiver (aka child) side and initializers for REFERENCE_TYPE
   private variables.  Initialization statements go in ILIST, while calls
   to destructors go in DLIST.  */

static void
lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
			 omp_context *ctx, struct omp_for_data *fd)
{
  tree c, copyin_seq, x, ptr;
  bool copyin_by_ref = false;
  bool lastprivate_firstprivate = false;
  bool reduction_omp_orig_ref = false;
  int pass;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
  omplow_simd_context sctx = omplow_simd_context ();
  tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
  tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
  gimple_seq llist[4] = { };
  tree nonconst_simd_if = NULL_TREE;

  copyin_seq = NULL;
  sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);

  /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
     with data sharing clauses referencing variable sized vars.  That
     is unnecessarily hard to support and very unlikely to result in
     vectorized code anyway.  */
  if (is_simd)
    for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
	    sctx.max_vf = 1;
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LASTPRIVATE:
	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
	    sctx.max_vf = 1;
	  else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
	      if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
		sctx.max_vf = 1;
	    }
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
	    sctx.max_vf = 1;
	  else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
	      if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
		sctx.max_vf = 1;
	    }
	  break;
	case OMP_CLAUSE_IF:
	  if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
	    sctx.max_vf = 1;
	  else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
	    nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
	  break;
	case OMP_CLAUSE_SIMDLEN:
	  if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
	    sctx.max_vf = 1;
	  break;
	case OMP_CLAUSE__CONDTEMP_:
	  /* FIXME: lastprivate(conditional:) not handled for SIMT yet.  */
	  if (sctx.is_simt)
	    sctx.max_vf = 1;
	  break;
	default:
	  continue;
	}
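  /* For instance (illustrative example), a loop such as

       int vla[n];
       #pragma omp simd private (vla)

     reaches the OMP_CLAUSE_PRIVATE case above with a variable sized decl
     and therefore forces sctx.max_vf to 1, which effectively enforces
     safelen(1) instead of trying to create per-lane copies of the VLA.  */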
  /* Add a placeholder for simduid.  */
  if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
    sctx.simt_eargs.safe_push (NULL_TREE);

  unsigned task_reduction_cnt = 0;
  unsigned task_reduction_cntorig = 0;
  unsigned task_reduction_cnt_full = 0;
  unsigned task_reduction_cntorig_full = 0;
  unsigned task_reduction_other_cnt = 0;
  tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
  tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
  /* Do all the fixed sized types in the first pass, and the variable sized
     types in the second pass.  This makes sure that the scalar arguments to
     the variable sized types are processed before we use them in the
     variable sized operations.  For task reductions we use 4 passes, in the
     first two we ignore them, in the third one gather arguments for
     GOMP_task_reduction_remap call and in the last pass actually handle
     the task reductions.  */
  for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
			 ? 4 : 2); ++pass)
    {
      if (pass == 2 && task_reduction_cnt)
	{
	  tskred_atype
	    = build_array_type_nelts (ptr_type_node, task_reduction_cnt
						     + task_reduction_cntorig);
	  tskred_avar = create_tmp_var_raw (tskred_atype);
	  gimple_add_tmp_var (tskred_avar);
	  TREE_ADDRESSABLE (tskred_avar) = 1;
	  task_reduction_cnt_full = task_reduction_cnt;
	  task_reduction_cntorig_full = task_reduction_cntorig;
	}
      else if (pass == 3 && task_reduction_cnt)
	{
	  x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
	  gimple *g
	    = gimple_build_call (x, 3, size_int (task_reduction_cnt),
				 size_int (task_reduction_cntorig),
				 build_fold_addr_expr (tskred_avar));
	  gimple_seq_add_stmt (ilist, g);
	}
      if (pass == 3 && task_reduction_other_cnt)
	{
	  /* For reduction clauses, build
	     tskred_base = (void *) tskred_temp[2]
			   + omp_get_thread_num () * tskred_temp[1]
	     or if tskred_temp[1] is known to be constant, that constant
	     directly.  This is the start of the private reduction copy block
	     for the current thread.  */
	  tree v = create_tmp_var (integer_type_node);
	  x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
	  gimple *g = gimple_build_call (x, 0);
	  gimple_call_set_lhs (g, v);
	  gimple_seq_add_stmt (ilist, g);
	  c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tskred_temp = OMP_CLAUSE_DECL (c);
	  if (is_taskreg_ctx (ctx))
	    tskred_temp = lookup_decl (tskred_temp, ctx);
	  tree v2 = create_tmp_var (sizetype);
	  g = gimple_build_assign (v2, NOP_EXPR, v);
	  gimple_seq_add_stmt (ilist, g);
	  if (ctx->task_reductions[0])
	    v = fold_convert (sizetype, ctx->task_reductions[0]);
	  else
	    v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
	  tree v3 = create_tmp_var (sizetype);
	  g = gimple_build_assign (v3, MULT_EXPR, v2, v);
	  gimple_seq_add_stmt (ilist, g);
	  v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
	  tskred_base = create_tmp_var (ptr_type_node);
	  g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
	  gimple_seq_add_stmt (ilist, g);
	}
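      /* In pseudo-GIMPLE the sequence built just above is roughly

	   v  = __builtin_omp_get_thread_num ();
	   v2 = (sizetype) v;
	   v3 = v2 * tskred_temp[1];
	   tskred_base = (void *) tskred_temp[2] + v3;

	 matching the comment at the top of this block (sketch only,
	 tskred_temp reads go through task_reduction_read).  */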
      task_reduction_cnt = 0;
      task_reduction_cntorig = 0;
      task_reduction_other_cnt = 0;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	{
	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
	  tree var, new_var;
	  bool by_ref;
	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	  bool task_reduction_p = false;
	  bool task_reduction_needs_orig_p = false;
	  tree cond = NULL_TREE;

	  switch (c_kind)
	    {
	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
		continue;
	      break;
	    case OMP_CLAUSE_SHARED:
	      /* Ignore shared directives in teams construct inside
		 of target construct.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && !is_host_teams_ctx (ctx))
		continue;
	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
		{
		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
			      || is_global_var (OMP_CLAUSE_DECL (c)));
		  continue;
		}
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_COPYIN:
	      break;
	    case OMP_CLAUSE_LINEAR:
	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		lastprivate_firstprivate = true;
	      break;
	    case OMP_CLAUSE_REDUCTION:
	    case OMP_CLAUSE_IN_REDUCTION:
	      if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
		{
		  task_reduction_p = true;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
		    {
		      task_reduction_other_cnt++;
		      if (pass == 2)
			continue;
		    }
		  else
		    task_reduction_cnt++;
		  if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		    {
		      var = OMP_CLAUSE_DECL (c);
		      /* If var is a global variable that isn't privatized
			 in outer contexts, we don't need to look up the
			 original address, it is always the address of the
			 global variable itself.  */
		      if (!DECL_P (var)
			  || omp_is_reference (var)
			  || !is_global_var
				(maybe_lookup_decl_in_outer_ctx (var, ctx)))
			{
			  task_reduction_needs_orig_p = true;
			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
			    task_reduction_cntorig++;
			}
		    }
		}
	      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		reduction_omp_orig_ref = true;
	      break;
	    case OMP_CLAUSE__REDUCTEMP_:
	      if (!is_taskreg_ctx (ctx))
		continue;
	      /* FALLTHRU */
	    case OMP_CLAUSE__LOOPTEMP_:
	      /* Handle _looptemp_/_reductemp_ clauses only on
		 parallel/task.  */
	      if (fd)
		continue;
	      break;
	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		{
		  lastprivate_firstprivate = true;
		  if (pass != 0 || is_taskloop_ctx (ctx))
		    continue;
		}
	      /* Even without corresponding firstprivate, if
		 decl is Fortran allocatable, it needs outer var
		 reference.  */
	      else if (pass == 0
		       && lang_hooks.decls.omp_private_outer_ref
							(OMP_CLAUSE_DECL (c)))
		lastprivate_firstprivate = true;
	      break;
	    case OMP_CLAUSE_ALIGNED:
	      if (pass != 1)
		continue;
	      var = OMP_CLAUSE_DECL (c);
	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
		  && !is_global_var (var))
		{
		  new_var = maybe_lookup_decl (var, ctx);
		  if (new_var == NULL_TREE)
		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
		  tree alarg = omp_clause_aligned_alignment (c);
		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
		  gimplify_and_add (x, ilist);
		}
	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		       && is_global_var (var))
		{
		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
		  new_var = lookup_decl (var, ctx);
		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  t = build_fold_addr_expr_loc (clause_loc, t);
		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
		  tree alarg = omp_clause_aligned_alignment (c);
		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
		  t = fold_convert_loc (clause_loc, ptype, t);
		  x = create_tmp_var (ptype);
		  t = build2 (MODIFY_EXPR, ptype, x, t);
		  gimplify_and_add (t, ilist);
		  t = build_simple_mem_ref_loc (clause_loc, x);
		  SET_DECL_VALUE_EXPR (new_var, t);
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      continue;
	    case OMP_CLAUSE__CONDTEMP_:
	      if (is_parallel_ctx (ctx)
		  || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
		break;
	      continue;
	    default:
	      continue;
	    }

	  if (task_reduction_p != (pass >= 2))
	    continue;

	  new_var = var = OMP_CLAUSE_DECL (c);
	  if ((c_kind == OMP_CLAUSE_REDUCTION
	       || c_kind == OMP_CLAUSE_IN_REDUCTION)
	      && TREE_CODE (var) == MEM_REF)
	    {
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == INDIRECT_REF
		  || TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      new_var = var;
	    }
	  if (c_kind != OMP_CLAUSE_COPYIN)
	    new_var = lookup_decl (var, ctx);

	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
	    {
	      if (pass != 0)
		continue;
	    }
	  /* C/C++ array section reductions.  */
	  else if ((c_kind == OMP_CLAUSE_REDUCTION
		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
		   && var != OMP_CLAUSE_DECL (c))
	    {
	      if (pass == 0)
		continue;

	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);

	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (orig_var, 1);
		  b = maybe_lookup_decl (b, ctx);
		  if (b == NULL)
		    {
		      b = TREE_OPERAND (orig_var, 1);
		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		    }
		  if (integer_zerop (bias))
		    bias = b;
		  else
		    {
		      bias = fold_convert_loc (clause_loc,
					       TREE_TYPE (b), bias);
		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					      TREE_TYPE (b), b, bias);
		    }
		  orig_var = TREE_OPERAND (orig_var, 0);
		}
	      if (pass == 2)
		{
		  tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  if (is_global_var (out)
		      && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
		      && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
			  || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
			      != POINTER_TYPE)))
		    x = var;
		  else
		    {
		      bool by_ref = use_pointer_for_field (var, NULL);
		      x = build_receiver_ref (var, by_ref, ctx);
		      if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
			  && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
			      == POINTER_TYPE))
			x = build_fold_addr_expr (x);
		    }
		  if (TREE_CODE (orig_var) == INDIRECT_REF)
		    x = build_simple_mem_ref (x);
		  else if (TREE_CODE (orig_var) == ADDR_EXPR)
		    {
		      if (var == TREE_OPERAND (orig_var, 0))
			x = build_fold_addr_expr (x);
		    }
		  bias = fold_convert (sizetype, bias);
		  x = fold_convert (ptr_type_node, x);
		  x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				       TREE_TYPE (x), x, bias);
		  unsigned cnt = task_reduction_cnt - 1;
		  if (!task_reduction_needs_orig_p)
		    cnt += (task_reduction_cntorig_full
			    - task_reduction_cntorig);
		  else
		    cnt = task_reduction_cntorig - 1;
		  tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				   size_int (cnt), NULL_TREE, NULL_TREE);
		  gimplify_assign (r, x, ilist);
		  continue;
		}
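	      /* At this point (pass 2) the bias-adjusted address of the
		 original array section has been stored into
		 tskred_avar[cnt]; pass 3 then lets
		 GOMP_task_reduction_remap translate those addresses into
		 the per-thread private copies (summary of the code
		 above).  */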
	      if (TREE_CODE (orig_var) == INDIRECT_REF
		  || TREE_CODE (orig_var) == ADDR_EXPR)
		orig_var = TREE_OPERAND (orig_var, 0);
	      tree d = OMP_CLAUSE_DECL (c);
	      tree type = TREE_TYPE (d);
	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      const char *name = get_name (orig_var);
	      if (pass == 3)
		{
		  tree xv = create_tmp_var (ptr_type_node);
		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		    {
		      unsigned cnt = task_reduction_cnt - 1;
		      if (!task_reduction_needs_orig_p)
			cnt += (task_reduction_cntorig_full
				- task_reduction_cntorig);
		      else
			cnt = task_reduction_cntorig - 1;
		      x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				  size_int (cnt), NULL_TREE, NULL_TREE);

		      gimple *g = gimple_build_assign (xv, x);
		      gimple_seq_add_stmt (ilist, g);
		    }
		  else
		    {
		      unsigned int idx = *ctx->task_reduction_map->get (c);
		      tree off;
		      if (ctx->task_reductions[1 + idx])
			off = fold_convert (sizetype,
					    ctx->task_reductions[1 + idx]);
		      else
			off = task_reduction_read (ilist, tskred_temp, sizetype,
						   7 + 3 * idx + 1);
		      gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
						       tskred_base, off);
		      gimple_seq_add_stmt (ilist, g);
		    }
		  x = fold_convert (build_pointer_type (boolean_type_node),
				    xv);
		  if (TREE_CONSTANT (v))
		    x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
				     TYPE_SIZE_UNIT (type));
		  else
		    {
		      tree t = maybe_lookup_decl (v, ctx);
		      if (t)
			v = t;
		      else
			v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		      gimplify_expr (&v, ilist, NULL, is_gimple_val,
				     fb_rvalue);
		      t = fold_build2_loc (clause_loc, PLUS_EXPR,
					   TREE_TYPE (v), v,
					   build_int_cst (TREE_TYPE (v), 1));
		      t = fold_build2_loc (clause_loc, MULT_EXPR,
					   TREE_TYPE (v), t,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		      x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
		    }
		  cond = create_tmp_var (TREE_TYPE (x));
		  gimplify_assign (cond, x, ilist);
		  x = xv;
		}
	      else if (TREE_CONSTANT (v))
		{
		  x = create_tmp_var_raw (type, name);
		  gimple_add_tmp_var (x);
		  TREE_ADDRESSABLE (x) = 1;
		  x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else
		{
		  tree atmp
		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  tree t = maybe_lookup_decl (v, ctx);
		  if (t)
		    v = t;
		  else
		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
				       TREE_TYPE (v), v,
				       build_int_cst (TREE_TYPE (v), 1));
		  t = fold_build2_loc (clause_loc, MULT_EXPR,
				       TREE_TYPE (v), t,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
		  x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
		}

	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      x = fold_convert_loc (clause_loc, ptype, x);
	      tree y = create_tmp_var (ptype, name);
	      gimplify_assign (y, x, ilist);
	      x = y;
	      /* Could be used in a lastprivate context.  */
	      tree yb = y;
	      if (!integer_zerop (bias))
		{
		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
					   bias);
		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
					 x);
		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
					pointer_sized_int_node, yb, bias);
		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
		  yb = create_tmp_var (ptype, name);
		  gimplify_assign (yb, x, ilist);
		  x = yb;
		}

	      d = TREE_OPERAND (d, 0);
	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
		d = TREE_OPERAND (d, 0);
	      if (TREE_CODE (d) == ADDR_EXPR)
		{
		  if (orig_var != var)
		    {
		      gcc_assert (is_variable_sized (orig_var));
		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
					    x);
		      gimplify_assign (new_var, x, ilist);
		      tree new_orig_var = lookup_decl (orig_var, ctx);
		      tree t = build_fold_indirect_ref (new_var);
		      DECL_IGNORED_P (new_var) = 0;
		      TREE_THIS_NOTRAP (t) = 1;
		      SET_DECL_VALUE_EXPR (new_orig_var, t);
		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
		    }
		  else
		    {
		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
				  build_int_cst (ptype, 0));
		      SET_DECL_VALUE_EXPR (new_var, x);
		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		    }
		}
	      else
		{
		  gcc_assert (orig_var == var);
		  if (TREE_CODE (d) == INDIRECT_REF)
		    {
		      x = create_tmp_var (ptype, name);
		      TREE_ADDRESSABLE (x) = 1;
		      gimplify_assign (x, yb, ilist);
		      x = build_fold_addr_expr_loc (clause_loc, x);
		    }
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  gimplify_assign (new_var, x, ilist);
		}
	      /* GOMP_taskgroup_reduction_register memsets the whole
		 array to zero.  If the initializer is zero, we don't
		 need to initialize it again, just mark it as ever
		 used unconditionally, i.e. cond = true.  */
	      if (cond
		  && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
		  && initializer_zerop (omp_reduction_init (c,
							    TREE_TYPE (type))))
		{
		  gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
						   boolean_true_node);
		  gimple_seq_add_stmt (ilist, g);
		  continue;
		}
	      tree end = create_artificial_label (UNKNOWN_LOCATION);
	      if (cond)
		{
		  gimple *g;
		  if (!is_parallel_ctx (ctx))
		    {
		      tree condv = create_tmp_var (boolean_type_node);
		      g = gimple_build_assign (condv,
					       build_simple_mem_ref (cond));
		      gimple_seq_add_stmt (ilist, g);
		      tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
		      g = gimple_build_cond (NE_EXPR, condv,
					     boolean_false_node, end, lab1);
		      gimple_seq_add_stmt (ilist, g);
		      gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
		    }
		  g = gimple_build_assign (build_simple_mem_ref (cond),
					   boolean_true_node);
		  gimple_seq_add_stmt (ilist, g);
		}

	      tree y1 = create_tmp_var (ptype);
	      gimplify_assign (y1, y, ilist);
	      tree i2 = NULL_TREE, y2 = NULL_TREE;
	      tree body2 = NULL_TREE, end2 = NULL_TREE;
	      tree y3 = NULL_TREE, y4 = NULL_TREE;
	      if (task_reduction_needs_orig_p)
		{
		  y3 = create_tmp_var (ptype);
		  tree ref;
		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		    ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				  size_int (task_reduction_cnt_full
					    + task_reduction_cntorig - 1),
				  NULL_TREE, NULL_TREE);
		  else
		    {
		      unsigned int idx = *ctx->task_reduction_map->get (c);
		      ref = task_reduction_read (ilist, tskred_temp, ptype,
						 7 + 3 * idx);
		    }
		  gimplify_assign (y3, ref, ilist);
		}
	      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
		{
		  if (pass != 3)
		    {
		      y2 = create_tmp_var (ptype);
		      gimplify_assign (y2, y, ilist);
		    }
		  if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		    {
		      tree ref = build_outer_var_ref (var, ctx);
		      /* For ref build_outer_var_ref already performs this.  */
		      if (TREE_CODE (d) == INDIRECT_REF)
			gcc_assert (omp_is_reference (var));
		      else if (TREE_CODE (d) == ADDR_EXPR)
			ref = build_fold_addr_expr (ref);
		      else if (omp_is_reference (var))
			ref = build_fold_addr_expr (ref);
		      ref = fold_convert_loc (clause_loc, ptype, ref);
		      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
			  && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
			{
			  y3 = create_tmp_var (ptype);
			  gimplify_assign (y3, unshare_expr (ref), ilist);
			}
		      if (is_simd)
			{
			  y4 = create_tmp_var (ptype);
			  gimplify_assign (y4, ref, dlist);
			}
		    }
		}
	      tree i = create_tmp_var (TREE_TYPE (v));
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
	      if (y2)
		{
		  i2 = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0),
				   dlist);
		  body2 = create_artificial_label (UNKNOWN_LOCATION);
		  end2 = create_artificial_label (UNKNOWN_LOCATION);
		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
		}
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  SET_DECL_VALUE_EXPR (decl_placeholder,
				       build_simple_mem_ref (y1));
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  SET_DECL_VALUE_EXPR (placeholder,
				       y3 ? build_simple_mem_ref (y3)
				       : error_mark_node);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  x = lang_hooks.decls.omp_clause_default_ctor
			(c, build_simple_mem_ref (y1),
			 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
		  if (x)
		    gimplify_and_add (x, ilist);
		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		    {
		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (ilist, tseq);
		    }
		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		  if (is_simd)
		    {
		      SET_DECL_VALUE_EXPR (decl_placeholder,
					   build_simple_mem_ref (y2));
		      SET_DECL_VALUE_EXPR (placeholder,
					   build_simple_mem_ref (y4));
		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (dlist, tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    }
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
		  if (y2)
		    {
		      x = lang_hooks.decls.omp_clause_dtor
						(c, build_simple_mem_ref (y2));
		      if (x)
			gimplify_and_add (x, dlist);
		    }
		}
	      else
		{
		  x = omp_reduction_init (c, TREE_TYPE (type));
		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);

		  /* reduction(-:var) sums up the partial results, so it
		     acts identically to reduction(+:var).  */
		  if (code == MINUS_EXPR)
		    code = PLUS_EXPR;

		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
		  if (is_simd)
		    {
		      x = build2 (code, TREE_TYPE (type),
				  build_simple_mem_ref (y4),
				  build_simple_mem_ref (y2));
		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
		    }
		}
	      gimple *g
		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (ilist, g);
	      if (y3)
		{
		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  gimple_seq_add_stmt (ilist, g);
		}
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (ilist, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
	      gimple_seq_add_stmt (ilist, g);
	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
	      if (y2)
		{
		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  gimple_seq_add_stmt (dlist, g);
		  if (y4)
		    {
		      g = gimple_build_assign
				(y4, POINTER_PLUS_EXPR, y4,
				 TYPE_SIZE_UNIT (TREE_TYPE (type)));
		      gimple_seq_add_stmt (dlist, g);
		    }
		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
					   build_int_cst (TREE_TYPE (i2), 1));
		  gimple_seq_add_stmt (dlist, g);
		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
		  gimple_seq_add_stmt (dlist, g);
		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
		}
	      continue;
	    }
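	  /* The loop emitted above initializes the private array section
	     element by element, roughly (sketch, identifiers as in the
	     code above):

	       i = 0;
	     body:
	       *y1 = init_val;      (or the placeholder ctor/GIMPLE_INIT)
	       y1 = y1 + sizeof (elt);
	       i = i + 1;
	       if (i <= v) goto body; else goto end;
	     end:

	     with a mirrored loop appended to DLIST that combines the
	     partial results through y2/y4.  */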
	  else if (task_reduction_p)
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
		continue;
	      if (pass == 2)
		{
		  bool by_ref = use_pointer_for_field (var, ctx);
		  x = build_receiver_ref (var, by_ref, ctx);
		  if (!omp_is_reference (var))
		    x = build_fold_addr_expr (x);
		  x = fold_convert (ptr_type_node, x);
		  unsigned cnt = task_reduction_cnt - 1;
		  if (!task_reduction_needs_orig_p)
		    cnt += task_reduction_cntorig_full - task_reduction_cntorig;
		  else
		    cnt = task_reduction_cntorig - 1;
		  tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				   size_int (cnt), NULL_TREE, NULL_TREE);
		  gimplify_assign (r, x, ilist);
		  continue;
		}

	      tree type = TREE_TYPE (new_var);
	      if (!omp_is_reference (var))
		type = build_pointer_type (type);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		{
		  unsigned cnt = task_reduction_cnt - 1;
		  if (!task_reduction_needs_orig_p)
		    cnt += (task_reduction_cntorig_full
			    - task_reduction_cntorig);
		  else
		    cnt = task_reduction_cntorig - 1;
		  x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
			      size_int (cnt), NULL_TREE, NULL_TREE);
		}
	      else
		{
		  unsigned int idx = *ctx->task_reduction_map->get (c);
		  tree off;
		  if (ctx->task_reductions[1 + idx])
		    off = fold_convert (sizetype,
					ctx->task_reductions[1 + idx]);
		  else
		    off = task_reduction_read (ilist, tskred_temp, sizetype,
					       7 + 3 * idx + 1);
		  x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
				   tskred_base, off);
		}
	      x = fold_convert (type, x);
	      tree t;
	      if (omp_is_reference (var))
		{
		  gimplify_assign (new_var, x, ilist);
		  t = new_var;
		  new_var = build_simple_mem_ref (new_var);
		}
	      else
		{
		  t = create_tmp_var (type);
		  gimplify_assign (t, x, ilist);
		  SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      t = fold_convert (build_pointer_type (boolean_type_node), t);
	      t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
			       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      cond = create_tmp_var (TREE_TYPE (t));
	      gimplify_assign (cond, t, ilist);
	    }
	  else if (is_variable_sized (var))
	    {
	      /* For variable sized types, we need to allocate the
		 actual storage here.  Call alloca and store the
		 result in the pointer decl that we created elsewhere.  */
	      if (pass == 0)
		continue;

	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
		{
		  gcall *stmt;
		  tree tmp, atmp;

		  ptr = DECL_VALUE_EXPR (new_var);
		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
		  ptr = TREE_OPERAND (ptr, 0);
		  gcc_assert (DECL_P (ptr));
		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));

		  /* void *tmp = __builtin_alloca */
		  atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  stmt = gimple_build_call (atmp, 2, x,
					    size_int (DECL_ALIGN (var)));
		  cfun->calls_alloca = 1;
		  tmp = create_tmp_var_raw (ptr_type_node);
		  gimple_add_tmp_var (tmp);
		  gimple_call_set_lhs (stmt, tmp);

		  gimple_seq_add_stmt (ilist, stmt);

		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
		  gimplify_assign (ptr, x, ilist);
		}
	    }
	  else if (omp_is_reference (var)
		   && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
		       || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
	    {
	      /* For references that are being privatized for Fortran,
		 allocate new backing storage for the new pointer
		 variable.  This allows us to avoid changing all the
		 code that expects a pointer to something that expects
		 a direct variable.  */
	      if (pass == 0)
		continue;

	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
		{
		  x = build_receiver_ref (var, false, ctx);
		  x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else if (TREE_CONSTANT (x))
		{
		  /* For reduction in SIMD loop, defer adding the
		     initialization of the reference, because if we decide
		     to use SIMD array for it, the initialization could cause
		     expansion ICE.  Ditto for other privatization clauses.  */
		  if (is_simd)
		    x = NULL_TREE;
		  else
		    {
		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					      get_name (var));
		      gimple_add_tmp_var (x);
		      TREE_ADDRESSABLE (x) = 1;
		      x = build_fold_addr_expr_loc (clause_loc, x);
		    }
		}
	      else
		{
		  tree atmp
		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
		  tree al = size_int (TYPE_ALIGN (rtype));
		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		}

	      if (x)
		{
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  gimplify_assign (new_var, x, ilist);
		}

	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	    }
	  else if ((c_kind == OMP_CLAUSE_REDUCTION
		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      if (pass == 0)
		continue;
	    }
	  else if (pass != 0)
	    continue;
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	      /* Ignore shared directives in teams construct inside
		 target construct.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && !is_host_teams_ctx (ctx))
		continue;
	      /* Shared global vars are just accessed directly.  */
	      if (is_global_var (new_var))
		break;
	      /* For taskloop firstprivate/lastprivate, represented
		 as firstprivate and shared clause on the task, new_var
		 is the firstprivate var.  */
	      if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
		break;
	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
		 needs to be delayed until after fixup_child_record_type so
		 that we get the correct type during the dereference.  */
	      by_ref = use_pointer_for_field (var, ctx);
	      x = build_receiver_ref (var, by_ref, ctx);
	      SET_DECL_VALUE_EXPR (new_var, x);
	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;

	      /* ??? If VAR is not passed by reference, and the variable
		 hasn't been initialized yet, then we'll get a warning for
		 the store into the omp_data_s structure.  Ideally, we'd be
		 able to notice this and not store anything at all, but
		 we're generating code too early.  Suppress the warning.  */
	      if (!by_ref)
		TREE_NO_WARNING (var) = 1;
	      break;

	    case OMP_CLAUSE__CONDTEMP_:
	      if (is_parallel_ctx (ctx))
		{
		  x = build_receiver_ref (var, false, ctx);
		  SET_DECL_VALUE_EXPR (new_var, x);
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
		{
		  x = build_zero_cst (TREE_TYPE (var));
		  goto do_private;
		}
	      break;
	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		break;
	      /* FALLTHRU */

	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
		x = build_outer_var_ref (var, ctx);
	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
		{
		  if (is_task_ctx (ctx))
		    x = build_receiver_ref (var, false, ctx);
		  else
		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
		}
	      else
		x = NULL;
	    do_private:
	      tree nx;
	      bool copy_ctor;
	      copy_ctor = false;
	      nx = unshare_expr (new_var);
	      if (is_simd
		  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
		copy_ctor = true;
	      if (copy_ctor)
		nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
	      else
		nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
	      if (is_simd)
		{
		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
		  if ((TREE_ADDRESSABLE (new_var) || nx || y
		       || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
			   && (gimple_omp_for_collapse (ctx->stmt) != 1
			       || (gimple_omp_for_index (ctx->stmt, 0)
				   != new_var)))
		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
		       || omp_is_reference (var))
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar))
		    {
		      if (omp_is_reference (var))
			{
			  gcc_assert (TREE_CODE (new_var) == MEM_REF);
			  tree new_vard = TREE_OPERAND (new_var, 0);
			  gcc_assert (DECL_P (new_vard));
			  SET_DECL_VALUE_EXPR (new_vard,
					       build_fold_addr_expr (lvar));
			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
			}

		      if (nx)
			{
			  tree iv = unshare_expr (ivar);
			  if (copy_ctor)
			    x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
								       x);
			  else
			    x = lang_hooks.decls.omp_clause_default_ctor (c,
									  iv,
									  x);
			}
		      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
			{
			  x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
				      unshare_expr (ivar), x);
			  nx = x;
			}
		      if (x)
			gimplify_and_add (x, &llist[0]);
		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
			  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
			{
			  tree v = new_var;
			  if (!DECL_P (v))
			    {
			      gcc_assert (TREE_CODE (v) == MEM_REF);
			      v = TREE_OPERAND (v, 0);
			      gcc_assert (DECL_P (v));
			    }
			  v = *ctx->lastprivate_conditional_map->get (v);
			  tree t = create_tmp_var (TREE_TYPE (v));
			  tree z = build_zero_cst (TREE_TYPE (v));
			  tree orig_v
			    = build_outer_var_ref (var, ctx,
						   OMP_CLAUSE_LASTPRIVATE);
			  gimple_seq_add_stmt (dlist,
					       gimple_build_assign (t, z));
			  gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
			  tree civar = DECL_VALUE_EXPR (v);
			  gcc_assert (TREE_CODE (civar) == ARRAY_REF);
			  civar = unshare_expr (civar);
			  TREE_OPERAND (civar, 1) = sctx.idx;
			  x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
				      unshare_expr (civar));
			  x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
				      build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
					      orig_v, unshare_expr (ivar)));
			  tree cond = build2 (LT_EXPR, boolean_type_node, t,
					      civar);
			  x = build3 (COND_EXPR, void_type_node, cond, x,
				      void_node);
			  gimple_seq tseq = NULL;
			  gimplify_and_add (x, &tseq);
			  if (ctx->outer)
			    lower_omp (&tseq, ctx->outer);
			  gimple_seq_add_seq (&llist[1], tseq);
			}
		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
			  && ctx->for_simd_scan_phase)
			{
			  x = unshare_expr (ivar);
			  tree orig_v
			    = build_outer_var_ref (var, ctx,
						   OMP_CLAUSE_LASTPRIVATE);
			  x = lang_hooks.decls.omp_clause_assign_op (c, x,
								     orig_v);
			  gimplify_and_add (x, &llist[0]);
			}
		      if (y)
			{
			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
			  if (y)
			    gimplify_and_add (y, &llist[1]);
			}
		      break;
		    }
		  if (omp_is_reference (var))
		    {
		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
		      tree new_vard = TREE_OPERAND (new_var, 0);
		      gcc_assert (DECL_P (new_vard));
		      tree type = TREE_TYPE (TREE_TYPE (new_vard));
		      x = TYPE_SIZE_UNIT (type);
		      if (TREE_CONSTANT (x))
			{
			  x = create_tmp_var_raw (type, get_name (var));
			  gimple_add_tmp_var (x);
			  TREE_ADDRESSABLE (x) = 1;
			  x = build_fold_addr_expr_loc (clause_loc, x);
			  x = fold_convert_loc (clause_loc,
						TREE_TYPE (new_vard), x);
			  gimplify_assign (new_vard, x, ilist);
			}
		    }
		}
	      if (nx)
		gimplify_and_add (nx, ilist);
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  && is_simd
		  && ctx->for_simd_scan_phase)
		{
		  tree orig_v = build_outer_var_ref (var, ctx,
						     OMP_CLAUSE_LASTPRIVATE);
		  x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
							     orig_v);
		  gimplify_and_add (x, ilist);
		}
	      /* FALLTHRU */

	    do_dtor:
	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		gimplify_and_add (x, dlist);
	      break;
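	      /* As a concrete example (illustrative), for
		   #pragma omp parallel private (a)
		 on a C++ class object A, the do_private path above invokes
		 the default constructor for the new A inside the child
		 function via omp_clause_default_ctor and queues the
		 destructor in DLIST via do_dtor; in a simd loop the same
		 happens per "omp simd array" element through
		 llist[0]/llist[1].  */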
	    case OMP_CLAUSE_LINEAR:
	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		goto do_firstprivate;
	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		x = NULL;
	      else
		x = build_outer_var_ref (var, ctx);
	      goto do_private;

	    case OMP_CLAUSE_FIRSTPRIVATE:
	      if (is_task_ctx (ctx))
		{
		  if ((omp_is_reference (var)
		       && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
		      || is_variable_sized (var))
		    goto do_dtor;
		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
									  ctx))
			   || use_pointer_for_field (var, NULL))
		    {
		      x = build_receiver_ref (var, false, ctx);
		      SET_DECL_VALUE_EXPR (new_var, x);
		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		      goto do_dtor;
		    }
		}
	      if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
		  && omp_is_reference (var))
		{
		  x = build_outer_var_ref (var, ctx);
		  gcc_assert (TREE_CODE (x) == MEM_REF
			      && integer_zerop (TREE_OPERAND (x, 1)));
		  x = TREE_OPERAND (x, 0);
		  x = lang_hooks.decls.omp_clause_copy_ctor
						(c, unshare_expr (new_var), x);
		  gimplify_and_add (x, ilist);
		  goto do_dtor;
		}
	    do_firstprivate:
	      x = build_outer_var_ref (var, ctx);
	      if (is_simd)
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		      && gimple_omp_for_combined_into_p (ctx->stmt))
		    {
		      tree t = OMP_CLAUSE_LINEAR_STEP (c);
		      tree stept = TREE_TYPE (t);
		      tree ct = omp_find_clause (clauses,
						 OMP_CLAUSE__LOOPTEMP_);
		      gcc_assert (ct);
		      tree l = OMP_CLAUSE_DECL (ct);
		      tree n1 = fd->loop.n1;
		      tree step = fd->loop.step;
		      tree itype = TREE_TYPE (l);
		      if (POINTER_TYPE_P (itype))
			itype = signed_type_for (itype);
		      l = fold_build2 (MINUS_EXPR, itype, l, n1);
		      if (TYPE_UNSIGNED (itype)
			  && fd->loop.cond_code == GT_EXPR)
			l = fold_build2 (TRUNC_DIV_EXPR, itype,
					 fold_build1 (NEGATE_EXPR, itype, l),
					 fold_build1 (NEGATE_EXPR,
						      itype, step));
		      else
			l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
		      t = fold_build2 (MULT_EXPR, stept,
				       fold_convert (stept, l), t);

		      if (OMP_CLAUSE_LINEAR_ARRAY (c))
			{
			  if (omp_is_reference (var))
			    {
			      gcc_assert (TREE_CODE (new_var) == MEM_REF);
			      tree new_vard = TREE_OPERAND (new_var, 0);
			      gcc_assert (DECL_P (new_vard));
			      tree type = TREE_TYPE (TREE_TYPE (new_vard));
			      nx = TYPE_SIZE_UNIT (type);
			      if (TREE_CONSTANT (nx))
				{
				  nx = create_tmp_var_raw (type,
							   get_name (var));
				  gimple_add_tmp_var (nx);
				  TREE_ADDRESSABLE (nx) = 1;
				  nx = build_fold_addr_expr_loc (clause_loc,
								 nx);
				  nx = fold_convert_loc (clause_loc,
							 TREE_TYPE (new_vard),
							 nx);
				  gimplify_assign (new_vard, nx, ilist);
				}
			    }

			  x = lang_hooks.decls.omp_clause_linear_ctor
							(c, new_var, x, t);
			  gimplify_and_add (x, ilist);
			  goto do_dtor;
			}

		      if (POINTER_TYPE_P (TREE_TYPE (x)))
			x = fold_build2 (POINTER_PLUS_EXPR,
					 TREE_TYPE (x), x, t);
		      else
			x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
		    }

		  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
		       || TREE_ADDRESSABLE (new_var)
		       || omp_is_reference (var))
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar))
		    {
		      if (omp_is_reference (var))
			{
			  gcc_assert (TREE_CODE (new_var) == MEM_REF);
			  tree new_vard = TREE_OPERAND (new_var, 0);
			  gcc_assert (DECL_P (new_vard));
			  SET_DECL_VALUE_EXPR (new_vard,
					       build_fold_addr_expr (lvar));
			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
			}
		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
			{
			  tree iv = create_tmp_var (TREE_TYPE (new_var));
			  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
			  gimplify_and_add (x, ilist);
			  gimple_stmt_iterator gsi
			    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
			  gimple *g
			    = gimple_build_assign (unshare_expr (lvar), iv);
			  gsi_insert_before_without_update (&gsi, g,
							    GSI_SAME_STMT);
			  tree t = OMP_CLAUSE_LINEAR_STEP (c);
			  enum tree_code code = PLUS_EXPR;
			  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
			    code = POINTER_PLUS_EXPR;
			  g = gimple_build_assign (iv, code, iv, t);
			  gsi_insert_before_without_update (&gsi, g,
							    GSI_SAME_STMT);
			  break;
			}
		      x = lang_hooks.decls.omp_clause_copy_ctor
						(c, unshare_expr (ivar), x);
		      gimplify_and_add (x, &llist[0]);
		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
		      if (x)
			gimplify_and_add (x, &llist[1]);
		      break;
		    }
		  if (omp_is_reference (var))
		    {
		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
		      tree new_vard = TREE_OPERAND (new_var, 0);
		      gcc_assert (DECL_P (new_vard));
		      tree type = TREE_TYPE (TREE_TYPE (new_vard));
		      nx = TYPE_SIZE_UNIT (type);
		      if (TREE_CONSTANT (nx))
			{
			  nx = create_tmp_var_raw (type, get_name (var));
			  gimple_add_tmp_var (nx);
			  TREE_ADDRESSABLE (nx) = 1;
			  nx = build_fold_addr_expr_loc (clause_loc, nx);
			  nx = fold_convert_loc (clause_loc,
						 TREE_TYPE (new_vard), nx);
			  gimplify_assign (new_vard, nx, ilist);
			}
		    }
		}
	      x = lang_hooks.decls.omp_clause_copy_ctor
						(c, unshare_expr (new_var), x);
	      gimplify_and_add (x, ilist);
	      goto do_dtor;
	    case OMP_CLAUSE__LOOPTEMP_:
	    case OMP_CLAUSE__REDUCTEMP_:
	      gcc_assert (is_taskreg_ctx (ctx));
	      x = build_outer_var_ref (var, ctx);
	      x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
	      gimplify_and_add (x, ilist);
	      break;

	    case OMP_CLAUSE_COPYIN:
	      by_ref = use_pointer_for_field (var, NULL);
	      x = build_receiver_ref (var, by_ref, ctx);
	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
	      append_to_statement_list (x, &copyin_seq);
	      copyin_by_ref |= by_ref;
	      break;
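	      /* E.g. for  copyin (t)  on a threadprivate T, the statement
		 appended to COPYIN_SEQ is essentially
		   t = .omp_data_i->t
		 (by reference when use_pointer_for_field so decides);
		 COPYIN_SEQ is later emitted guarded so that the master
		 thread does not self-copy (illustrative sketch).  */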
	    case OMP_CLAUSE_REDUCTION:
	    case OMP_CLAUSE_IN_REDUCTION:
	      /* OpenACC reductions are initialized using the
		 GOACC_REDUCTION internal function.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		break;
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  gimple_seq tseq;
		  tree ptype = TREE_TYPE (placeholder);
		  if (cond)
		    {
		      x = error_mark_node;
		      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
			  && !task_reduction_needs_orig_p)
			x = var;
		      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
			{
			  tree pptype = build_pointer_type (ptype);
			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
			    x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
					size_int (task_reduction_cnt_full
						  + task_reduction_cntorig - 1),
					NULL_TREE, NULL_TREE);
			  else
			    {
			      unsigned int idx
				= *ctx->task_reduction_map->get (c);
			      x = task_reduction_read (ilist, tskred_temp,
						       pptype, 7 + 3 * idx);
			    }
			  x = fold_convert (pptype, x);
			  x = build_simple_mem_ref (x);
			}
		    }
		  else
		    {
		      x = build_outer_var_ref (var, ctx);

		      if (omp_is_reference (var)
			  && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
			x = build_fold_addr_expr_loc (clause_loc, x);
		    }
		  SET_DECL_VALUE_EXPR (placeholder, x);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  tree new_vard = new_var;
		  if (omp_is_reference (var))
		    {
		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
		      new_vard = TREE_OPERAND (new_var, 0);
		      gcc_assert (DECL_P (new_vard));
		    }
		  tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
		  if (is_simd
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      && OMP_CLAUSE_REDUCTION_INSCAN (c))
		    rvarp = &rvar;
		  if (is_simd
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar, rvarp,
						       &rvar2))
		    {
		      if (new_vard == new_var)
			{
			  gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
			  SET_DECL_VALUE_EXPR (new_var, ivar);
			}
		      else
			{
			  SET_DECL_VALUE_EXPR (new_vard,
					       build_fold_addr_expr (ivar));
			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
			}
		      x = lang_hooks.decls.omp_clause_default_ctor
				(c, unshare_expr (ivar),
				 build_outer_var_ref (var, ctx));
		      if (rvarp && ctx->for_simd_scan_phase)
			{
			  if (x)
			    gimplify_and_add (x, &llist[0]);
			  x = lang_hooks.decls.omp_clause_dtor (c, ivar);
			  if (x)
			    gimplify_and_add (x, &llist[1]);
			  break;
			}
		      else if (rvarp)
			{
			  if (x)
			    {
			      gimplify_and_add (x, &llist[0]);

			      tree ivar2 = unshare_expr (lvar);
			      TREE_OPERAND (ivar2, 1) = sctx.idx;
			      x = lang_hooks.decls.omp_clause_default_ctor
				    (c, ivar2, build_outer_var_ref (var, ctx));
			      gimplify_and_add (x, &llist[0]);

			      if (rvar2)
				{
				  x = lang_hooks.decls.omp_clause_default_ctor
					(c, unshare_expr (rvar2),
					 build_outer_var_ref (var, ctx));
				  gimplify_and_add (x, &llist[0]);
				}

			      /* For types that need construction, add another
				 private var which will be default constructed
				 and optionally initialized with
				 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
				 loop we want to assign this value instead of
				 constructing and destructing it in each
				 iteration.  */
			      tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
			      gimple_add_tmp_var (nv);
			      ctx->cb.decl_map->put (TREE_OPERAND (rvar2
								   ? rvar2
								   : ivar, 0),
						     nv);
			      x = lang_hooks.decls.omp_clause_default_ctor
				    (c, nv, build_outer_var_ref (var, ctx));
			      gimplify_and_add (x, ilist);

			      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
				{
				  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
				  x = DECL_VALUE_EXPR (new_vard);
				  tree vexpr = nv;
				  if (new_vard != new_var)
				    vexpr = build_fold_addr_expr (nv);
				  SET_DECL_VALUE_EXPR (new_vard, vexpr);
				  lower_omp (&tseq, ctx);
				  SET_DECL_VALUE_EXPR (new_vard, x);
				  gimple_seq_add_seq (ilist, tseq);
				  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
				}

			      x = lang_hooks.decls.omp_clause_dtor (c, nv);
			      if (x)
				gimplify_and_add (x, dlist);
			    }

			  tree ref = build_outer_var_ref (var, ctx);
			  x = unshare_expr (ivar);
			  x = lang_hooks.decls.omp_clause_assign_op (c, x,
								     ref);
			  gimplify_and_add (x, &llist[0]);

			  ref = build_outer_var_ref (var, ctx);
			  x = lang_hooks.decls.omp_clause_assign_op (c, ref,
								     rvar);
			  gimplify_and_add (x, &llist[3]);

			  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
			  if (new_vard == new_var)
			    SET_DECL_VALUE_EXPR (new_var, lvar);
			  else
			    SET_DECL_VALUE_EXPR (new_vard,
						 build_fold_addr_expr (lvar));

			  x = lang_hooks.decls.omp_clause_dtor (c, ivar);
			  if (x)
			    gimplify_and_add (x, &llist[1]);

			  tree ivar2 = unshare_expr (lvar);
			  TREE_OPERAND (ivar2, 1) = sctx.idx;
			  x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
			  if (x)
			    gimplify_and_add (x, &llist[1]);

			  if (rvar2)
			    {
			      x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
			      if (x)
				gimplify_and_add (x, &llist[1]);
			    }
			  break;
			}
		      if (x)
			gimplify_and_add (x, &llist[0]);
		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
			{
			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			  lower_omp (&tseq, ctx);
			  gimple_seq_add_seq (&llist[0], tseq);
			}
		      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (&llist[1], tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		      if (new_vard == new_var)
			SET_DECL_VALUE_EXPR (new_var, lvar);
		      else
			SET_DECL_VALUE_EXPR (new_vard,
					     build_fold_addr_expr (lvar));
		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
		      if (x)
			gimplify_and_add (x, &llist[1]);
		      break;
		    }
		  /* If this is a reference to constant size reduction var
		     with placeholder, we haven't emitted the initializer
		     for it because it is undesirable if SIMD arrays are used.
		     But if they aren't used, we need to emit the deferred
		     initialization now.  */
		  else if (omp_is_reference (var) && is_simd)
		    handle_simd_reference (clause_loc, new_vard, ilist);

		  tree lab2 = NULL_TREE;
		  if (cond)
		    {
		      gimple *g;
		      if (!is_parallel_ctx (ctx))
			{
			  tree condv = create_tmp_var (boolean_type_node);
			  tree m = build_simple_mem_ref (cond);
			  g = gimple_build_assign (condv, m);
			  gimple_seq_add_stmt (ilist, g);
			  tree lab1
			    = create_artificial_label (UNKNOWN_LOCATION);
			  lab2 = create_artificial_label (UNKNOWN_LOCATION);
			  g = gimple_build_cond (NE_EXPR, condv,
						 boolean_false_node,
						 lab2, lab1);
			  gimple_seq_add_stmt (ilist, g);
			  gimple_seq_add_stmt (ilist,
					       gimple_build_label (lab1));
			}
		      g = gimple_build_assign (build_simple_mem_ref (cond),
					       boolean_true_node);
		      gimple_seq_add_stmt (ilist, g);
		    }
		  x = lang_hooks.decls.omp_clause_default_ctor
				(c, unshare_expr (new_var),
				 cond ? NULL_TREE
				 : build_outer_var_ref (var, ctx));
		  if (x)
		    gimplify_and_add (x, ilist);

		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      && OMP_CLAUSE_REDUCTION_INSCAN (c))
		    {
		      if (ctx->for_simd_scan_phase)
			goto do_dtor;
		      if (x || (!is_simd
				&& OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
			{
			  tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
			  gimple_add_tmp_var (nv);
			  ctx->cb.decl_map->put (new_vard, nv);
			  x = lang_hooks.decls.omp_clause_default_ctor
				(c, nv, build_outer_var_ref (var, ctx));
			  if (x)
			    gimplify_and_add (x, ilist);
			  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
			    {
			      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			      tree vexpr = nv;
			      if (new_vard != new_var)
				vexpr = build_fold_addr_expr (nv);
			      SET_DECL_VALUE_EXPR (new_vard, vexpr);
			      DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
			      lower_omp (&tseq, ctx);
			      SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
			      DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
			      gimple_seq_add_seq (ilist, tseq);
			    }
			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
			  if (is_simd && ctx->scan_exclusive)
			    {
			      tree nv2
				= create_tmp_var_raw (TREE_TYPE (new_var));
			      gimple_add_tmp_var (nv2);
			      ctx->cb.decl_map->put (nv, nv2);
			      x = lang_hooks.decls.omp_clause_default_ctor
				    (c, nv2, build_outer_var_ref (var, ctx));
			      if (x)
				gimplify_and_add (x, ilist);
			      x = lang_hooks.decls.omp_clause_dtor (c, nv2);
			      if (x)
				gimplify_and_add (x, dlist);
			    }
			  x = lang_hooks.decls.omp_clause_dtor (c, nv);
			  if (x)
			    gimplify_and_add (x, dlist);
			}
		      else if (is_simd
			       && ctx->scan_exclusive
			       && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
			{
			  tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
			  gimple_add_tmp_var (nv2);
			  ctx->cb.decl_map->put (new_vard, nv2);
			  x = lang_hooks.decls.omp_clause_dtor (c, nv2);
			  if (x)
			    gimplify_and_add (x, dlist);
			}
		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		      goto do_dtor;
		    }

		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		    {
		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (ilist, tseq);
		    }
		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		  if (is_simd)
		    {
		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (dlist, tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    }
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		  if (lab2)
		    gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
		  goto do_dtor;
		}
	      else
		{
		  x = omp_reduction_init (c, TREE_TYPE (new_var));
		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);

		  if (cond)
		    {
		      gimple *g;
		      tree lab2 = NULL_TREE;
		      /* GOMP_taskgroup_reduction_register memsets the whole
			 array to zero.  If the initializer is zero, we don't
			 need to initialize it again, just mark it as ever
			 used unconditionally, i.e. cond = true.  */
		      if (initializer_zerop (x))
			{
			  g = gimple_build_assign (build_simple_mem_ref (cond),
						   boolean_true_node);
			  gimple_seq_add_stmt (ilist, g);
			  break;
			}

		      /* Otherwise, emit
			 if (!cond) { cond = true; new_var = x; }  */
		      if (!is_parallel_ctx (ctx))
			{
			  tree condv = create_tmp_var (boolean_type_node);
			  tree m = build_simple_mem_ref (cond);
			  g = gimple_build_assign (condv, m);
			  gimple_seq_add_stmt (ilist, g);
			  tree lab1
			    = create_artificial_label (UNKNOWN_LOCATION);
			  lab2 = create_artificial_label (UNKNOWN_LOCATION);
			  g = gimple_build_cond (NE_EXPR, condv,
						 boolean_false_node,
						 lab2, lab1);
			  gimple_seq_add_stmt (ilist, g);
			  gimple_seq_add_stmt (ilist,
					       gimple_build_label (lab1));
			}
		      g = gimple_build_assign (build_simple_mem_ref (cond),
					       boolean_true_node);
		      gimple_seq_add_stmt (ilist, g);
		      gimplify_assign (new_var, x, ilist);
		      if (lab2)
			gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
		      break;
		    }

		  /* reduction(-:var) sums up the partial results, so it
		     acts identically to reduction(+:var).  */
		  if (code == MINUS_EXPR)
		    code = PLUS_EXPR;

		  tree new_vard = new_var;
		  if (is_simd && omp_is_reference (var))
		    {
		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
		      new_vard = TREE_OPERAND (new_var, 0);
		      gcc_assert (DECL_P (new_vard));
		    }
		  tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
		  if (is_simd
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      && OMP_CLAUSE_REDUCTION_INSCAN (c))
		    rvarp = &rvar;
		  if (is_simd
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar, rvarp,
						       &rvar2))
		    {
		      if (new_vard != new_var)
			{
			  SET_DECL_VALUE_EXPR (new_vard,
					       build_fold_addr_expr (lvar));
			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
			}

		      tree ref = build_outer_var_ref (var, ctx);

		      if (rvarp)
			{
			  if (ctx->for_simd_scan_phase)
			    break;
			  gimplify_assign (ivar, ref, &llist[0]);
			  ref = build_outer_var_ref (var, ctx);
			  gimplify_assign (ref, rvar, &llist[3]);
			  break;
			}

		      gimplify_assign (unshare_expr (ivar), x, &llist[0]);

		      if (sctx.is_simt)
			{
			  if (!simt_lane)
			    simt_lane = create_tmp_var (unsigned_type_node);
			  x = build_call_expr_internal_loc
			    (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
			     TREE_TYPE (ivar), 2, ivar, simt_lane);
			  x = build2 (code, TREE_TYPE (ivar), ivar, x);
			  gimplify_assign (ivar, x, &llist[2]);
			}
		      x = build2 (code, TREE_TYPE (ref), ref, ivar);
		      ref = build_outer_var_ref (var, ctx);
		      gimplify_assign (ref, x, &llist[1]);
		    }
		  else
		    {
		      if (omp_is_reference (var) && is_simd)
			handle_simd_reference (clause_loc, new_vard, ilist);
		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
			  && OMP_CLAUSE_REDUCTION_INSCAN (c))
			break;
		      gimplify_assign (new_var, x, ilist);
		      if (is_simd)
			{
			  tree ref = build_outer_var_ref (var, ctx);

			  x = build2 (code, TREE_TYPE (ref), ref, new_var);
			  ref = build_outer_var_ref (var, ctx);
			  gimplify_assign (ref, x, dlist);
			}
		    }
		}
	      break;

	    default:
	      gcc_unreachable ();
	    }
	}

      if (tskred_avar)
	{
	  tree clobber = build_clobber (TREE_TYPE (tskred_avar));
	  gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar,
							   clobber));
	}
    }
  if (known_eq (sctx.max_vf, 1U))
    {
      sctx.is_simt = false;
      if (ctx->lastprivate_conditional_map)
	{
	  if (gimple_omp_for_combined_into_p (ctx->stmt))
	    {
	      /* Signal to lower_omp_1 that it should use parent context.  */
	      ctx->combined_into_simd_safelen1 = true;
	      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		    && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
		  {
		    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
		    omp_context *outer = ctx->outer;
		    if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
		      outer = outer->outer;
		    tree *v = ctx->lastprivate_conditional_map->get (o);
		    tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
		    tree *pv = outer->lastprivate_conditional_map->get (po);
		    *v = *pv;
		  }
	    }
	  else
	    {
	      /* When not vectorized, treat lastprivate(conditional:) like
		 normal lastprivate, as there will be just one simd lane
		 writing the privatized variable.  */
	      delete ctx->lastprivate_conditional_map;
	      ctx->lastprivate_conditional_map = NULL;
	    }
	}
    }

  if (nonconst_simd_if)
    {
      if (sctx.lane == NULL_TREE)
	{
	  sctx.idx = create_tmp_var (unsigned_type_node);
	  sctx.lane = create_tmp_var (unsigned_type_node);
	}
      /* FIXME: For now.  */
      sctx.is_simt = false;
    }

  if (sctx.lane || sctx.is_simt)
    {
      uid = create_tmp_var (ptr_type_node, "simduid");
      /* Don't want uninit warnings on simduid, it is always uninitialized,
	 but we use it not for the value, but for the DECL_UID only.  */
      TREE_NO_WARNING (uid) = 1;
      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
      OMP_CLAUSE__SIMDUID__DECL (c) = uid;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
      gimple_omp_for_set_clauses (ctx->stmt, c);
    }
  /* Emit calls denoting privatized variables and initializing a pointer to
     structure that holds private variables as fields after ompdevlow pass.  */
  if (sctx.is_simt)
    {
      sctx.simt_eargs[0] = uid;
      gimple *g
	= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
      gimple_call_set_lhs (g, uid);
      gimple_seq_add_stmt (ilist, g);
      sctx.simt_eargs.release ();

      simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
      gimple_call_set_lhs (g, simtrec);
      gimple_seq_add_stmt (ilist, g);
    }
  if (sctx.lane)
    {
      gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
					      2 + (nonconst_simd_if != NULL),
					      uid, integer_zero_node,
					      nonconst_simd_if);
      gimple_call_set_lhs (g, sctx.lane);
      gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
      gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (sctx.lane, INTEGER_CST,
			       build_int_cst (unsigned_type_node, 0));
      gimple_seq_add_stmt (ilist, g);

      if (sctx.lastlane)
	{
	  g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
					  2, uid, sctx.lane);
	  gimple_call_set_lhs (g, sctx.lastlane);
	  gimple_seq_add_stmt (dlist, g);
	  gimple_seq_add_seq (dlist, llist[3]);
	}
      /* Emit reductions across SIMT lanes in log_2(simt_vf) steps.  */
      if (simt_lane)
	{
	  tree simt_vf = create_tmp_var (unsigned_type_node);
	  g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
	  gimple_call_set_lhs (g, simt_vf);
	  gimple_seq_add_stmt (dlist, g);

	  tree t = build_int_cst (unsigned_type_node, 1);
	  g = gimple_build_assign (simt_lane, INTEGER_CST, t);
	  gimple_seq_add_stmt (dlist, g);

	  t = build_int_cst (unsigned_type_node, 0);
	  g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
	  gimple_seq_add_stmt (dlist, g);

	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree header = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (dlist, gimple_build_goto (header));
	  gimple_seq_add_stmt (dlist, gimple_build_label (body));

	  gimple_seq_add_seq (dlist, llist[2]);

	  g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane,
				   integer_one_node);
	  gimple_seq_add_stmt (dlist, g);

	  gimple_seq_add_stmt (dlist, gimple_build_label (header));
	  g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
	  gimple_seq_add_stmt (dlist, g);

	  gimple_seq_add_stmt (dlist, gimple_build_label (end));
	}
      for (int i = 0; i < 2; i++)
	if (llist[i])
	  {
	    tree vf = create_tmp_var (unsigned_type_node);
	    g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
	    gimple_call_set_lhs (g, vf);
	    gimple_seq *seq = i == 0 ? ilist : dlist;
	    gimple_seq_add_stmt (seq, g);
	    tree t = build_int_cst (unsigned_type_node, 0);
	    g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
	    gimple_seq_add_stmt (seq, g);
	    tree body = create_artificial_label (UNKNOWN_LOCATION);
	    tree header = create_artificial_label (UNKNOWN_LOCATION);
	    tree end = create_artificial_label (UNKNOWN_LOCATION);
	    gimple_seq_add_stmt (seq, gimple_build_goto (header));
	    gimple_seq_add_stmt (seq, gimple_build_label (body));
	    gimple_seq_add_seq (seq, llist[i]);
	    t = build_int_cst (unsigned_type_node, 1);
	    g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
	    gimple_seq_add_stmt (seq, g);
	    gimple_seq_add_stmt (seq, gimple_build_label (header));
	    g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
	    gimple_seq_add_stmt (seq, g);
	    gimple_seq_add_stmt (seq, gimple_build_label (end));
	  }
    }
  if (sctx.is_simt)
    {
      gimple_seq_add_seq (dlist, sctx.simt_dlist);
      gcall *g
	= gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
      gimple_seq_add_stmt (dlist, g);
    }

  /* The copyin sequence is not to be executed by the main thread, since
     that would result in self-copies.  Perhaps not visible to scalars,
     but it certainly is to C++ operator=.  */
  if (copyin_seq)
    {
      x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
			   0);
      x = build2 (NE_EXPR, boolean_type_node, x,
		  build_int_cst (TREE_TYPE (x), 0));
      x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
      gimplify_and_add (x, ilist);
    }

  /* If any copyin variable is passed by reference, we must ensure the
     master thread doesn't modify it before it is copied over in all
     threads.  Similarly for variables in both firstprivate and
     lastprivate clauses we need to ensure the lastprivate copying
     happens after firstprivate copying in all threads.  And similarly
     for UDRs if initializer expression refers to omp_orig.  */
  if (copyin_by_ref || lastprivate_firstprivate
      || (reduction_omp_orig_ref
	  && !ctx->scan_inclusive
	  && !ctx->scan_exclusive))
    {
      /* Don't add any barrier for #pragma omp simd or
	 #pragma omp distribute.  */
      if (!is_task_ctx (ctx)
	  && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
	      || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
	gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
    }

  /* If max_vf is non-zero, then we can use only a vectorization factor
     up to the max_vf we chose.  So stick it into the safelen clause.  */
  if (maybe_ne (sctx.max_vf, 0U))
    {
      tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				OMP_CLAUSE_SAFELEN);
      poly_uint64 safe_len;
      if (c == NULL_TREE
	  || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
	      && maybe_gt (safe_len, sctx.max_vf)))
	{
	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
						       sctx.max_vf);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
	  gimple_omp_for_set_clauses (ctx->stmt, c);
	}
    }
}
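
/* Schematically (an editor's sketch, not literal emitted GIMPLE), the SIMT
   reduction loop built above performs a butterfly reduction:

	simt_lane = 1;
	goto header;
      body:
	<llist[2]: ivar = ivar OP GOMP_SIMT_XCHG_BFLY (ivar, simt_lane)>
	simt_lane = simt_lane << 1;
      header:
	if (simt_lane < GOMP_SIMT_VF ()) goto body; else goto end;
      end:

   so after log2(simt_vf) exchange steps every SIMT lane holds the complete
   reduction value.  */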
/* Create temporary variables for lastprivate(conditional:) implementation
   in context CTX with CLAUSES.  */

static void
lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
{
  tree iter_type = NULL_TREE;
  tree cond_ptr = NULL_TREE;
  tree iter_var = NULL_TREE;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
  tree next = *clauses;
  for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	&& OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
      {
	if (is_simd)
	  {
	    tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
	    gcc_assert (cc);
	    if (iter_type == NULL_TREE)
	      {
		iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
		iter_var = create_tmp_var_raw (iter_type);
		DECL_CONTEXT (iter_var) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
		DECL_CHAIN (iter_var) = ctx->block_vars;
		ctx->block_vars = iter_var;
		tree c3
		  = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
		OMP_CLAUSE_DECL (c3) = iter_var;
		OMP_CLAUSE_CHAIN (c3) = *clauses;
		*clauses = c3;
		ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	      }
	    next = OMP_CLAUSE_CHAIN (cc);
	    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	    tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
	    ctx->lastprivate_conditional_map->put (o, v);
	  }
	else
	  {
	    if (iter_type == NULL)
	      {
		if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
		  {
		    struct omp_for_data fd;
		    omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
					  NULL);
		    iter_type = unsigned_type_for (fd.iter_type);
		  }
		else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		  iter_type = unsigned_type_node;
		tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
		if (c2)
		  {
		    cond_ptr
		      = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
		    OMP_CLAUSE_DECL (c2) = cond_ptr;
		  }
		else
		  {
		    cond_ptr
		      = create_tmp_var_raw (build_pointer_type (iter_type));
		    DECL_CONTEXT (cond_ptr) = current_function_decl;
		    DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
		    DECL_CHAIN (cond_ptr) = ctx->block_vars;
		    ctx->block_vars = cond_ptr;
		    c2 = build_omp_clause (UNKNOWN_LOCATION,
					   OMP_CLAUSE__CONDTEMP_);
		    OMP_CLAUSE_DECL (c2) = cond_ptr;
		    OMP_CLAUSE_CHAIN (c2) = *clauses;
		    *clauses = c2;
		  }
		iter_var = create_tmp_var_raw (iter_type);
		DECL_CONTEXT (iter_var) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
		DECL_CHAIN (iter_var) = ctx->block_vars;
		ctx->block_vars = iter_var;
		tree c3
		  = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
		OMP_CLAUSE_DECL (c3) = iter_var;
		OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
		OMP_CLAUSE_CHAIN (c2) = c3;
		ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	      }
	    tree v = create_tmp_var_raw (iter_type);
	    DECL_CONTEXT (v) = current_function_decl;
	    DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
	    DECL_CHAIN (v) = ctx->block_vars;
	    ctx->block_vars = v;
	    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	    ctx->lastprivate_conditional_map->put (o, v);
	  }
      }
}
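
/* For reference, a hypothetical use this transformation supports (an
   editor's sketch, not taken from the testsuite):

	#pragma omp parallel for lastprivate (conditional: x)
	for (i = 0; i < n; i++)
	  if (a[i] > 0)
	    x = a[i];

   Each conditional write to X is paired with a store of the current
   iteration number into the _condtemp_ variable created above; after the
   loop the candidate whose recorded iteration is largest wins, which
   matches the sequential semantics the clause requires.  */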
/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  BODY_P is the sequence to insert early initialization
   if needed, STMT_LIST is where the non-conditional lastprivate handling
   goes into and CSTMT_LIST is a sequence that needs to be run in a critical
   section.  */

static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
			   gimple_seq *stmt_list, gimple_seq *cstmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
  unsigned HOST_WIDE_INT conditional_off = 0;
  gimple_seq post_stmt_list = NULL;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }

  bool maybe_simt = false;
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    {
      maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
      simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
      if (simduid)
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
    }

  if (predicate)
    {
      gcond *stmt;
      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	{
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	}
      else
	{
	  arm1 = predicate;
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	}
      if (maybe_simt)
	{
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
						 1, simtcond);
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (g, c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
				    label_true, label);
	}
      else
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }

  tree cond_ptr = NULL_TREE;
  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);
      gimple_seq *this_stmt_list = stmt_list;
      tree lab2 = NULL_TREE;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	  && ctx->lastprivate_conditional_map
	  && !ctx->combined_into_simd_safelen1)
	{
	  gcc_assert (body_p);
	  if (cond_ptr == NULL_TREE)
	    {
	      cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
	      cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
	    }
	  tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  tree v = *ctx->lastprivate_conditional_map->get (o);
	  gimplify_assign (v, build_zero_cst (type), body_p);
	  this_stmt_list = cstmt_list;
	  tree mem;
	  if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
	    {
	      mem = build2 (MEM_REF, type, cond_ptr,
			    build_int_cst (TREE_TYPE (cond_ptr),
					   conditional_off));
	      conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
	    }
	  else
	    mem = build4 (ARRAY_REF, type, cond_ptr,
			  size_int (conditional_off++), NULL_TREE, NULL_TREE);
	  tree mem2 = copy_node (mem);
	  gimple_seq seq = NULL;
	  mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (this_stmt_list, seq);
	  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
	  lab2 = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
	  gimple_seq_add_stmt (this_stmt_list, g);
	  gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
	  gimplify_assign (mem2, v, this_stmt_list);
	}
      else if (predicate
	       && ctx->combined_into_simd_safelen1
	       && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	       && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	       && ctx->lastprivate_conditional_map)
	this_stmt_list = &post_stmt_list;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	    {
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx->outer);
	    }
	  else
	    {
	      new_var = lookup_decl (var, ctx);
	      /* Avoid uninitialized warnings for lastprivate and
		 for linear iterators.  */
	      if (predicate
		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
		TREE_NO_WARNING (new_var) = 1;
	    }

	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node);
		      gcall *g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (this_stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		  TREE_THIS_NOTRAP (new_var) = 1;
		}
	    }
	  else if (maybe_simt)
	    {
	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
			  ? DECL_VALUE_EXPR (new_var)
			  : new_var);
	      if (simtlast == NULL)
		{
		  simtlast = create_tmp_var (unsigned_type_node);
		  gcall *g = gimple_build_call_internal
		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		  gimple_call_set_lhs (g, simtlast);
		  gimple_seq_add_stmt (this_stmt_list, g);
		}
	      x = build_call_expr_internal_loc
		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		 TREE_TYPE (val), 2, val, simtlast);
	      new_var = unshare_expr (new_var);
	      gimplify_assign (new_var, x, this_stmt_list);
	      new_var = unshare_expr (new_var);
	    }

	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
	    }

	  x = NULL_TREE;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
	      && is_taskloop_ctx (ctx))
	    {
	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
							  ctx->outer->outer);
	      if (is_global_var (ovar))
		x = ovar;
	    }
	  if (!x)
	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
	  if (omp_is_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, this_stmt_list);

	  if (lab2)
	    gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
	}

      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
  gimple_seq_add_seq (stmt_list, post_stmt_list);
}
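
/* With a PREDICATE the copy-out emitted above is guarded, schematically
   (an editor's sketch of the generated control flow):

	if (<predicate>) goto label_true; else goto label;
      label_true:
	<outer var> = <privatized var>;	 ... one per clause ...
      label:

   Under SIMT the predicate is first combined across lanes with
   IFN_GOMP_SIMT_VOTE_ANY so every lane agrees on whether any lane
   executed the last iteration.  */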
/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
   (which might be a placeholder).  INNER is true if this is an inner
   axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
   join markers.  Generate the before-loop forking sequence in
   FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
   general form of these sequences is

     GOACC_REDUCTION_SETUP
     GOACC_FORK
     GOACC_REDUCTION_INIT
     ...
     GOACC_REDUCTION_FINI
     GOACC_JOIN
     GOACC_REDUCTION_TEARDOWN.  */

static void
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *join, gimple_seq *fork_seq,
		       gimple_seq *join_seq, omp_context *ctx)
{
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	  rcode = PLUS_EXPR;
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	if (!var)
	  var = orig;

	incoming = outgoing = var;

	if (!inner)
	  {
	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
	      {
		enum gimple_code type = gimple_code (probe->stmt);
		tree cls;

		switch (type)
		  {
		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);
		    break;

		  case GIMPLE_OMP_TARGET:
		    if ((gimple_omp_target_kind (probe->stmt)
			 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
			&& (gimple_omp_target_kind (probe->stmt)
			    != GF_OMP_TARGET_KIND_OACC_SERIAL))
		      goto do_lookup;

		    cls = gimple_omp_target_clauses (probe->stmt);
		    break;

		  default:
		    goto do_lookup;
		  }

		outer = probe;
		for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		    {
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		    }
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))
		    {
		      is_private = true;
		      goto do_lookup;
		    }
	      }

	  do_lookup:
	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
	      {
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_is_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		outgoing = var;
		incoming = omp_reduction_init_op (loc, rcode, type);
	      }
	    else
	      {
		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		tree t = NULL_TREE;
		omp_context *c = ctx->outer;
		while (c && !t)
		  {
		    t = maybe_lookup_decl (orig, c);
		    c = c->outer;
		  }
		incoming = outgoing = (t ? t : orig);
	      }

	  has_outer_reduction:;
	  }

	if (!ref_to_res)
	  ref_to_res = integer_zero_node;

	if (omp_is_reference (orig))
	  {
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	    if (!inner)
	      {
		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
	      }

	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);
	  }
	else
	  v1 = v2 = v3 = var;

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);

	if (!init_code)
	  {
	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);
	  }

	tree setup_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  incoming, level, op, off);
	tree init_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  v1, level, op, off);
	tree fini_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  v2, level, op, off);
	tree teardown_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, v3, level, op, off);

	gimplify_assign (v1, setup_call, &before_fork);
	gimplify_assign (v2, init_call, &after_fork);
	gimplify_assign (v3, fini_call, &before_join);
	gimplify_assign (outgoing, teardown_call, &after_join);
      }

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
  if (fork)
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
  if (join)
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
}
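
/* Each of the four calls built above has the same shape, e.g. (sketch)

	v1 = GOACC_REDUCTION (SETUP, ref_to_res, incoming, level, op, offset);

   The device compiler later expands these IFN_GOACC_REDUCTION calls
   according to the partitioning level actually chosen, using OFFSET to
   address a per-variable slot in the reduction buffer.  */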
/* Generate code to implement the REDUCTION clauses, append it
   to STMT_SEQP.  CLIST if non-NULL is a pointer to a sequence
   that should be emitted also inside of the critical section,
   in that case clear *CLIST afterwards, otherwise leave it as is
   and let the caller emit it itself.  */

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
			 gimple_seq *clist, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    return;

  /* inscan reductions are handled elsewhere.  */
  if (ctx->scan_inclusive || ctx->scan_exclusive)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	{
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer or referenced based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      if (count == 1)
	{
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_is_reference (var) && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_is_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      x = build2 (code, TREE_TYPE (out), out, priv);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_is_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  if (clist)
    {
      gimple_seq_add_seq (stmt_seqp, *clist);
      *clist = NULL;
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
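
/* For instance (an editor's sketch), a lone 'reduction(+:s)' clause is
   lowered via the count == 1 path above to a relaxed atomic update,
   conceptually

	#pragma omp atomic relaxed
	s_outer = s_outer + s_private;

   whereas several clauses, array sections or UDRs take the fallback path
   and are merged between GOMP_atomic_start () and GOMP_atomic_end ().  */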
/* Generate code to implement the COPYPRIVATE clauses.  */

static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
			   omp_context *ctx)
{
  tree c;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
	{
	  x = build_fold_addr_expr_loc (clause_loc, new_var);
	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
	}
      gimplify_assign (ref, x, slist);

      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
	{
	  ref = fold_convert_loc (clause_loc,
				  build_pointer_type (TREE_TYPE (new_var)),
				  ref);
	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
	}
      if (omp_is_reference (var))
	{
	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
	  ref = build_simple_mem_ref_loc (clause_loc, ref);
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	}
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}
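
/* Schematically (sketch): on the sending side each COPYPRIVATE variable is
   stored (or its address, when BY_REF) into the .omp_copy_o structure,

	.omp_copy_o.a = a;	  or	.omp_copy_o.a = &a;

   and on the receiving side the other threads copy it back out of the
   structure returned by GOMP_single_copy_start.  */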
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    TREE_NO_WARNING (var) = 1;
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_is_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  x = build_sender_ref (ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
		 or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */

static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels = MAX (levels, 1);
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels = MAX (levels, 2);
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels = MAX (levels, 3);
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    tag |= OLF_INDEPENDENT;

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning.  */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
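
/* The head marker emitted above is a single internal call, roughly
   (sketch):

	ddvar = IFN_UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag [, gang_static]);

   The tag word packs the OLF_* partitioning flags; the device-side
   OpenACC lowering later reads these markers to assign concrete
   gang/worker/vector dimensions.  */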
/* Emit an OpenACC loop head or tail marker to SEQ.  LEVEL is the
   partitioning level of the enclosed region.  */

static void
lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
			tree tofollow, gimple_seq *seq)
{
  int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
		     : IFN_UNIQUE_OACC_TAIL_MARK);
  tree marker = build_int_cst (integer_type_node, marker_kind);
  int nargs = 2 + (tofollow != NULL_TREE);
  gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
					    marker, ddvar, tofollow);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);
}
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  */

static void
lower_oacc_head_tail (location_t loc, tree clauses,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
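
/* For a loop partitioned over two levels the resulting HEAD/TAIL pair is,
   roughly (an editor's sketch):

     HEAD: HEAD_MARK(2)  <setup>  OACC_FORK  <init>
	   HEAD_MARK(1)  <setup>  OACC_FORK  <init>
	   HEAD_MARK()
     TAIL: TAIL_MARK(1)  <fini>   OACC_JOIN  <teardown>
	   TAIL_MARK(2)  <fini>   OACC_JOIN  <teardown>

   i.e. the levels nest: each iteration of the loop above appends one level
   to HEAD and prepends its join sequence to TAIL.  */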
/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
   catch handler and return it.  This prevents programs from violating the
   structured block semantics with throws.  */

static gimple_seq
maybe_catch_exception (gimple_seq body)
{
  gimple *g;
  tree decl;

  if (!flag_exceptions)
    return body;

  if (lang_hooks.eh_protect_cleanup_actions != NULL)
    decl = lang_hooks.eh_protect_cleanup_actions ();
  else
    decl = builtin_decl_explicit (BUILT_IN_TRAP);

  g = gimple_build_eh_must_not_throw (decl);
  g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
			GIMPLE_TRY_CATCH);

  return gimple_seq_alloc_with_stmt (g);
}
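
/* In effect (sketch), for C++ compiled with -fexceptions the lowered body
   behaves like

	try { BODY; } catch (...) { <must-not-throw handler> }

   where the handler is the language's eh_protect_cleanup_actions hook if
   it provides one, and __builtin_trap otherwise.  */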
/* Routines to lower OMP directives into OMP-GIMPLE.  */

/* If ctx is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
   and conditional branch to parallel's cancel_label to handle
   cancellation in the implicit barrier.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
	break;
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
      break;
}
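
/* The check added above is, schematically (sketch):

	lhs = <implicit barrier via GIMPLE_OMP_RETURN, returning the
	       cancellation flag>
	if (lhs != false) goto <parallel cancel_label>;
      fallthru_label:

   so a cancelled parallel region skips from the worksharing barrier
   straight to its cancellation handling.  */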
/* Find the first task_reduction or reduction clause or return NULL
   if there are none.  */

static tree
omp_task_reductions_find_first (tree clauses, enum tree_code code,
				enum omp_clause_code ccode)
{
  while (1)
    {
      clauses = omp_find_clause (clauses, ccode);
      if (clauses == NULL_TREE)
	return NULL_TREE;
      if (ccode != OMP_CLAUSE_REDUCTION
	  || code == OMP_TASKLOOP
	  || OMP_CLAUSE_REDUCTION_TASK (clauses))
	return clauses;
      clauses = OMP_CLAUSE_CHAIN (clauses);
    }
}

static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
				       gimple_seq *, gimple_seq *);
/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
  lower_lastprivate_conditional_clauses (clauses_ptr, ctx);

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  control = create_tmp_var (unsigned_type_node, ".section");
  gimple_omp_sections_set_control (stmt, control);

  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &ilist, &l, &clist, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
			   &clist, ctx);
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
      gimple_seq_add_seq (&olist, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
    }

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  t = gimple_build_omp_continue (control, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
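
/* After this lowering the sections construct has the overall shape
   (an editor's sketch):

	<ilist: privatization / reduction registration>
	GIMPLE_OMP_SECTIONS <clauses, control = .section>
	GIMPLE_OMP_SECTIONS_SWITCH
	bind { <section bodies, the last with lastprivate copy-out> }
	GIMPLE_OMP_CONTINUE (.section, .section)
	<olist: reductions>  <cancel label>  <dlist: destructors>
	GIMPLE_OMP_RETURN [nowait]
	<tred_dlist: task reduction teardown>  */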
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, without a copyprivate clause:

	if (GOMP_single_start ())
	  BODY;
	[ GOMP_barrier (); ]	-> unless 'nowait' is present.

   FIXME.  It may be better to delay expanding the logic of this until
   pass_expand_omp.  The expanded logic may make the job more difficult
   to a synchronization analysis pass.  */

static void
lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
{
  location_t loc = gimple_location (single_stmt);
  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *call, *cond;
  tree lhs, decl;

  decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
  lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
  call = gimple_build_call (decl, 0);
  gimple_call_set_lhs (call, lhs);
  gimple_seq_add_stmt (pre_p, call);

  cond = gimple_build_cond (EQ_EXPR, lhs,
			    fold_convert_loc (loc, TREE_TYPE (lhs),
					      boolean_true_node),
			    tlabel, flabel);
  gimple_seq_add_stmt (pre_p, cond);
  gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
  gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
}

/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

	{
	  if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	    {
	      BODY;
	      copyout.a = a;
	      copyout.b = b;
	      copyout.c = c;
	      GOMP_single_copy_end (&copyout);
	    }
	  else
	    {
	      a = copyout_p->a;
	      b = copyout_p->b;
	      c = copyout_p->c;
	    }
	  GOMP_barrier ();
	}

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			     &copyin_seq, ctx);

  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
/* Expand code for an OpenMP single directive.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
/* Expand code for an OpenMP master directive.  */

static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, lab = NULL, x, bfn_decl;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tseq;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  x = build_call_expr_loc (loc, bfn_decl, 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  tseq = NULL;
  gimplify_and_add (x, &tseq);
  gimple_bind_add_seq (bind, tseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_label (lab));

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
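
/* Net effect of the above (a sketch): the master body BODY is lowered to

	if (omp_get_thread_num () == 0)
	  BODY;

   i.e. the conditional jump to LAB built above.  Unlike 'single', master
   implies no barrier, hence the GIMPLE_OMP_RETURN is built with nowait
   set to true.  */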
/* Helper function for lower_omp_task_reductions.  For a specific PASS,
   find the current clause that should be processed, or return false
   if all have been processed already.  */

static inline bool
omp_task_reduction_iterate (int pass, enum tree_code code,
			    enum omp_clause_code ccode, tree *c, tree *decl,
			    tree *type, tree *next)
{
  for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
    {
      if (ccode == OMP_CLAUSE_REDUCTION
	  && code != OMP_TASKLOOP
	  && !OMP_CLAUSE_REDUCTION_TASK (*c))
	continue;
      *decl = OMP_CLAUSE_DECL (*c);
      *type = TREE_TYPE (*decl);
      if (TREE_CODE (*decl) == MEM_REF)
	{
	  if (pass != 1)
	    continue;
	}
      else
	{
	  if (omp_is_reference (*decl))
	    *type = TREE_TYPE (*type);
	  if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
	    continue;
	}
      *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
      return true;
    }
  *decl = NULL_TREE;
  *type = NULL_TREE;
  *next = NULL_TREE;
  return false;
}
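
/* Sketch of the intended calling pattern for the iterator above (the real
   loops follow in lower_omp_task_reductions):

	tree decl, type, next;
	for (int pass = 0; pass < 2; pass++)
	  for (tree c = clauses;
	       omp_task_reduction_iterate (pass, code, ccode,
					   &c, &decl, &type, &next);
	       c = next)
	    ... process clause C, whose DECL has TYPE ...

   Pass 0 visits reductions with constant-sized types; pass 1 visits the
   MEM_REF (array section) and variable-sized ones.  */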
/* Lower task_reduction and reduction clauses (the latter unless CODE is
   OMP_TASKGROUP only with task modifier).  Register the mappings in the
   START sequence; reduce them and unregister them in the END sequence.  */

static void
lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
			   gimple_seq *start, gimple_seq *end)
{
  enum omp_clause_code ccode
    = (code == OMP_TASKGROUP
       ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
  tree cancellable = NULL_TREE;
  clauses = omp_task_reductions_find_first (clauses, code, ccode);
  if (clauses == NULL_TREE)
    return;

  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
	if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	    && outer->cancellable)
	  {
	    cancellable = error_mark_node;
	    break;
	  }
	else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
	  break;
    }
  tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree *last = &TYPE_FIELDS (record_type);
  unsigned cnt = 0;
  tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			   ptr_type_node);
  tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			    ptr_type_node);
  *last = field;
  DECL_CHAIN (field) = ifield;
  last = &DECL_CHAIN (ifield);
  DECL_CONTEXT (field) = record_type;
  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
  DECL_CONTEXT (ifield) = record_type;
  if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
    SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  ++cnt;
	  tree new_type = type;
	  if (ctx->outer)
	    new_type = remap_type (type, &ctx->outer->cb);
	  tree field
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
			  new_type);
	  if (DECL_P (decl) && type == TREE_TYPE (decl))
	    {
	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	    }
	  else
	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
	  DECL_CONTEXT (field) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
	  *last = field;
	  last = &DECL_CHAIN (field);
	  tree bfield
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
			  boolean_type_node);
	  DECL_CONTEXT (bfield) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
	  *last = bfield;
	  last = &DECL_CHAIN (bfield);
	}
    }
  *last = NULL_TREE;
  layout_type (record_type);
  /* Build up an array which registers with the runtime all the reductions
     and deregisters them at the end.  Format documented in libgomp/task.c.  */
  tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
  tree avar = create_tmp_var_raw (atype);
  gimple_add_tmp_var (avar);
  TREE_ADDRESSABLE (avar) = 1;
  tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
		   NULL_TREE, NULL_TREE);
  tree t = build_int_cst (pointer_sized_int_node, cnt);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  gimple_seq seq = NULL;
  tree sz = fold_convert (pointer_sized_int_node,
			  TYPE_SIZE_UNIT (record_type));
  int cachesz = 64;
  sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, cachesz - 1));
  sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
  ctx->task_reductions.create (1 + cnt);
  ctx->task_reduction_map = new hash_map<tree, unsigned>;
  ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
				   ? sz : NULL_TREE);
  sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
  gimple_seq_add_seq (start, seq);
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
	      NULL_TREE, NULL_TREE);
  gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node,
		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, -1);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, 0);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
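
  /* At this point the header of the bookkeeping array is initialized as
     sketched below (a summary of the stores above; libgomp/task.c is the
     authoritative description of the format):

	avar[0] = cnt;	// number of reductions
	avar[1] = sz;	// per-thread block size, rounded up to cachesz
	avar[2] = alignment; later the run time stores here the pointer
			     to the allocated private-copies block, which
			     is read back into DATA in the END sequence
	avar[3] = -1; avar[4] = 0;  // run-time bookkeeping
	avar[7 + 3*i] and avar[7 + 3*i + 1] receive the address and the
	record offset of the i-th reduction in the loops further below.  */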
  /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
     and for each task reduction checks a bool right after the private variable
     within that thread's chunk; if the bool is clear, it hasn't been
     initialized and thus isn't going to be reduced nor destructed, otherwise
     reduce and destruct it.  */
  tree idx = create_tmp_var (size_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
  tree num_thr_sz = create_tmp_var (size_type_node);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = NULL_TREE;
  gimple *g;
  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      /* For worksharing constructs, only perform it in the master thread,
	 with the exception of cancelled implicit barriers - then only handle
	 the current thread.  */
      tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
      tree thr_num = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, thr_num);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	{
	  tree c;
	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  if (code == OMP_FOR)
	    c = gimple_omp_for_clauses (ctx->stmt);
	  else /* if (code == OMP_SECTIONS) */
	    c = gimple_omp_sections_clauses (ctx->stmt);
	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
	  cancellable = c;
	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
				 lab5, lab6);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab5));
	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
	  gimple_seq_add_stmt (end, g);
	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
				   build_one_cst (TREE_TYPE (idx)));
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_goto (lab3));
	  gimple_seq_add_stmt (end, gimple_build_label (lab6));
	}
      g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
      gimple_seq_add_stmt (end, g);
      gimple_seq_add_stmt (end, gimple_build_label (lab4));
    }
  if (code != OMP_PARALLEL)
    {
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
      tree num_thr = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, num_thr);
      gimple_seq_add_stmt (end, g);
      g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
      gimple_seq_add_stmt (end, g);
      if (lab3)
	gimple_seq_add_stmt (end, gimple_build_label (lab3));
    }
  else
    {
      tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
      t = fold_convert (size_type_node, t);
      gimplify_assign (num_thr_sz, t, end);
    }

  t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  tree data = create_tmp_var (pointer_sized_int_node);
  gimple_seq_add_stmt (end, gimple_build_assign (data, t));
  gimple_seq_add_stmt (end, gimple_build_label (lab1));
  tree ptr;
  if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
    ptr = create_tmp_var (build_pointer_type (record_type));
  else
    ptr = create_tmp_var (ptr_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
  tree field = TYPE_FIELDS (record_type);
  cnt = 0;
  field = DECL_CHAIN (DECL_CHAIN (field));
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  tree var = decl, ref;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      tree v = var;
	      if (TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      else if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	      tree orig_var = var;
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (orig_var != var)
		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
	      else if (TREE_CODE (v) == ADDR_EXPR)
		t = build_fold_addr_expr (t);
	      else if (TREE_CODE (v) == INDIRECT_REF)
		t = build_fold_indirect_ref (t);
	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
		}
	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
				 fold_convert (size_type_node,
					       TREE_OPERAND (decl, 1)));
	    }
	  else
	    {
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (!omp_is_reference (decl))
		t = build_fold_addr_expr (t);
	    }
	  t = fold_convert (pointer_sized_int_node, t);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
	  t = unshare_expr (byte_position (field));
	  t = fold_convert (pointer_sized_int_node, t);
	  ctx->task_reduction_map->put (c, cnt);
	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
					   ? t : NULL_TREE);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
	  tree bfield = DECL_CHAIN (field);
	  tree cond;
	  if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
	    /* In parallel or worksharing all threads unconditionally
	       initialize all their task reduction private variables.  */
	    cond = boolean_true_node;
	  else if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
			     unshare_expr (byte_position (bfield)));
	      seq = NULL;
	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
			     build_int_cst (pbool, 0));
	    }
	  else
	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
	  tree condv = create_tmp_var (boolean_type_node);
	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
				 lab3, lab4);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab3));
	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
	    {
	      /* If this reduction doesn't need destruction and parallel
		 has been cancelled, there is nothing to do for this
		 reduction, so jump around the merge operation.  */
	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	      g = gimple_build_cond (NE_EXPR, cancellable,
				     build_zero_cst (TREE_TYPE (cancellable)),
				     lab4, lab5);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (lab5));
	    }

	  tree new_var;
	  if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
				unshare_expr (byte_position (field)));
	      seq = NULL;
	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (field));
	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
				build_int_cst (pbool, 0));
	    }
	  else
	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
			      build_simple_mem_ref (ptr), field, NULL_TREE);
	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	  if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
	    ref = build_simple_mem_ref (ref);
	  /* reduction(-:var) sums up the partial results, so it acts
	     identically to reduction(+:var).  */
	  if (rcode == MINUS_EXPR)
	    rcode = PLUS_EXPR;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree type = TREE_TYPE (new_var);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      tree i = create_tmp_var (TREE_TYPE (v));
	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      if (DECL_P (v))
		{
		  v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  tree vv = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (vv, v, start);
		  v = vv;
		}
	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	      new_var = build_fold_addr_expr (new_var);
	      new_var = fold_convert (ptype, new_var);
	      ref = fold_convert (ptype, ref);
	      tree m = create_tmp_var (ptype);
	      gimplify_assign (m, new_var, end);
	      new_var = m;
	      m = create_tmp_var (ptype);
	      gimplify_assign (m, ref, end);
	      ref = m;
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (end, gimple_build_label (body));
	      tree priv = build_simple_mem_ref (new_var);
	      tree out = build_simple_mem_ref (ref);
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  tree lab6 = NULL_TREE;
		  if (cancellable)
		    {
		      /* If this reduction needs destruction and parallel
			 has been cancelled, jump around the merge operation
			 to the destruction.  */
		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
					     lab6, lab5);
		      gimple_seq_add_stmt (end, g);
		      gimple_seq_add_stmt (end, gimple_build_label (lab5));
		    }
		  SET_DECL_VALUE_EXPR (placeholder, out);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
		  gimple_seq_add_seq (end,
				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    {
		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
		    }
		  if (lab6)
		    gimple_seq_add_stmt (end, gimple_build_label (lab6));
		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
		  if (x)
		    {
		      gimple_seq tseq = NULL;
		      gimplify_stmt (&x, &tseq);
		      gimple_seq_add_seq (end, tseq);
		    }
		}
	      else
		{
		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
		  out = unshare_expr (out);
		  gimplify_assign (out, x, end);
		}
	      g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (endl));
	    }
	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree oldv = NULL_TREE;
	      tree lab6 = NULL_TREE;
	      if (cancellable)
		{
		  /* If this reduction needs destruction and parallel
		     has been cancelled, jump around the merge operation
		     to the destruction.  */
		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
					 lab6, lab5);
		  gimple_seq_add_stmt (end, g);
		  gimple_seq_add_stmt (end, gimple_build_label (lab5));
		}
	      if (omp_is_reference (decl)
		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
						 TREE_TYPE (ref)))
		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      tree refv = create_tmp_var (TREE_TYPE (ref));
	      gimplify_assign (refv, ref, end);
	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
	      SET_DECL_VALUE_EXPR (placeholder, ref);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      tree d = maybe_lookup_decl (decl, ctx);
	      gcc_assert (d);
	      if (DECL_HAS_VALUE_EXPR_P (d))
		oldv = DECL_VALUE_EXPR (d);
	      if (omp_is_reference (var))
		{
		  tree v = fold_convert (TREE_TYPE (d),
					 build_fold_addr_expr (new_var));
		  SET_DECL_VALUE_EXPR (d, v);
		}
	      else
		SET_DECL_VALUE_EXPR (d, new_var);
	      DECL_HAS_VALUE_EXPR_P (d) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      if (oldv)
		SET_DECL_VALUE_EXPR (d, oldv);
	      else
		{
		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
		  DECL_HAS_VALUE_EXPR_P (d) = 0;
		}
	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      if (lab6)
		gimple_seq_add_stmt (end, gimple_build_label (lab6));
	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;
		  gimplify_stmt (&x, &tseq);
		  gimple_seq_add_seq (end, tseq);
		}
	    }
	  else
	    {
	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
	      ref = unshare_expr (ref);
	      gimplify_assign (ref, x, end);
	    }
	  gimple_seq_add_stmt (end, gimple_build_label (lab4));
	  ++cnt;
	  field = DECL_CHAIN (bfield);
	}
    }
  if (code == OMP_TASKGROUP)
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
      gimple_seq_add_stmt (start, g);
    }
  else
    {
      tree c;
      if (code == OMP_FOR)
	c = gimple_omp_for_clauses (ctx->stmt);
      else if (code == OMP_SECTIONS)
	c = gimple_omp_sections_clauses (ctx->stmt);
      else
	c = gimple_omp_taskreg_clauses (ctx->stmt);
      c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
			build_fold_addr_expr (avar));
      gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
    }

  gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
  gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
						 size_one_node));
  g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
  gimple_seq_add_stmt (end, g);
  gimple_seq_add_stmt (end, gimple_build_label (lab2));
  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      enum built_in_function bfn
	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
      t = builtin_decl_explicit (bfn);
      tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
      tree arg;
      if (cancellable)
	{
	  arg = create_tmp_var (c_bool_type);
	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
							 cancellable));
	}
      else
	arg = build_int_cst (c_bool_type, 0);
      g = gimple_build_call (t, 1, arg);
    }
  else
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
    }
  gimple_seq_add_stmt (end, g);
  t = build_constructor (atype, NULL);
  TREE_THIS_VOLATILE (t) = 1;
  gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
}
/* Expand code for an OpenMP taskgroup directive.  */

static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  gcall *x;
  gbind *bind;
  gimple_seq dseq = NULL;
  tree block = make_node (BLOCK);

  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  push_gimplify_context ();

  x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
			 0);
  gimple_bind_add_stmt (bind, x);

  lower_omp_task_reductions (ctx, OMP_TASKGROUP,
			     gimple_omp_taskgroup_clauses (stmt),
			     gimple_bind_body_ptr (bind), &dseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
  gimple_bind_add_seq (bind, dseq);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
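
/* Illustrative user-level trigger for the above (a sketch, assuming
   -fopenmp; work is a placeholder function):

	int sum = 0;
	#pragma omp taskgroup task_reduction (+:sum)
	{
	  for (int i = 0; i < 64; i++)
	    #pragma omp task in_reduction (+:sum)
	    sum += work (i);
	}

   GOMP_taskgroup_start is emitted before the body, the reduction
   bookkeeping array is registered by lower_omp_task_reductions, and the
   merge/unregister code in DSEQ lands after the GIMPLE_OMP_RETURN.  */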
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
   */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  else
	    {
	      /* Calculate minimum for the remaining dimensions.  */
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp ordered threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
/* Expand code for an OpenMP ordered directive.  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
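
/* For the common case without 'simd' the net effect of the above is the
   classic lowering (sketch):

	GOMP_ordered_start ();
	BODY;
	GOMP_ordered_end ();

   wrapped in a GIMPLE_BIND; with 'simd' the IFN_GOMP_SIMD_ORDERED_START
   and IFN_GOMP_SIMD_ORDERED_END internal calls delimit the region instead,
   and for SIMT targets the extra per-lane loop built above serializes the
   lanes via IFN_GOMP_SIMT_ORDERED_PRED.  */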
/* Expand code for an OpenMP scan directive and the structured block
   before the scan directive.  */

static void
lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  bool has_clauses
    = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
  tree lane = NULL_TREE;
  gimple_seq before = NULL;
  omp_context *octx = ctx->outer;
  gcc_assert (octx);
  if (octx->scan_exclusive && !has_clauses)
    {
      gimple_stmt_iterator gsi2 = *gsi_p;
      gsi_next (&gsi2);
      gimple *stmt2 = gsi_stmt (gsi2);
      /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
	 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
	 the one with exclusive clause(s), comes first.  */
      if (stmt2
	  && gimple_code (stmt2) == GIMPLE_OMP_SCAN
	  && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
	{
	  gsi_remove (gsi_p, false);
	  gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
	  ctx = maybe_lookup_ctx (stmt2);
	  gcc_assert (ctx);
	  lower_omp_scan (gsi_p, ctx);
	  return;
	}
    }

  bool input_phase = has_clauses ^ octx->scan_inclusive;
  bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
  bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
		 && !gimple_omp_for_combined_p (octx->stmt));
  bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
  if (is_for_simd && octx->for_simd_scan_phase)
    is_simd = false;
  if (is_simd)
    if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
				  OMP_CLAUSE__SIMDUID_))
      {
	tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
	lane = create_tmp_var (unsigned_type_node);
	tree t = build_int_cst (integer_type_node,
				input_phase ? 1
				: octx->scan_inclusive ? 2 : 3);
	gimple *g
	  = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
	gimple_call_set_lhs (g, lane);
	gimple_seq_add_stmt (&before, g);
      }

  if (is_simd || is_for)
    {
      for (tree c = gimple_omp_for_clauses (octx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_INSCAN (c))
	  {
	    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	    tree var = OMP_CLAUSE_DECL (c);
	    tree new_var = lookup_decl (var, octx);
	    tree val = new_var;
	    tree var2 = NULL_TREE;
	    tree var3 = NULL_TREE;
	    tree var4 = NULL_TREE;
	    tree lane0 = NULL_TREE;
	    tree new_vard = new_var;
	    if (omp_is_reference (var))
	      {
		new_var = build_simple_mem_ref_loc (clause_loc, new_var);
		val = new_var;
	      }
	    if (DECL_HAS_VALUE_EXPR_P (new_vard))
	      {
		val = DECL_VALUE_EXPR (new_vard);
		if (new_vard != new_var)
		  {
		    gcc_assert (TREE_CODE (val) == ADDR_EXPR);
		    val = TREE_OPERAND (val, 0);
		  }
		if (TREE_CODE (val) == ARRAY_REF
		    && VAR_P (TREE_OPERAND (val, 0)))
		  {
		    tree v = TREE_OPERAND (val, 0);
		    if (lookup_attribute ("omp simd array",
					  DECL_ATTRIBUTES (v)))
		      {
			val = unshare_expr (val);
			lane0 = TREE_OPERAND (val, 1);
			TREE_OPERAND (val, 1) = lane;
			var2 = lookup_decl (v, octx);
			if (octx->scan_exclusive)
			  var4 = lookup_decl (var2, octx);
			if (input_phase
			    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
			  var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
			if (!input_phase)
			  {
			    var2 = build4 (ARRAY_REF, TREE_TYPE (val),
					   var2, lane, NULL_TREE, NULL_TREE);
			    TREE_THIS_NOTRAP (var2) = 1;
			    if (octx->scan_exclusive)
			      {
				var4 = build4 (ARRAY_REF, TREE_TYPE (val),
					       var4, lane, NULL_TREE,
					       NULL_TREE);
				TREE_THIS_NOTRAP (var4) = 1;
			      }
			  }
			else
			  var2 = val;
		      }
		  }
	      }
	    else
	      {
		var2 = build_outer_var_ref (var, octx);
		if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		  {
		    var3 = maybe_lookup_decl (new_vard, octx);
		    if (var3 == new_vard || var3 == NULL_TREE)
		      var3 = NULL_TREE;
		    else if (is_simd && octx->scan_exclusive && !input_phase)
		      {
			var4 = maybe_lookup_decl (var3, octx);
			if (var4 == var3 || var4 == NULL_TREE)
			  {
			    if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
			      {
				var4 = var3;
				var3 = NULL_TREE;
			      }
			    else
			      var4 = NULL_TREE;
			  }
		      }
		  }
		if (is_simd
		    && octx->scan_exclusive
		    && !input_phase
		    && var4 == NULL_TREE)
		  var4 = create_tmp_var (TREE_TYPE (val));
	      }
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	      {
		tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		if (input_phase)
		  {
		    if (var3)
		      {
			/* If we've added a separate identity element
			   variable, copy it over into val.  */
			tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
									var3);
			gimplify_and_add (x, &before);
		      }
		    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		      {
			/* Otherwise, assign to it the identity element.  */
			gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			if (is_for)
			  tseq = copy_gimple_seq_and_replace_locals (tseq);
			tree ref = build_outer_var_ref (var, octx);
			tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
				  ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
			if (x)
			  {
			    if (new_vard != new_var)
			      val = build_fold_addr_expr_loc (clause_loc, val);
			    SET_DECL_VALUE_EXPR (new_vard, val);
			  }
			SET_DECL_VALUE_EXPR (placeholder, ref);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
			lower_omp (&tseq, octx);
			if (x)
			  SET_DECL_VALUE_EXPR (new_vard, x);
			SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
			gimple_seq_add_seq (&before, tseq);
			if (is_simd)
			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      }
		  }
		else if (is_simd)
		  {
		    tree x;
		    if (octx->scan_exclusive)
		      {
			tree v4 = unshare_expr (var4);
			tree v2 = unshare_expr (var2);
			x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
			gimplify_and_add (x, &before);
		      }
		    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		    x = (DECL_HAS_VALUE_EXPR_P (new_vard)
			 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
		    tree vexpr = val;
		    if (x && new_vard != new_var)
		      vexpr = build_fold_addr_expr_loc (clause_loc, val);
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, vexpr);
		    SET_DECL_VALUE_EXPR (placeholder, var2);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		    lower_omp (&tseq, octx);
		    gimple_seq_add_seq (&before, tseq);
		    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, x);
		    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		    if (octx->scan_inclusive)
		      {
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var2);
			gimplify_and_add (x, &before);
		      }
		    else if (lane0 == NULL_TREE)
		      {
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var4);
			gimplify_and_add (x, &before);
		      }
		  }
	      }
	    else
	      {
		if (input_phase)
		  {
		    /* input phase.  Set val to initializer before
		       the body.  */
		    tree x = omp_reduction_init (c, TREE_TYPE (new_var));
		    gimplify_assign (val, x, &before);
		  }
		else if (is_simd)
		  {
		    /* scan phase.  */
		    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
		    if (code == MINUS_EXPR)
		      code = PLUS_EXPR;

		    tree x = build2 (code, TREE_TYPE (var2),
				     unshare_expr (var2), unshare_expr (val));
		    if (octx->scan_inclusive)
		      {
			gimplify_assign (unshare_expr (var2), x, &before);
			gimplify_assign (val, var2, &before);
		      }
		    else
		      {
			gimplify_assign (unshare_expr (var4),
					 unshare_expr (var2), &before);
			gimplify_assign (var2, x, &before);
			if (lane0 == NULL_TREE)
			  gimplify_assign (val, var4, &before);
		      }
		  }
	      }
	    if (octx->scan_exclusive && !input_phase && lane0)
	      {
		tree vexpr = unshare_expr (var4);
		TREE_OPERAND (vexpr, 1) = lane0;
		if (new_vard != new_var)
		  vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
		SET_DECL_VALUE_EXPR (new_vard, vexpr);
	      }
	  }
    }
  if (is_simd && !is_for_simd)
    {
      gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
      gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }
  lower_omp (gimple_omp_body_ptr (stmt), octx);
  if (before)
    {
      gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
      gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
    }
}
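
/* Illustrative user-level form handled here (a sketch, assuming -fopenmp;
   a and b are placeholder arrays):

	int r = 0;
	#pragma omp simd reduction (inscan, +:r)
	for (int i = 0; i < n; i++)
	  {
	    r += a[i];			// input phase
	    #pragma omp scan inclusive (r)
	    b[i] = r;			// scan phase
	  }

   The GIMPLE_OMP_SCAN statement separates the input phase from the scan
   phase; in the simd case the partial results are staged through the
   "omp simd array" temporaries indexed by the IFN_GOMP_SIMD_LANE call
   emitted above.  */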
/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   it requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

static GTY(()) hash_map<tree, tree> *critical_name_mutexes;

static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
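
/* Illustrative effect (sketch): for

	#pragma omp critical (name)
	  BODY;

   the code above produces

	GOMP_critical_name_start (&.gomp_critical_user_name);
	BODY;
	GOMP_critical_name_end (&.gomp_critical_user_name);

   while the unnamed form calls GOMP_critical_start/GOMP_critical_end.
   The mutex symbol is public and common so that all translation units
   using the same critical name share a single lock.  */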
/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  *CLIST is for lastprivate(conditional:) code that needs
   to be emitted in a critical section.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, gimple_seq *clist,
			   struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  tree n2 = fd->loop.n2;
  if (fd->collapse > 1
      && TREE_CODE (n2) != INTEGER_CST
      && gimple_omp_for_combined_into_p (fd->for_stmt))
    {
      struct omp_context *taskreg_ctx = NULL;
      if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	{
	  gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	  if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
	      || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
	    {
	      if (gimple_omp_for_combined_p (gfor))
		{
		  gcc_assert (ctx->outer->outer
			      && is_parallel_ctx (ctx->outer->outer));
		  taskreg_ctx = ctx->outer->outer;
		}
	      else
		{
		  struct omp_for_data outer_fd;
		  omp_extract_for_data (gfor, &outer_fd, NULL);
		  n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		}
	    }
	  else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
	    taskreg_ctx = ctx->outer->outer;
	}
      else if (is_taskreg_ctx (ctx->outer))
	taskreg_ctx = ctx->outer;
      if (taskreg_ctx)
	{
	  int i;
	  tree taskreg_clauses
	    = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	  tree innerc = omp_find_clause (taskreg_clauses,
					 OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  int count = fd->collapse;
	  if (fd->non_rect
	      && fd->last_nonrect == fd->first_nonrect + 1)
	    if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
		count += 4;
	  for (i = 0; i < count; i++)
	    {
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	    }
	  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
				    OMP_CLAUSE__LOOPTEMP_);
	  if (innerc)
	    n2 = fold_convert (TREE_TYPE (n2),
			       lookup_decl (OMP_CLAUSE_DECL (innerc),
					    taskreg_ctx));
	}
    }
  cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
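
/* Worked sketch of the predicate above: for

	#pragma omp for lastprivate (x)
	for (i = 0; i < n; i++)
	  x = ...;

   the loop step is 1, so COND_CODE degenerates to EQ_EXPR and the
   copy-out is gated roughly as

	if (i == n)
	  x = x_private;	// guarded lastprivate copy-out

   Only the thread that ran the final iteration satisfies the test, and
   V is pre-initialized (to N1, or to 0 when that is cheaper) so that
   threads executing no iterations can never satisfy it by accident.  */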
/* Callback for walk_gimple_seq.  Find #pragma omp scan statement.  */

static tree
omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
	  && gimple_omp_for_combined_into_p (stmt))
	*handled_ops_p = false;
      break;

    case GIMPLE_OMP_SCAN:
      *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
      return integer_zero_node;
    default:
      break;
    }
  return NULL;
}
/* Helper function for lower_omp_for, add transformations for a worksharing
   loop with scan directives inside of it.
   For worksharing loop not combined with simd, transform:
   #pragma omp for reduction(inscan,+:r) private(i)
   for (i = 0; i < n; i = i + 1)
     {
       {
	 update (r);
       }
       #pragma omp scan inclusive(r)
       {
	 use (r);
       }
     }

   into two worksharing loops + code to merge results:

   num_threads = omp_get_num_threads ();
   thread_num = omp_get_thread_num ();
   if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
   <D.2099>:
   var2 = r;
   goto <D.2101>;
   <D.2100>:
   // For UDRs this is UDR init, or if ctors are needed, copy from
   // var3 that has been constructed to contain the neutral element.
   var2 = 0;
   <D.2101>:
   ivar = 0;
   // The _scantemp_ clauses will arrange for rpriva to be initialized to
   // a shared array with num_threads elements and rprivb to a local array
   // number of elements equal to the number of (contiguous) iterations the
   // current thread will perform.  controlb and controlp variables are
   // temporaries to handle deallocation of rprivb at the end of second
   // GOMP_FOR.
   #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
     _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
   for (i = 0; i < n; i = i + 1)
     {
       {
	 // For UDRs this is UDR init or copy from var3.
	 r = 0;
	 // This is the input phase from user code.
	 update (r);
       }
       {
	 // For UDRs this is UDR merge.
	 var2 = var2 + r;
	 // Rather than handing it over to the user, save to local thread's
	 // private array.
	 rprivb[ivar] = var2;
	 // For exclusive scan, the above two statements are swapped.
	 ivar = ivar + 1;
       }
     }
   // And remember the final value from this thread's into the shared
   // rpriva array.
   rpriva[(sizetype) thread_num] = var2;
   // If more than one thread, compute using Work-Efficient prefix sum
   // the inclusive parallel scan of the rpriva array.
   if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
   <D.2102>:
   GOMP_barrier ();
   down = 0;
   k = 1;
   num_threadsu = (unsigned int) num_threads;
   thread_numup1 = (unsigned int) thread_num + 1;
   <D.2108>:
   twok = k << 1;
   if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
   <D.2110>:
   down = 4294967295;
   k = k >> 1;
   if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
   <D.2112>:
   k = k >> 1;
   <D.2111>:
   twok = k << 1;
   cplx = .MUL_OVERFLOW (thread_nump1, twok);
   mul = REALPART_EXPR <cplx>;
   ovf = IMAGPART_EXPR <cplx>;
   if (ovf == 0) goto <D.2116>; else goto <D.2117>;
   <D.2116>:
   andv = k & down;
   andvm1 = andv + 4294967295;
   l = mul + andvm1;
   if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
   <D.2120>:
   // For UDRs this is UDR merge, performed using var2 variable as temporary,
   // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
   rpriva[l] = rpriva[l - k] + rpriva[l];
   <D.2117>:
   if (down == 0) goto <D.2121>; else goto <D.2122>;
   <D.2121>:
   k = k << 1;
   goto <D.2123>;
   <D.2122>:
   k = k >> 1;
   <D.2123>:
   GOMP_barrier ();
   if (k != 0) goto <D.2108>; else goto <D.2103>;
   <D.2103>:
   if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
   <D.2124>:
   // For UDRs this is UDR init or copy from var3.
   var2 = 0;
   goto <D.2126>;
   <D.2125>:
   var2 = rpriva[thread_num - 1];
   <D.2126>:
   ivar = 0;
   #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
     reduction(inscan,+:r) private(i)
   for (i = 0; i < n; i = i + 1)
     {
       {
	 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
	 r = var2 + rprivb[ivar];
       }
       {
	 // This is the scan phase from user code.
	 use (r);
	 // Plus a bump of the iterator.
	 ivar = ivar + 1;
       }
     }  */
static void
lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
		    struct omp_for_data *fd, omp_context *ctx)
{
  bool is_for_simd = gimple_omp_for_combined_p (stmt);
  gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);

  gimple_seq body = gimple_omp_body (stmt);
  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gimple_stmt_iterator gsi = input1_gsi;
  gsi_next (&gsi);
  gimple_stmt_iterator scan1_gsi = gsi;
  gimple *scan_stmt1 = gsi_stmt (gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);

  gimple_seq input_body = gimple_omp_body (input_stmt1);
  gimple_seq scan_body = gimple_omp_body (scan_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);
  gimple_omp_set_body (scan_stmt1, NULL);
  gimple_omp_set_body (stmt, NULL);

  gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
  gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
  gimple_omp_set_body (stmt, body);
  gimple_omp_set_body (input_stmt1, input_body);

  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi = input2_gsi;
  gsi_next (&gsi);
  gimple_stmt_iterator scan2_gsi = gsi;
  gimple *scan_stmt2 = gsi_stmt (gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  gimple_omp_set_body (scan_stmt2, scan_body);

  gimple_stmt_iterator input3_gsi = gsi_none ();
  gimple_stmt_iterator scan3_gsi = gsi_none ();
  gimple_stmt_iterator input4_gsi = gsi_none ();
  gimple_stmt_iterator scan4_gsi = gsi_none ();
  gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
  gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
  omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
  if (is_for_simd)
    {
      memset (&wi, 0, sizeof (wi));
      wi.info = (void *) &input3_gsi;
      walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
      gcc_assert (!gsi_end_p (input3_gsi));

      input_stmt3 = gsi_stmt (input3_gsi);
      gsi = input3_gsi;
      gsi_next (&gsi);
      scan3_gsi = gsi;
      scan_stmt3 = gsi_stmt (gsi);
      gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);

      memset (&wi, 0, sizeof (wi));
      wi.info = (void *) &input4_gsi;
      walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
      gcc_assert (!gsi_end_p (input4_gsi));

      input_stmt4 = gsi_stmt (input4_gsi);
      gsi = input4_gsi;
      gsi_next (&gsi);
      scan4_gsi = gsi;
      scan_stmt4 = gsi_stmt (gsi);
      gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);

      input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
      scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
    }

  tree num_threads = create_tmp_var (integer_type_node);
  tree thread_num = create_tmp_var (integer_type_node);
  tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
  tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  gimple *g = gimple_build_call (nthreads_decl, 0);
  gimple_call_set_lhs (g, num_threads);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_call (threadnum_decl, 0);
  gimple_call_set_lhs (g, thread_num);
  gimple_seq_add_stmt (body_p, g);

  tree ivar = create_tmp_var (sizetype);
  tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
  tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
  tree k = create_tmp_var (unsigned_type_node);
  tree l = create_tmp_var (unsigned_type_node);

  gimple_seq clist = NULL, mdlist = NULL;
  gimple_seq thr01_list = NULL, thrn1_list = NULL;
  gimple_seq thr02_list = NULL, thrn2_list = NULL;
  gimple_seq scan1_list = NULL, input2_list = NULL;
  gimple_seq last_list = NULL, reduc_list = NULL;
  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& OMP_CLAUSE_REDUCTION_INSCAN (c))
      {
	location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	tree var = OMP_CLAUSE_DECL (c);
	tree new_var = lookup_decl (var, ctx);
	tree var3 = NULL_TREE;
	tree new_vard = new_var;
	if (omp_is_reference (var))
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    var3 = maybe_lookup_decl (new_vard, ctx);
	    if (var3 == new_vard)
	      var3 = NULL_TREE;
	  }

	tree ptype = build_pointer_type (TREE_TYPE (new_var));
	tree rpriva = create_tmp_var (ptype);
	tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
	OMP_CLAUSE_DECL (nc) = rpriva;
	*cp1 = nc;
	cp1 = &OMP_CLAUSE_CHAIN (nc);

	tree rprivb = create_tmp_var (ptype);
	nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
	OMP_CLAUSE_DECL (nc) = rprivb;
	OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
	*cp1 = nc;
	cp1 = &OMP_CLAUSE_CHAIN (nc);

	tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
	if (new_vard != new_var)
	  TREE_ADDRESSABLE (var2) = 1;
	gimple_add_tmp_var (var2);

	tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);

	x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
			     thread_num, integer_minus_one_node);
	x = fold_convert_loc (clause_loc, sizetype, x);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);

	x = fold_convert_loc (clause_loc, sizetype, l);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);

	x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
	x = fold_convert_loc (clause_loc, sizetype, x);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);

	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
	tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);

	tree var4 = is_for_simd ? new_var : var2;
	tree var5 = NULL_TREE, var6 = NULL_TREE;
	if (is_for_simd)
	  {
	    var5 = lookup_decl (var, input_simd_ctx);
	    var6 = lookup_decl (var, scan_simd_ctx);
	    if (new_vard != new_var)
	      {
		var5 = build_simple_mem_ref_loc (clause_loc, var5);
		var6 = build_simple_mem_ref_loc (clause_loc, var6);
	      }
	  }
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	    x = lang_hooks.decls.omp_clause_default_ctor
		  (c, var2, build_outer_var_ref (var, ctx));
	    if (x)
	      gimplify_and_add (x, &clist);

	    x = build_outer_var_ref (var, ctx);
	    x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
						       x);
	    gimplify_and_add (x, &thr01_list);

	    tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
		      ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
	    if (var3)
	      {
		x = unshare_expr (var4);
		x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
		gimplify_and_add (x, &thrn1_list);
		x = unshare_expr (var4);
		x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
		gimplify_and_add (x, &thr02_list);
	      }
	    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
	      {
		/* Otherwise, assign to it the identity element.  */
		gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		tseq = copy_gimple_seq_and_replace_locals (tseq);
		tree val = var4;
		if (new_vard != new_var)
		  val = build_fold_addr_expr_loc (clause_loc, val);
		SET_DECL_VALUE_EXPR (new_vard, val);
		DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
		SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		lower_omp (&tseq, ctx);
		gimple_seq_add_seq (&thrn1_list, tseq);
		tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		lower_omp (&tseq, ctx);
		gimple_seq_add_seq (&thr02_list, tseq);
		SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		if (y)
		  SET_DECL_VALUE_EXPR (new_vard, y);
		else
		  {
		    DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
		    SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
		  }
	      }

	    x = unshare_expr (var4);
	    x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
	    gimplify_and_add (x, &thrn2_list);

	    if (is_for_simd)
	      {
		x = unshare_expr (rprivb_ref);
		x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
		gimplify_and_add (x, &scan1_list);
	      }
	    else
	      {
		if (ctx->scan_exclusive)
		  {
		    x = unshare_expr (rprivb_ref);
		    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
		    gimplify_and_add (x, &scan1_list);
		  }

		gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		tseq = copy_gimple_seq_and_replace_locals (tseq);
		SET_DECL_VALUE_EXPR (placeholder, var2);
		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		lower_omp (&tseq, ctx);
		gimple_seq_add_seq (&scan1_list, tseq);

		if (ctx->scan_inclusive)
		  {
		    x = unshare_expr (rprivb_ref);
		    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
		    gimplify_and_add (x, &scan1_list);
		  }
	      }

	    x = unshare_expr (rpriva_ref);
	    x = lang_hooks.decls.omp_clause_assign_op (c, x,
						       unshare_expr (var4));
	    gimplify_and_add (x, &mdlist);

	    x = unshare_expr (is_for_simd ? var6 : new_var);
	    x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
	    gimplify_and_add (x, &input2_list);

	    tree val = var2;
	    if (new_vard != new_var)
	      val = build_fold_addr_expr_loc (clause_loc, val);

	    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
	    tseq = copy_gimple_seq_and_replace_locals (tseq);
	    SET_DECL_VALUE_EXPR (new_vard, val);
	    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
	    if (is_for_simd)
	      {
		SET_DECL_VALUE_EXPR (placeholder, var6);
		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      }
	    else
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
	    lower_omp (&tseq, ctx);
	    if (y)
	      SET_DECL_VALUE_EXPR (new_vard, y);
	    else
	      {
		DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
		SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
	      }
	    if (!is_for_simd)
	      {
		SET_DECL_VALUE_EXPR (placeholder, new_var);
		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		lower_omp (&tseq, ctx);
	      }
	    gimple_seq_add_seq (&input2_list, tseq);

	    x = build_outer_var_ref (var, ctx);
	    x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
	    gimplify_and_add (x, &last_list);

	    x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
	    gimplify_and_add (x, &reduc_list);
	    tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
	    tseq = copy_gimple_seq_and_replace_locals (tseq);
	    val = rprival_ref;
	    if (new_vard != new_var)
	      val = build_fold_addr_expr_loc (clause_loc, val);
	    SET_DECL_VALUE_EXPR (new_vard, val);
	    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
	    SET_DECL_VALUE_EXPR (placeholder, var2);
	    lower_omp (&tseq, ctx);
	    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
	    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
	    if (y)
	      SET_DECL_VALUE_EXPR (new_vard, y);
	    else
	      {
		DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
		SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
	      }
	    gimple_seq_add_seq (&reduc_list, tseq);
	    x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
	    gimplify_and_add (x, &reduc_list);

	    x = lang_hooks.decls.omp_clause_dtor (c, var2);
	    if (x)
	      gimplify_and_add (x, dlist);
	  }
	else
	  {
	    x = build_outer_var_ref (var, ctx);
	    gimplify_assign (unshare_expr (var4), x, &thr01_list);

	    x = omp_reduction_init (c, TREE_TYPE (new_var));
	    gimplify_assign (unshare_expr (var4), unshare_expr (x),
			     &thrn1_list);
	    gimplify_assign (unshare_expr (var4), x, &thr02_list);

	    gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);

	    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
	    if (code == MINUS_EXPR)
	      code = PLUS_EXPR;

	    if (is_for_simd)
	      gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
	    else
	      {
		if (ctx->scan_exclusive)
		  gimplify_assign (unshare_expr (rprivb_ref), var2,
				   &scan1_list);
		x = build2 (code, TREE_TYPE (new_var), var2, new_var);
		gimplify_assign (var2, x, &scan1_list);
		if (ctx->scan_inclusive)
		  gimplify_assign (unshare_expr (rprivb_ref), var2,
				   &scan1_list);
	      }

	    gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
			     &mdlist);

	    x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
	    gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);

	    gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
			     &last_list);

	    x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
			unshare_expr (rprival_ref));
	    gimplify_assign (rprival_ref, x, &reduc_list);
	  }
      }

  g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
  gimple_seq_add_stmt (&scan1_list, g);
  g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
  gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
					    ? scan_stmt4 : scan_stmt2), g);

  tree controlb = create_tmp_var (boolean_type_node);
  tree controlp = create_tmp_var (ptr_type_node);
  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlb;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp1 = nc;
  cp1 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlp;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp1 = nc;
  cp1 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlb;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp2 = nc;
  cp2 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlp;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp2 = nc;
  cp2 = &OMP_CLAUSE_CHAIN (nc);

  *cp1 = gimple_omp_for_clauses (stmt);
  gimple_omp_for_set_clauses (stmt, new_clauses1);
  *cp2 = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, new_clauses2);

  if (is_for_simd)
    {
      gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
      gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);

      gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
			    GSI_SAME_STMT);
      gsi_remove (&input3_gsi, true);
      gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
			    GSI_SAME_STMT);
      gsi_remove (&scan3_gsi, true);
      gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
			    GSI_SAME_STMT);
      gsi_remove (&input4_gsi, true);
      gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
			    GSI_SAME_STMT);
      gsi_remove (&scan4_gsi, true);
    }
  else
    {
      gimple_omp_set_body (scan_stmt1, scan1_list);
      gimple_omp_set_body (input_stmt2, input2_list);

      gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
			    GSI_SAME_STMT);
      gsi_remove (&input1_gsi, true);
      gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
			    GSI_SAME_STMT);
      gsi_remove (&scan1_gsi, true);
      gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
			    GSI_SAME_STMT);
      gsi_remove (&input2_gsi, true);
      gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
			    GSI_SAME_STMT);
      gsi_remove (&scan2_gsi, true);
    }

  gimple_seq_add_seq (body_p, clist);

  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thr01_list);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thrn1_list);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (ivar, size_zero_node);
  gimple_seq_add_stmt (body_p, g);

  gimple_seq_add_stmt (body_p, stmt);
  gimple_seq_add_seq (body_p, body);
  gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
							  fd->loop.v));

  g = gimple_build_omp_return (true);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, mdlist);
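  /* What follows builds, directly in GIMPLE, the cross-thread combination
     step sketched in the comment before this function: each thread has
     stored its local partial result in rpriva[thread_num], and the threads
     now cooperate on an in-place parallel prefix over that array.  A rough
     C rendering of the emitted control flow (an illustrative sketch only;
     the names mirror the temporaries created below):

       unsigned int k = 1, down = 0;
       unsigned int nthreads = (unsigned int) num_threads;
       unsigned int tnum = (unsigned int) thread_num;
       GOMP_barrier ();
       do
	 {
	   unsigned int twok = k << 1;
	   if (twok > nthreads)
	     {
	       down = ~0U;
	       k = k >> 1;
	       if (k == nthreads)
		 k = k >> 1;
	       twok = k << 1;
	     }
	   unsigned int mul;
	   int ovf = __builtin_mul_overflow (tnum + 1, twok, &mul);
	   unsigned int l = mul + ((k & down) - 1);
	   if (!ovf && l < nthreads)
	     rpriva[l] = rpriva[l - k] + rpriva[l];	// or the UDR merge
	   if (down == 0)
	     k = k << 1;
	   else
	     k = k >> 1;
	   GOMP_barrier ();
	 }
       while (k != 0);
  */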
  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);

  g = omp_build_barrier (NULL);
  gimple_seq_add_stmt (body_p, g);

  tree down = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  tree num_threadsu = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
  gimple_seq_add_stmt (body_p, g);

  tree thread_numu = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
  gimple_seq_add_stmt (body_p, g);

  tree thread_nump1 = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
			   build_int_cst (unsigned_type_node, 1));
  gimple_seq_add_stmt (body_p, g);

  lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  tree twok = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab4);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab6);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_label (lab5);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
  g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
  gimple_call_set_lhs (g, cplx);
  gimple_seq_add_stmt (body_p, g);
  tree mul = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (mul, REALPART_EXPR,
			   build1 (REALPART_EXPR, unsigned_type_node, cplx));
  gimple_seq_add_stmt (body_p, g);
  tree ovf = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (ovf, IMAGPART_EXPR,
			   build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
  gimple_seq_add_stmt (body_p, g);

  tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
			 lab7, lab8);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab7);
  gimple_seq_add_stmt (body_p, g);

  tree andv = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
  gimple_seq_add_stmt (body_p, g);
  tree andvm1 = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
			   build_minus_one_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
  gimple_seq_add_stmt (body_p, g);

  tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab9);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, reduc_list);
  g = gimple_build_label (lab8);
  gimple_seq_add_stmt (body_p, g);

  tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
			 lab10, lab11);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab10);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_goto (lab12);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab11);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab12);
  gimple_seq_add_stmt (body_p, g);

  g = omp_build_barrier (NULL);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
			 lab3, lab2);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);

  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thr02_list);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thrn2_list);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (ivar, size_zero_node);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_stmt (body_p, new_stmt);
  gimple_seq_add_seq (body_p, new_body);

  gimple_seq new_dlist = NULL;
  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree num_threadsm1 = create_tmp_var (integer_type_node);
  g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
			   integer_minus_one_node);
  gimple_seq_add_stmt (&new_dlist, g);
  g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
  gimple_seq_add_stmt (&new_dlist, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&new_dlist, g);
  gimple_seq_add_seq (&new_dlist, last_list);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&new_dlist, g);
  gimple_seq_add_seq (&new_dlist, *dlist);
  *dlist = new_dlist;
}
/* Lower code for an OMP loop directive.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and it's block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      size_t count2 = 0;
      tree type2 = NULL_TREE;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      if (fd.collapse > 1
	  && fd.non_rect
	  && fd.last_nonrect == fd.first_nonrect + 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      v = gimple_omp_for_index (stmt, fd.first_nonrect);
	      type2 = TREE_TYPE (v);
	      count2 = 3;
	    }
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count + count2; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (i >= count ? type2 : type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
					 ctx);

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt),
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);

  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  if ((ctx->scan_inclusive || ctx->scan_exclusive)
      && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
  else
    {
      gimple_seq_add_stmt (&body, stmt);
      gimple_seq_add_seq (&body, gimple_omp_body (stmt));

      gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							     fd.loop.v));

      /* After the loop, add exit clauses.  */
      lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist,
			       ctx);
    }

  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
      gimple_seq_add_seq (&body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
    }

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  if (rclauses)
    {
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  /* Region exit marker goes at the end of the loop body.  */
  gimple *g = gimple_build_omp_return (fd.have_nowait);
  gimple_seq_add_stmt (&body, g);

  gimple_seq_add_seq (&body, tred_dlist);

  maybe_add_implicit_barrier_cancel (ctx, g, &body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
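/* To illustrate, after this pass a simple worksharing loop such as

     #pragma omp for nowait
     for (i = 0; i < n; i++)
       body (i);

   is still represented by a GIMPLE_OMP_FOR, but now bracketed by the
   lowered clause code, roughly (a dump-style sketch; details vary):

     <input clause initialization, pre-body>
     #pragma omp for nowait (i = 0; i < n; i = i + 1)
       body (i);
     #pragma omp continue (i, i)
     <lastprivate/reduction merge code>
     #pragma omp return (nowait)

   The actual division into GOMP_loop_* runtime calls happens later, in
   pass_expand_omp.  */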
/* Callback for walk_stmts.  Check if the current statement only contains
   GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */

static tree
check_combined_parallel (gimple_stmt_iterator *gsi_p,
			 bool *handled_ops_p,
			 struct walk_stmt_info *wi)
{
  int *info = (int *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
      *info = *info == 0 ? 1 : -1;
      break;
    default:
      *info = -1;
      break;
    }

  return NULL;
}
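/* For instance (hypothetical user code), in

     #pragma omp parallel
     #pragma omp for
     for (i = 0; i < n; i++)
       body (i);

   the parallel body consists of exactly one worksharing construct, so the
   walk over the body (see lower_omp_taskreg below) leaves *info == 1 and
   the GIMPLE_OMP_PARALLEL is marked as combined; pass_expand_omp can then
   use the combined GOMP_parallel_loop_* / GOMP_parallel_sections* entry
   points of libgomp instead of a separate parallel plus worksharing
   region.  */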
struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  omp_context *ctx;
};

static tree
task_copyfn_copy_decl (tree var, copy_body_data *cb)
{
  struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;

  if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
    return create_tmp_var (TREE_TYPE (var));

  return var;
}

static tree
task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
{
  tree name, new_fields = NULL, type, f;

  type = lang_hooks.types.make_type (RECORD_TYPE);
  name = DECL_NAME (TYPE_NAME (orig_type));
  name = build_decl (gimple_location (tcctx->ctx->stmt),
		     TYPE_DECL, name, type);
  TYPE_NAME (type) = name;

  for (f = TYPE_FIELDS (orig_type); f; f = TREE_CHAIN (f))
    {
      tree new_f = copy_node (f);
      DECL_CONTEXT (new_f) = type;
      TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
      TREE_CHAIN (new_f) = new_fields;
      walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		 &tcctx->cb, NULL);
      new_fields = new_f;
      tcctx->cb.decl_map->put (f, new_f);
    }
  TYPE_FIELDS (type) = nreverse (new_fields);
  layout_type (type);
  return type;
}
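/* Such remapping is needed when fields have variably modified types, whose
   size expressions refer to temporaries of the parent function and hence
   must be re-expressed in terms of the copy function's own temporaries.
   A hypothetical example that triggers it:

     void f (int n)
     {
       int vla[n];			// variably modified type
       #pragma omp task firstprivate (vla)
       use (vla);			// `use' is made up for the example
     }
*/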
/* Create task copyfn.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    tcctx.cb.decl_map = NULL;

  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	decl = OMP_CLAUSE_DECL (c);
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
	if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
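/* A task copy function is needed whenever the firstprivate data cannot be
   duplicated by GOMP_task's plain block copy: e.g. C++ objects with
   non-trivial copy constructors, or VLAs whose contents and pointer
   fixups must be handled explicitly as in the last pass above.  A
   hypothetical triggering example (with `use' made up):

     void f (int n)
     {
       int vla[n];
       #pragma omp task firstprivate (vla)
       use (vla[0]);
     }
*/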
static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	default:
	  gcc_unreachable ();
	}
  if (cnt[1] || cnt[3])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  tree type = build_array_type_nelts (ptr_type_node, total + idx);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  for (i = 0; i < 4; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    continue;
	  switch (OMP_CLAUSE_DEPEND_KIND (c))
	    {
	    case OMP_CLAUSE_DEPEND_IN:
	      if (i != 2)
		continue;
	      break;
	    case OMP_CLAUSE_DEPEND_OUT:
	    case OMP_CLAUSE_DEPEND_INOUT:
	      if (i != 0)
		continue;
	      break;
	    case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	      if (i != 1)
		continue;
	      break;
	    case OMP_CLAUSE_DEPEND_DEPOBJ:
	      if (i != 3)
		continue;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree t = OMP_CLAUSE_DECL (c);
	  t = fold_convert (ptr_type_node, t);
	  gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	  r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
		      NULL_TREE, NULL_TREE);
	  g = gimple_build_assign (r, t);
	  gimple_seq_add_stmt (iseq, g);
	}
    }
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  tree clobber = build_clobber (type);
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
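/* For illustration, with

     #pragma omp task depend(in: a) depend(out: b) depend(mutexinoutset: m)

   the mutexinoutset dependence forces the extended encoding (idx == 5),
   so the runtime receives roughly the following array (a sketch of what
   the code above builds, not a normative ABI description):

     void *deps[] = { (void *) 0,	// marks the extended layout
		      (void *) 3,	// total number of addresses
		      (void *) 1,	// out/inout count
		      (void *) 1,	// mutexinoutset count
		      (void *) 1,	// in count
		      &b, &m, &a };	// addresses, grouped in that order
*/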
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (dep_bind, dep_ilist);
	  gimple_bind_add_stmt (dep_bind, stmt);
	  gimple_bind_add_seq (dep_bind, dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			      OMP_CLAUSE__REDUCTEMP_)))
    {
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
							: OMP_PARALLEL,
				 gimple_omp_taskreg_clauses (ctx->stmt),
				 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) != GIMPLE_OMP_TASK)
    lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
  gimple_omp_set_body (stmt, new_body);

  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
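/* Conceptually, for user code such as

     int a, b;
     #pragma omp parallel shared (a) firstprivate (b)
     use (&a, b);			// `use' is made up for the example

   the clauses are lowered into stores into a local .omp_data_o record
   before the GIMPLE_OMP_PARALLEL and a load of its address into the
   receiver inside the body, roughly (a dump-style sketch, not the exact
   generated GIMPLE):

     .omp_data_o.a = &a;
     .omp_data_o.b = b;
     #pragma omp parallel [child fn: f._omp_fn.0 (.omp_data_o)]
       {
	 .omp_data_i = &.omp_data_o;
	 use (.omp_data_i->a, .omp_data_i->b);
       }
*/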
11371 /* Lower the GIMPLE_OMP_TARGET in the current statement
11372 in GSI_P. CTX holds context information for the directive. */
11375 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11378 tree child_fn
, t
, c
;
11379 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
11380 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
11381 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
11382 location_t loc
= gimple_location (stmt
);
11383 bool offloaded
, data_region
;
11384 unsigned int map_cnt
= 0;
11386 offloaded
= is_gimple_omp_offloaded (stmt
);
11387 switch (gimple_omp_target_kind (stmt
))
11389 case GF_OMP_TARGET_KIND_REGION
:
11390 case GF_OMP_TARGET_KIND_UPDATE
:
11391 case GF_OMP_TARGET_KIND_ENTER_DATA
:
11392 case GF_OMP_TARGET_KIND_EXIT_DATA
:
11393 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
11394 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
11395 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
11396 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
11397 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
11398 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
11399 data_region
= false;
11401 case GF_OMP_TARGET_KIND_DATA
:
11402 case GF_OMP_TARGET_KIND_OACC_DATA
:
11403 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
11404 data_region
= true;
11407 gcc_unreachable ();
11410 clauses
= gimple_omp_target_clauses (stmt
);
11412 gimple_seq dep_ilist
= NULL
;
11413 gimple_seq dep_olist
= NULL
;
11414 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
11416 push_gimplify_context ();
11417 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11418 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
11419 &dep_ilist
, &dep_olist
);
11426 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
11427 tgt_body
= gimple_bind_body (tgt_bind
);
11429 else if (data_region
)
11430 tgt_body
= gimple_omp_body (stmt
);
11431 child_fn
= ctx
->cb
.dst_fn
;
  push_gimplify_context ();
  fplist = NULL;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	tree var, x;

      default:
	break;
      case OMP_CLAUSE_MAP:
#if CHECKING_P
	/* First check what we're prepared to handle in the following.  */
	switch (OMP_CLAUSE_MAP_KIND (c))
	  {
	  case GOMP_MAP_ALLOC:
	  case GOMP_MAP_TO:
	  case GOMP_MAP_FROM:
	  case GOMP_MAP_TOFROM:
	  case GOMP_MAP_POINTER:
	  case GOMP_MAP_TO_PSET:
	  case GOMP_MAP_DELETE:
	  case GOMP_MAP_RELEASE:
	  case GOMP_MAP_ALWAYS_TO:
	  case GOMP_MAP_ALWAYS_FROM:
	  case GOMP_MAP_ALWAYS_TOFROM:
	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	  case GOMP_MAP_STRUCT:
	  case GOMP_MAP_ALWAYS_POINTER:
	    break;
	  case GOMP_MAP_IF_PRESENT:
	  case GOMP_MAP_FORCE_ALLOC:
	  case GOMP_MAP_FORCE_TO:
	  case GOMP_MAP_FORCE_FROM:
	  case GOMP_MAP_FORCE_TOFROM:
	  case GOMP_MAP_FORCE_PRESENT:
	  case GOMP_MAP_FORCE_DEVICEPTR:
	  case GOMP_MAP_DEVICE_RESIDENT:
	  case GOMP_MAP_LINK:
	  case GOMP_MAP_ATTACH:
	  case GOMP_MAP_DETACH:
	  case GOMP_MAP_FORCE_DETACH:
	    gcc_assert (is_gimple_omp_oacc (stmt));
	    break;
	  default:
	    gcc_unreachable ();
	  }
#endif
	  /* FALLTHRU */
      case OMP_CLAUSE_TO:
      case OMP_CLAUSE_FROM:
      oacc_firstprivate:
	var = OMP_CLAUSE_DECL (c);
	if (!DECL_P (var))
	  {
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
		|| (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		    && (OMP_CLAUSE_MAP_KIND (c)
			!= GOMP_MAP_FIRSTPRIVATE_POINTER)))
	      map_cnt++;
	    continue;
	  }

	if (DECL_SIZE (var)
	    && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
	  {
	    tree var2 = DECL_VALUE_EXPR (var);
	    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
	    var2 = TREE_OPERAND (var2, 0);
	    gcc_assert (DECL_P (var2));
	    var = var2;
	  }

	if (offloaded
	    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	  {
	    if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      {
		if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
		    && varpool_node::get_create (var)->offloadable)
		  continue;

		tree type = build_pointer_type (TREE_TYPE (var));
		tree new_var = lookup_decl (var, ctx);
		x = create_tmp_var_raw (type, get_name (new_var));
		gimple_add_tmp_var (x);
		x = build_simple_mem_ref (x);
		SET_DECL_VALUE_EXPR (new_var, x);
		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	      }
	    continue;
	  }

	if (!maybe_lookup_field (var, ctx))
	  continue;

	/* Don't remap compute constructs' reduction variables, because the
	   intermediate result must be local to each gang.  */
	if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
	  {
	    x = build_receiver_ref (var, true, ctx);
	    tree new_var = lookup_decl (var, ctx);

	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		&& !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      x = build_simple_mem_ref (x);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	      {
		gcc_assert (is_gimple_omp_oacc (ctx->stmt));
		if (omp_is_reference (new_var)
		    && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
			|| DECL_BY_REFERENCE (var)))
		  {
		    /* Create a local object to hold the instance
		       value.  */
		    tree type = TREE_TYPE (TREE_TYPE (new_var));
		    const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
		    tree inst = create_tmp_var (type, id);
		    gimplify_assign (inst, fold_indirect_ref (x), &fplist);
		    x = build_fold_addr_expr (inst);
		  }
		gimplify_assign (new_var, x, &fplist);
	      }
	    else if (DECL_P (new_var))
	      {
		SET_DECL_VALUE_EXPR (new_var, x);
		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	      }
	    else
	      gcc_unreachable ();
	  }
	map_cnt++;
	break;
      case OMP_CLAUSE_FIRSTPRIVATE:
	if (is_oacc_parallel_or_serial (ctx))
	  goto oacc_firstprivate;
	map_cnt++;
	var = OMP_CLAUSE_DECL (c);
	if (!omp_is_reference (var)
	    && !is_gimple_reg_type (TREE_TYPE (var)))
	  {
	    tree new_var = lookup_decl (var, ctx);
	    if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_pvar = lookup_decl (pvar, ctx);
		x = build_fold_indirect_ref (new_pvar);
		TREE_THIS_NOTRAP (x) = 1;
	      }
	    else
	      x = build_receiver_ref (var, true, ctx);
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	break;

      case OMP_CLAUSE_PRIVATE:
	if (is_gimple_omp_oacc (ctx->stmt))
	  break;
	var = OMP_CLAUSE_DECL (c);
	if (is_variable_sized (var))
	  {
	    tree new_var = lookup_decl (var, ctx);
	    tree pvar = DECL_VALUE_EXPR (var);
	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
	    pvar = TREE_OPERAND (pvar, 0);
	    gcc_assert (DECL_P (pvar));
	    tree new_pvar = lookup_decl (pvar, ctx);
	    x = build_fold_indirect_ref (new_pvar);
	    TREE_THIS_NOTRAP (x) = 1;
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	break;

      case OMP_CLAUSE_USE_DEVICE_PTR:
      case OMP_CLAUSE_USE_DEVICE_ADDR:
      case OMP_CLAUSE_IS_DEVICE_PTR:
	var = OMP_CLAUSE_DECL (c);
	map_cnt++;
	if (is_variable_sized (var))
	  {
	    tree new_var = lookup_decl (var, ctx);
	    tree pvar = DECL_VALUE_EXPR (var);
	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
	    pvar = TREE_OPERAND (pvar, 0);
	    gcc_assert (DECL_P (pvar));
	    tree new_pvar = lookup_decl (pvar, ctx);
	    x = build_fold_indirect_ref (new_pvar);
	    TREE_THIS_NOTRAP (x) = 1;
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		  && !omp_is_reference (var)
		  && !omp_is_allocatable_or_ptr (var)
		  && !lang_hooks.decls.omp_array_data (var, true))
		 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	  {
	    tree new_var = lookup_decl (var, ctx);
	    tree type = build_pointer_type (TREE_TYPE (var));
	    x = create_tmp_var_raw (type, get_name (new_var));
	    gimple_add_tmp_var (x);
	    x = build_simple_mem_ref (x);
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	else
	  {
	    tree new_var = lookup_decl (var, ctx);
	    x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
	    gimple_add_tmp_var (x);
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	break;
      }
  if (offloaded)
    {
      target_nesting_level++;
      lower_omp (&tgt_body, ctx);
      target_nesting_level--;
    }
  else if (data_region)
    lower_omp (&tgt_body, ctx);

  if (offloaded)
    {
      /* Declare all the variables created by mapping and the variables
	 declared in the scope of the target body.  */
      record_vars_into (ctx->block_vars, child_fn);
      maybe_remove_omp_member_access_dummy_vars (tgt_bind);
      record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
    }

  olist = NULL;
  ilist = NULL;
  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->record_type, ".omp_data_arr");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      t = make_tree_vec (3);
      TREE_VEC_ELT (t, 0) = ctx->sender_decl;
      TREE_VEC_ELT (t, 1)
	= create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
			  ".omp_data_sizes");
      DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
      TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
      tree tkind_type = short_unsigned_type_node;
      int talign_shift = 8;
      TREE_VEC_ELT (t, 2)
	= create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
			  ".omp_data_kinds");
      DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
      TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
      TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
      gimple_omp_target_set_data_arg (stmt, t);

      vec<constructor_elt, va_gc> *vsize;
      vec<constructor_elt, va_gc> *vkind;
      vec_alloc (vsize, map_cnt);
      vec_alloc (vkind, map_cnt);
      unsigned int map_idx = 0;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree ovar, nc, s, purpose, var, x, type;
	    unsigned int talign;

	  default:
	    break;

	  case OMP_CLAUSE_MAP:
	  case OMP_CLAUSE_TO:
	  case OMP_CLAUSE_FROM:
	  oacc_firstprivate_map:
	    nc = c;
	    ovar = OMP_CLAUSE_DECL (c);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		    || (OMP_CLAUSE_MAP_KIND (c)
			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	      break;
	    if (!DECL_P (ovar))
	      {
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		    && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		  {
		    gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
					 == get_base_address (ovar));
		    nc = OMP_CLAUSE_CHAIN (c);
		    ovar = OMP_CLAUSE_DECL (nc);
		  }
		else
		  {
		    tree x = build_sender_ref (ovar, ctx);
		    tree v
		      = build_fold_addr_expr_with_type (ovar, ptr_type_node);
		    gimplify_assign (x, v, &ilist);
		    nc = NULL_TREE;
		  }
	      }
	    else
	      {
		if (DECL_SIZE (ovar)
		    && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
		  {
		    tree ovar2 = DECL_VALUE_EXPR (ovar);
		    gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
		    ovar2 = TREE_OPERAND (ovar2, 0);
		    gcc_assert (DECL_P (ovar2));
		    ovar = ovar2;
		  }
		if (!maybe_lookup_field (ovar, ctx))
		  continue;
	      }

	    talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
	    if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
	      talign = DECL_ALIGN_UNIT (ovar);
	    if (nc)
	      {
		var = lookup_decl_in_outer_ctx (ovar, ctx);
		x = build_sender_ref (ovar, ctx);

		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		    && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		    && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
		  {
		    gcc_assert (offloaded);
		    tree avar
		      = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
		    mark_addressable (avar);
		    gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
		    talign = DECL_ALIGN_UNIT (avar);
		    avar = build_fold_addr_expr (avar);
		    gimplify_assign (x, avar, &ilist);
		  }
		else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		  {
		    gcc_assert (is_gimple_omp_oacc (ctx->stmt));
		    if (!omp_is_reference (var))
		      {
			if (is_gimple_reg (var)
			    && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
			  TREE_NO_WARNING (var) = 1;
			var = build_fold_addr_expr (var);
		      }
		    else
		      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
		    gimplify_assign (x, var, &ilist);
		  }
		else if (is_gimple_reg (var))
		  {
		    gcc_assert (offloaded);
		    tree avar = create_tmp_var (TREE_TYPE (var));
		    mark_addressable (avar);
		    enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
		    if (GOMP_MAP_COPY_TO_P (map_kind)
			|| map_kind == GOMP_MAP_POINTER
			|| map_kind == GOMP_MAP_TO_PSET
			|| map_kind == GOMP_MAP_FORCE_DEVICEPTR)
		      {
			/* If we need to initialize a temporary
			   with VAR because it is not addressable, and
			   the variable hasn't been initialized yet, then
			   we'll get a warning for the store to avar.
			   Don't warn in that case, the mapping might
			   be implicit.  */
			TREE_NO_WARNING (var) = 1;
			gimplify_assign (avar, var, &ilist);
		      }
		    avar = build_fold_addr_expr (avar);
		    gimplify_assign (x, avar, &ilist);
		    if ((GOMP_MAP_COPY_FROM_P (map_kind)
			 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
			&& !TYPE_READONLY (TREE_TYPE (var)))
		      {
			x = unshare_expr (x);
			x = build_simple_mem_ref (x);
			gimplify_assign (var, x, &olist);
		      }
		  }
		else
		  {
		    /* While MAP is handled explicitly by the FE,
		       for 'target update', only the identified variable
		       is passed.  */
		    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
			 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
			&& (omp_is_allocatable_or_ptr (var)
			    && omp_check_optional_argument (var, false)))
		      var = build_fold_indirect_ref (var);
		    else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
			      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
			     || (!omp_is_allocatable_or_ptr (var)
				 && !omp_check_optional_argument (var, false)))
		      var = build_fold_addr_expr (var);
		    gimplify_assign (x, var, &ilist);
		  }
	      }
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	      {
		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
		s = TREE_TYPE (ovar);
		if (TREE_CODE (s) == REFERENCE_TYPE
		    || omp_check_optional_argument (ovar, false))
		  s = TREE_TYPE (s);
		s = TYPE_SIZE_UNIT (s);
	      }
	    else
	      s = OMP_CLAUSE_SIZE (c);
	    if (s == NULL_TREE)
	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
	    s = fold_convert (size_type_node, s);
	    purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	    if (TREE_CODE (s) != INTEGER_CST)
	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;

	    unsigned HOST_WIDE_INT tkind, tkind_zero;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_MAP:
		tkind = OMP_CLAUSE_MAP_KIND (c);
		tkind_zero = tkind;
		if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
		  switch (tkind)
		    {
		    case GOMP_MAP_ALLOC:
		    case GOMP_MAP_IF_PRESENT:
		    case GOMP_MAP_TO:
		    case GOMP_MAP_FROM:
		    case GOMP_MAP_TOFROM:
		    case GOMP_MAP_ALWAYS_TO:
		    case GOMP_MAP_ALWAYS_FROM:
		    case GOMP_MAP_ALWAYS_TOFROM:
		    case GOMP_MAP_RELEASE:
		    case GOMP_MAP_FORCE_TO:
		    case GOMP_MAP_FORCE_FROM:
		    case GOMP_MAP_FORCE_TOFROM:
		    case GOMP_MAP_FORCE_PRESENT:
		      tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
		      break;
		    case GOMP_MAP_DELETE:
		      tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
		    default:
		      break;
		    }
		if (tkind_zero != tkind)
		  {
		    if (integer_zerop (s))
		      tkind = tkind_zero;
		    else if (integer_nonzerop (s))
		      tkind_zero = tkind;
		  }
		break;
	      case OMP_CLAUSE_FIRSTPRIVATE:
		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
		tkind = GOMP_MAP_TO;
		tkind_zero = tkind;
		break;
	      case OMP_CLAUSE_TO:
		tkind = GOMP_MAP_TO;
		tkind_zero = tkind;
		break;
	      case OMP_CLAUSE_FROM:
		tkind = GOMP_MAP_FROM;
		tkind_zero = tkind;
		break;
	      default:
		gcc_unreachable ();
	      }
	    gcc_checking_assert (tkind
				 < (HOST_WIDE_INT_C (1U) << talign_shift));
	    gcc_checking_assert (tkind_zero
				 < (HOST_WIDE_INT_C (1U) << talign_shift));
	    talign = ceil_log2 (talign);
	    tkind |= talign << talign_shift;
	    tkind_zero |= talign << talign_shift;
	    gcc_checking_assert (tkind
				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	    gcc_checking_assert (tkind_zero
				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	    if (tkind == tkind_zero)
	      x = build_int_cstu (tkind_type, tkind);
	    else
	      {
		TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
		x = build3 (COND_EXPR, tkind_type,
			    fold_build2 (EQ_EXPR, boolean_type_node,
					 unshare_expr (s), size_zero_node),
			    build_int_cstu (tkind_type, tkind_zero),
			    build_int_cstu (tkind_type, tkind));
	      }
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
	    if (nc && nc != c)
	      c = nc;
	    break;
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    if (is_oacc_parallel_or_serial (ctx))
	      goto oacc_firstprivate_map;
	    ovar = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (ovar))
	      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
	    else
	      talign = DECL_ALIGN_UNIT (ovar);
	    var = lookup_decl_in_outer_ctx (ovar, ctx);
	    x = build_sender_ref (ovar, ctx);
	    tkind = GOMP_MAP_FIRSTPRIVATE;
	    type = TREE_TYPE (ovar);
	    if (omp_is_reference (ovar))
	      type = TREE_TYPE (type);
	    if ((INTEGRAL_TYPE_P (type)
		 && TYPE_PRECISION (type) <= POINTER_SIZE)
		|| TREE_CODE (type) == POINTER_TYPE)
	      {
		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
		tree t = var;
		if (omp_is_reference (var))
		  t = build_simple_mem_ref (var);
		else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
		  TREE_NO_WARNING (var) = 1;
		if (TREE_CODE (type) != POINTER_TYPE)
		  t = fold_convert (pointer_sized_int_node, t);
		t = fold_convert (TREE_TYPE (x), t);
		gimplify_assign (x, t, &ilist);
	      }
	    else if (omp_is_reference (var))
	      gimplify_assign (x, var, &ilist);
	    else if (is_gimple_reg (var))
	      {
		tree avar = create_tmp_var (TREE_TYPE (var));
		mark_addressable (avar);
		if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
		  TREE_NO_WARNING (var) = 1;
		gimplify_assign (avar, var, &ilist);
		avar = build_fold_addr_expr (avar);
		gimplify_assign (x, avar, &ilist);
	      }
	    else
	      {
		var = build_fold_addr_expr (var);
		gimplify_assign (x, var, &ilist);
	      }
	    if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
	      s = size_int (0);
	    else if (omp_is_reference (ovar))
	      s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
	    else
	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
	    s = fold_convert (size_type_node, s);
	    purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	    if (TREE_CODE (s) != INTEGER_CST)
	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;

	    gcc_checking_assert (tkind
				 < (HOST_WIDE_INT_C (1U) << talign_shift));
	    talign = ceil_log2 (talign);
	    tkind |= talign << talign_shift;
	    gcc_checking_assert (tkind
				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				    build_int_cstu (tkind_type, tkind));
	    break;
	  case OMP_CLAUSE_USE_DEVICE_PTR:
	  case OMP_CLAUSE_USE_DEVICE_ADDR:
	  case OMP_CLAUSE_IS_DEVICE_PTR:
	    ovar = OMP_CLAUSE_DECL (c);
	    var = lookup_decl_in_outer_ctx (ovar, ctx);

	    if (lang_hooks.decls.omp_array_data (ovar, true))
	      {
		tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
			 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
		x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
	      }
	    else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
	      {
		tkind = GOMP_MAP_USE_DEVICE_PTR;
		x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
	      }
	    else
	      {
		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
		x = build_sender_ref (ovar, ctx);
	      }

	    if (is_gimple_omp_oacc (ctx->stmt))
	      {
		gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);

		if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
		  tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
	      }

	    type = TREE_TYPE (ovar);
	    if (lang_hooks.decls.omp_array_data (ovar, true))
	      var = lang_hooks.decls.omp_array_data (ovar, false);
	    else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		      && !omp_is_reference (ovar)
		      && !omp_is_allocatable_or_ptr (ovar))
		     || TREE_CODE (type) == ARRAY_TYPE)
	      var = build_fold_addr_expr (var);
	    else
	      {
		if (omp_is_reference (ovar)
		    || omp_check_optional_argument (ovar, false)
		    || omp_is_allocatable_or_ptr (ovar))
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) != ARRAY_TYPE
			&& ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
			     && !omp_is_allocatable_or_ptr (ovar))
			    || (omp_is_reference (ovar)
				&& omp_is_allocatable_or_ptr (ovar))))
		      var = build_simple_mem_ref (var);
		    var = fold_convert (TREE_TYPE (x), var);
		  }
	      }
	    tree present;
	    present = omp_check_optional_argument (ovar, true);
	    if (present)
	      {
		tree null_label = create_artificial_label (UNKNOWN_LOCATION);
		tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
		tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
		tree new_x = unshare_expr (x);
		gimplify_expr (&present, &ilist, NULL, is_gimple_val,
			       fb_rvalue);
		gcond *cond = gimple_build_cond_from_tree (present,
							   notnull_label,
							   null_label);
		gimple_seq_add_stmt (&ilist, cond);
		gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
		gimplify_assign (new_x, null_pointer_node, &ilist);
		gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
		gimple_seq_add_stmt (&ilist,
				     gimple_build_label (notnull_label));
		gimplify_assign (x, var, &ilist);
		gimple_seq_add_stmt (&ilist,
				     gimple_build_label (opt_arg_label));
	      }
	    else
	      gimplify_assign (x, var, &ilist);
	    s = size_int (0);
	    purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	    gcc_checking_assert (tkind
				 < (HOST_WIDE_INT_C (1U) << talign_shift));
	    gcc_checking_assert (tkind
				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				    build_int_cstu (tkind_type, tkind));
	    break;
	  }
      gcc_assert (map_idx == map_cnt);

      DECL_INITIAL (TREE_VEC_ELT (t, 1))
	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
      DECL_INITIAL (TREE_VEC_ELT (t, 2))
	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
      for (int i = 1; i <= 2; i++)
	if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
	  {
	    gimple_seq initlist = NULL;
	    force_gimple_operand (build1 (DECL_EXPR, void_type_node,
					  TREE_VEC_ELT (t, i)),
				  &initlist, true, NULL_TREE);
	    gimple_seq_add_seq (&ilist, initlist);

	    tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
	    gimple_seq_add_stmt (&olist,
				 gimple_build_assign (TREE_VEC_ELT (t, i),
						      clobber));
	  }

      tree clobber = build_clobber (ctx->record_type);
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }
  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  new_body = NULL;

  if (offloaded
      && ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }
  gimple_seq_add_seq (&new_body, fplist);
  if (offloaded || data_region)
    {
      tree prev = NULL_TREE;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var, x;
	  default:
	    break;
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (var)
		|| is_gimple_reg_type (TREE_TYPE (var)))
	      {
		tree new_var = lookup_decl (var, ctx);
		tree type;
		type = TREE_TYPE (var);
		if (omp_is_reference (var))
		  type = TREE_TYPE (type);
		if ((INTEGRAL_TYPE_P (type)
		     && TYPE_PRECISION (type) <= POINTER_SIZE)
		    || TREE_CODE (type) == POINTER_TYPE)
		  {
		    x = build_receiver_ref (var, false, ctx);
		    if (TREE_CODE (type) != POINTER_TYPE)
		      x = fold_convert (pointer_sized_int_node, x);
		    x = fold_convert (type, x);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    if (omp_is_reference (var))
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
		      }
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
		else
		  {
		    x = build_receiver_ref (var, !omp_is_reference (var), ctx);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
	      }
	    else if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_var = lookup_decl (pvar, ctx);
		x = build_receiver_ref (var, false, ctx);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  {
		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					    get_name (var));
		    gimple_add_tmp_var (x);
		    TREE_ADDRESSABLE (x) = 1;
		    x = build_fold_addr_expr_loc (clause_loc, x);
		  }
		else
		  break;

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_USE_DEVICE_PTR:
	  case OMP_CLAUSE_USE_DEVICE_ADDR:
	  case OMP_CLAUSE_IS_DEVICE_PTR:
	    tree new_var;
	    gimple_seq assign_body;
	    bool is_array_data;
	    bool do_optional_check;
	    assign_body = NULL;
	    do_optional_check = false;
	    var = OMP_CLAUSE_DECL (c);
	    is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;

	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
	      x = build_sender_ref (is_array_data
				    ? (splay_tree_key) &DECL_NAME (var)
				    : (splay_tree_key) &DECL_UID (var), ctx);
	    else
	      x = build_receiver_ref (var, false, ctx);

	    if (is_array_data)
	      {
		bool is_ref = omp_is_reference (var);
		do_optional_check = true;
		/* First, we copy the descriptor data from the host; then
		   we update its data to point to the target address.  */
		new_var = lookup_decl (var, ctx);
		new_var = DECL_VALUE_EXPR (new_var);
		tree v = new_var;

		if (is_ref)
		  {
		    var = build_fold_indirect_ref (var);
		    gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
				   fb_rvalue);
		    v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
		    gimple_add_tmp_var (v);
		    TREE_ADDRESSABLE (v) = 1;
		    gimple_seq_add_stmt (&assign_body,
					 gimple_build_assign (v, var));
		    tree rhs = build_fold_addr_expr (v);
		    gimple_seq_add_stmt (&assign_body,
					 gimple_build_assign (new_var, rhs));
		  }
		else
		  gimple_seq_add_stmt (&assign_body,
				       gimple_build_assign (new_var, var));

		tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v),
							   false);
		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&assign_body,
				     gimple_build_assign (v2, x));
	      }
	    else if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		new_var = lookup_decl (pvar, ctx);
		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&assign_body,
				     gimple_build_assign (new_var, x));
	      }
	    else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		      && !omp_is_reference (var)
		      && !omp_is_allocatable_or_ptr (var))
		     || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      {
		new_var = lookup_decl (var, ctx);
		new_var = DECL_VALUE_EXPR (new_var);
		gcc_assert (TREE_CODE (new_var) == MEM_REF);
		new_var = TREE_OPERAND (new_var, 0);
		gcc_assert (DECL_P (new_var));
		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&assign_body,
				     gimple_build_assign (new_var, x));
	      }
	    else
	      {
		tree type = TREE_TYPE (var);
		new_var = lookup_decl (var, ctx);
		if (omp_is_reference (var))
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) != ARRAY_TYPE
			&& (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
			    || (omp_is_reference (var)
				&& omp_is_allocatable_or_ptr (var))))
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			x = fold_convert (type, x);
			gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
				       fb_rvalue);
			gimple_seq_add_stmt (&assign_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
			do_optional_check = true;
		      }
		  }
		new_var = DECL_VALUE_EXPR (new_var);
		x = fold_convert (TREE_TYPE (new_var), x);
		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&assign_body,
				     gimple_build_assign (new_var, x));
	      }
	    tree present;
	    present = (do_optional_check
		       ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
		       : NULL_TREE);
	    if (present)
	      {
		tree null_label = create_artificial_label (UNKNOWN_LOCATION);
		tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
		tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
		glabel *null_glabel = gimple_build_label (null_label);
		glabel *notnull_glabel = gimple_build_label (notnull_label);
		ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val,
			       fb_rvalue);
		gimplify_expr (&present, &new_body, NULL, is_gimple_val,
			       fb_rvalue);
		gcond *cond = gimple_build_cond_from_tree (present,
							   notnull_label,
							   null_label);
		gimple_seq_add_stmt (&new_body, cond);
		gimple_seq_add_stmt (&new_body, null_glabel);
		gimplify_assign (new_var, null_pointer_node, &new_body);
		gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
		gimple_seq_add_stmt (&new_body, notnull_glabel);
		gimple_seq_add_seq (&new_body, assign_body);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_label (opt_arg_label));
	      }
	    else
	      gimple_seq_add_seq (&new_body, assign_body);
	    break;
	  }
      /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
	 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
	 are already handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs
	 or references to VLAs.  */
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var;
	  default:
	    break;
	  case OMP_CLAUSE_MAP:
	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		poly_int64 offset = 0;

		var = OMP_CLAUSE_DECL (c);
		if (DECL_P (var)
		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
								      ctx))
		    && varpool_node::get_create (var)->offloadable)
		  break;
		if (TREE_CODE (var) == INDIRECT_REF
		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
		  var = TREE_OPERAND (var, 0);
		if (TREE_CODE (var) == COMPONENT_REF)
		  {
		    var = get_addr_base_and_unit_offset (var, &offset);
		    gcc_assert (var != NULL_TREE && DECL_P (var));
		  }
		else if (DECL_SIZE (var)
			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
		  {
		    tree var2 = DECL_VALUE_EXPR (var);
		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
		    var2 = TREE_OPERAND (var2, 0);
		    gcc_assert (DECL_P (var2));
		    var = var2;
		  }

		tree new_var = lookup_decl (var, ctx), x;
		tree type = TREE_TYPE (new_var);
		bool is_ref;
		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			== COMPONENT_REF))
		  {
		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
		    is_ref = true;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
		  {
		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else
		  is_ref = omp_is_reference (var);
		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		  is_ref = false;
		bool ref_to_array = false;
		if (is_ref)
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) == ARRAY_TYPE)
		      {
			type = build_pointer_type (type);
			ref_to_array = true;
		      }
		  }
		else if (TREE_CODE (type) == ARRAY_TYPE)
		  {
		    tree decl2 = DECL_VALUE_EXPR (new_var);
		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
		    decl2 = TREE_OPERAND (decl2, 0);
		    gcc_assert (DECL_P (decl2));
		    new_var = decl2;
		    type = TREE_TYPE (new_var);
		  }
		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
		x = fold_convert_loc (clause_loc, type, x);
		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
		  {
		    tree bias = OMP_CLAUSE_SIZE (c);
		    if (DECL_P (bias))
		      bias = lookup_decl (bias, ctx);
		    bias = fold_convert_loc (clause_loc, sizetype, bias);
		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
					    bias);
		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (x), x, bias);
		  }
		if (ref_to_array)
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		if (is_ref && !ref_to_array)
		  {
		    tree t = create_tmp_var_raw (type, get_name (var));
		    gimple_add_tmp_var (t);
		    TREE_ADDRESSABLE (t) = 1;
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (t, x));
		    x = build_fold_addr_expr_loc (clause_loc, t);
		  }
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
		prev = NULL_TREE;
	      }
	    else if (OMP_CLAUSE_CHAIN (c)
		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
			== OMP_CLAUSE_MAP
		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			 == GOMP_MAP_FIRSTPRIVATE_POINTER
			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	      prev = c;
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    var = OMP_CLAUSE_DECL (c);
	    if (is_variable_sized (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_pvar = lookup_decl (pvar, ctx);
		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		tree al = size_int (DECL_ALIGN (var));
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_pvar, x));
	      }
	    else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  break;
		else
		  {
		    tree atmp
		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
		    tree al = size_int (TYPE_ALIGN (rtype));
		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		  }

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  }
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      if (is_oacc_parallel_or_serial (ctx))
	{
	  /* If there are reductions on the offloaded region itself, treat
	     them as a dummy GANG loop.  */
	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);

	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
				 false, NULL, NULL, &fork_seq, &join_seq, ctx);
	}

      gimple_seq_add_seq (&new_body, fork_seq);
      gimple_seq_add_seq (&new_body, tgt_body);
      gimple_seq_add_seq (&new_body, join_seq);

      if (offloaded)
	new_body = maybe_catch_exception (new_body);

      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }
  bind = gimple_build_bind (NULL, NULL,
			    tgt_bind ? gimple_bind_block (tgt_bind)
				     : NULL_TREE);
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
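/* Illustrative sketch, not part of the original source: for a host construct
   such as

     #pragma omp target map(tofrom: a)
     { a++; }

   the lowering above leaves the region body for outlining into a child
   function and materializes the .omp_data_arr/.omp_data_sizes/.omp_data_kinds
   arrays built in this function.  Roughly (the exact call is emitted later by
   pass_expand_omp in omp-expand.c, via the GOMP_target_ext libgomp entry
   point for OpenMP offload regions):

     .omp_data_arr.N.a = &a;
     .omp_data_sizes.N = { sizeof (a) };
     .omp_data_kinds.N = { GOMP_MAP_TOFROM | (align << talign_shift) };
     GOMP_target_ext (device, child_fn.N, 1, &.omp_data_arr.N,
		      &.omp_data_sizes.N, &.omp_data_kinds.N, ...);

   The sketch only shows where the three arrays end up; names like
   child_fn.N are hypothetical.  */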
/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   NULL, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  location_t loc = gimple_location (teams_stmt);
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
  gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
  gimple_set_location (call, loc);
  gimple_seq_add_stmt (&bind_body, call);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
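/* Illustrative sketch, not part of the original source: a host teams
   construct such as

     #pragma omp teams num_teams(4) thread_limit(8)
     { ... }

   is lowered so that the body is preceded by

     __builtin_GOMP_teams (4, 8);

   with either argument defaulting to 0 (meaning "implementation choice")
   when the corresponding clause is absent, as the two omp_find_clause
   branches above arrange.  */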
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (task_shared_vars
      && VAR_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  omp_context *ctx;
  vec<tree> *decls;
};

/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_ASSIGN:
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
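/* Illustrative sketch, not part of the original source: inside a cancellable
   parallel region, a plain barrier

     GOMP_barrier ();

   is rewritten by the GIMPLE_CALL case above into roughly

     cancel.N = GOMP_barrier_cancel ();
     if (cancel.N != 0) goto <cancel_label>; else goto <fallthru_label>;

   so that a cancellation observed at the barrier branches to the region's
   cancellation label instead of falling through (cancel.N and the label
   names are hypothetical).  */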
static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}

namespace {
const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

static splay_tree all_labels;
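/* Illustrative sketch, not part of the original source, of the kind of code
   these two passes reject: a branch that enters or leaves an OpenMP
   structured block, e.g.

     goto inside;		// "invalid entry to OpenMP structured block"
     #pragma omp parallel
     { inside: ; }

   Pass 1 records the enclosing construct of every label in ALL_LABELS;
   pass 2 compares each branch's context with its destination label's
   context and calls diagnose_sb_0 on a mismatch.  */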
/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    /* Otherwise, be vague and lazy, but efficient.  */
    error ("invalid branch to/from %s structured block", kind);

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}

namespace {
const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}
#include "gt-omp-low.h"