/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree-pass.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */

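/* As a rough illustration (not the exact GIMPLE this pass produces), a
   construct such as

       #pragma omp parallel shared (a)
         body;

   has BODY outlined into a child function that receives the shared data
   through a record, while the directive itself becomes a runtime call:

       struct .omp_data_s { int *a; };

       void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
       { ... body, accessing the variable via *.omp_data_i->a ... }

       .omp_data_o.a = &a;
       __builtin_GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);  */
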
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* And a hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
        v = TREE_OPERAND (v, 0);
        continue;
      case PARM_DECL:
        if (DECL_CONTEXT (v) == current_function_decl
            && DECL_ARTIFICIAL (v)
            && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
          return v;
        return NULL_TREE;
      default:
        return NULL_TREE;
      }
}

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
          && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP);
}

/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
          && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt)));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form
   allows the variable to not have been entered, otherwise we
   assert that the variable must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
                         ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
         be passing an address in this case?  Should we simply assert
         this to be false, or should we have a cleanup pass that removes
         these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
        return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
         without analyzing the expression whether or not its location
         is accessible to anyone else.  In the case of nested parallel
         regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
        return true;

      /* Do not use copy-in/copy-out for variables that have their
         address taken.  */
      if (TREE_ADDRESSABLE (decl))
        return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
         for these.  */
      if (TREE_READONLY (decl)
          || ((TREE_CODE (decl) == RESULT_DECL
               || TREE_CODE (decl) == PARM_DECL)
              && DECL_BY_REFERENCE (decl)))
        return false;

      /* Disallow copy-in/out in nested parallel if
         decl is shared in outer parallel, otherwise
         each thread could store the shared variable
         in its own copy-in location, making the
         variable no longer really shared.  */
      if (shared_ctx->is_nested)
        {
          omp_context *up;

          for (up = shared_ctx->outer; up; up = up->outer)
            if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
              break;

          if (up)
            {
              tree c;

              for (c = gimple_omp_taskreg_clauses (up->stmt);
                   c; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                    && OMP_CLAUSE_DECL (c) == decl)
                  break;

              if (c)
                goto maybe_mark_addressable_and_ret;
            }
        }

      /* For tasks avoid using copy-in/out.  As tasks can be
         deferred or executed in different thread, when GOMP_task
         returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
        {
          tree outer;
        maybe_mark_addressable_and_ret:
          outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
          if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
            {
              /* Taking address of OUTER in lower_send_shared_vars
                 might need regimplification of everything that uses the
                 variable.  */
              if (!task_shared_vars)
                task_shared_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (task_shared_vars, DECL_UID (outer));
              TREE_ADDRESSABLE (outer) = 1;
            }
          return true;
        }
    }

  return false;
}

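/* Concretely: when this predicate returns false, a shared scalar gets a
   field of its own type in .omp_data_s and is copied in and, if needed,
   back out; when it returns true, the field is instead a pointer to the
   original.  E.g. for an addressable 'int x' shared in a parallel, the
   sender stores '&x' into the record and the child dereferences it,
   roughly:

       struct .omp_data_s { int *x; };
       .omp_data_o.x = &x;            <- sender side
       ... *.omp_data_i->x ...        <- receiver side

   (Illustrative sketch; the fields themselves are created by
   install_var_field below.)  */
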
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just because task needs to take
     its address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

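/* For example, with BY_REF the reference built above is roughly
   *((*.omp_data_i).var) — one extra dereference through the pointer
   field — and just (*.omp_data_i).var otherwise.  */
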
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
                     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
            && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
           || (code == OMP_CLAUSE_PRIVATE
               && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
         even private vars in its linear etc. clauses.
         Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
         to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
        x = lookup_decl (var, outer);
      else if (outer)
        x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
        x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
        = splay_tree_lookup (outer->field_map,
                             (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
        {
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
            x = var;
          else
            x = lookup_decl (var, outer);
        }
      else
        {
          tree field = (tree) n->value;
          /* If the receiver record type was remapped in the child function,
             remap the field into the new record type.  */
          x = maybe_lookup_field (field, outer);
          if (x != NULL)
            field = x;

          x = build_simple_mem_ref (outer->receiver_decl);
          x = omp_build_component_ref (x, field);
          if (use_pointer_for_field (var, outer))
            x = build_simple_mem_ref (x);
        }
    }
  else if (outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
        {
          outer = outer->outer;
          gcc_assert (outer
                      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
        }
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
        {
          x = DECL_VALUE_EXPR (var);
          tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
          if (o != t)
            x = unshare_and_remap (x, t, o);
          else
            x = unshare_expr (x);
        }
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
              || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
              || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
              || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
                      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
        {
          sfield = build_decl (DECL_SOURCE_LOCATION (var),
                               FIELD_DECL, DECL_NAME (var), type);
          DECL_ABSTRACT_ORIGIN (sfield) = var;
          SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
          DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
          TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
          insert_field_into_struct (ctx->srecord_type, sfield);
        }
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
        {
          tree t;

          ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
          ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
          for (t = TYPE_FIELDS (ctx->record_type); t; t = TREE_CHAIN (t))
            {
              sfield = build_decl (DECL_SOURCE_LOCATION (t),
                                   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
              DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
              insert_field_into_struct (ctx->srecord_type, sfield);
              splay_tree_insert (ctx->sfield_map,
                                 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
                                 (splay_tree_value) sfield);
            }
        }
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
                                : ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

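/* A note on MASK above (a gloss inferred from the uses in this file):
   bit 0 (1) installs the field into FIELD_MAP/RECORD_TYPE, i.e. the
   receiving side; bit 1 (2) installs it into SFIELD_MAP/SRECORD_TYPE,
   the sending side used by task firstprivate functions; bit 2 (4)
   requests a pointer-to-pointer field for arrays (mask 7); bit 3 (8)
   keys the maps on &DECL_UID (VAR) rather than VAR itself so the same
   decl can have a second, distinct entry (mask 11, used e.g. for
   OMP_CLAUSE_SHARED_FIRSTPRIVATE).  */
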
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
        return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
        return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
        return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
                     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
                         TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f; f = DECL_CHAIN (f))
        {
          tree new_f = copy_node (f);
          DECL_CONTEXT (new_f) = type;
          TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
          DECL_CHAIN (new_f) = new_fields;
          walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
          walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          new_fields = new_f;

          /* Arrange to be able to look up the receiver field
             given the sender field.  */
          splay_tree_insert (ctx->field_map, (splay_tree_key) f,
                             (splay_tree_value) new_f);
        }
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

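/* For instance, a field whose type is a VLA type such as 'int[n]' needs
   'n' remapped to the child function's copy of 'n'; that is why the
   record above is rebuilt field by field instead of being remapped
   wholesale.  */
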
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            goto do_private;
          else if (!is_variable_sized (decl))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_SHARED:
          decl = OMP_CLAUSE_DECL (c);
          /* Ignore shared directives in teams construct inside of
             target construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
              && !is_host_teams_ctx (ctx))
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
              if (is_global_var (odecl))
                break;
              insert_decl_map (&ctx->cb, decl, odecl);
              break;
            }
          gcc_assert (is_taskreg_ctx (ctx));
          gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
                      || !is_variable_sized (decl));
          /* Global variables don't need to be copied,
             the receiver side will use them directly.  */
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              use_pointer_for_field (decl, ctx);
              break;
            }
          by_ref = use_pointer_for_field (decl, NULL);
          if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
              || TREE_ADDRESSABLE (decl)
              || by_ref
              || omp_is_reference (decl))
            {
              by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 3, ctx);
              install_var_local (decl, ctx);
              break;
            }
          /* We don't need to copy const scalar vars back.  */
          OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
          goto do_private;

        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) == MEM_REF)
            {
              tree t = TREE_OPERAND (decl, 0);
              if (TREE_CODE (t) == POINTER_PLUS_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == INDIRECT_REF
                  || TREE_CODE (t) == ADDR_EXPR)
                t = TREE_OPERAND (t, 0);
              install_var_local (t, ctx);
              if (is_taskreg_ctx (ctx)
                  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
                      || (is_task_ctx (ctx)
                          && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
                              || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
                                  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
                                      == POINTER_TYPE)))))
                  && !is_variable_sized (t)
                  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
                      || (!OMP_CLAUSE_REDUCTION_TASK (c)
                          && !is_task_ctx (ctx))))
                {
                  by_ref = use_pointer_for_field (t, NULL);
                  if (is_task_ctx (ctx)
                      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
                      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
                    {
                      install_var_field (t, false, 1, ctx);
                      install_var_field (t, by_ref, 2, ctx);
                    }
                  else
                    install_var_field (t, by_ref, 3, ctx);
                }
              break;
            }
          if (is_task_ctx (ctx)
              || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
                  && OMP_CLAUSE_REDUCTION_TASK (c)
                  && is_parallel_ctx (ctx)))
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
                {
                  by_ref = use_pointer_for_field (decl, ctx);
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
                    install_var_field (decl, by_ref, 3, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
              && OMP_CLAUSE_REDUCTION_TASK (c))
            {
              install_var_local (decl, ctx);
              break;
            }
          goto do_private;

        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_LINEAR:
          decl = OMP_CLAUSE_DECL (c);
        do_private:
          if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
               || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
              && is_gimple_omp_offloaded (ctx->stmt))
            {
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
                install_var_field (decl, !omp_is_reference (decl), 3, ctx);
              else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                install_var_field (decl, true, 3, ctx);
              else
                install_var_field (decl, false, 3, ctx);
            }
          if (is_variable_sized (decl))
            {
              if (is_task_ctx (ctx))
                install_var_field (decl, false, 1, ctx);
              break;
            }
          else if (is_taskreg_ctx (ctx))
            {
              bool global
                = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
              by_ref = use_pointer_for_field (decl, NULL);

              if (is_task_ctx (ctx)
                  && (global || by_ref || omp_is_reference (decl)))
                {
                  install_var_field (decl, false, 1, ctx);
                  if (!global)
                    install_var_field (decl, by_ref, 2, ctx);
                }
              else if (!global)
                install_var_field (decl, by_ref, 3, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_USE_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_field (decl, true, 3, ctx);
          else
            install_var_field (decl, false, 3, ctx);
          if (DECL_SIZE (decl)
              && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
            {
              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              install_var_local (decl2, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          goto do_private;

        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE__REDUCTEMP_:
          gcc_assert (is_taskreg_ctx (ctx));
          decl = OMP_CLAUSE_DECL (c);
          install_var_field (decl, false, 3, ctx);
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
          decl = OMP_CLAUSE_DECL (c);
          by_ref = use_pointer_for_field (decl, NULL);
          install_var_field (decl, by_ref, 3, ctx);
          break;

        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
          break;

        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_MAP:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
          decl = OMP_CLAUSE_DECL (c);
          /* Global variables with "omp declare target" attribute
             don't need to be copied, the receiver side will use them
             directly.  However, global variables with "omp declare target link"
             attribute need to be copied, as do variables when the ALWAYS
             modifier is used.  */
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable
              && !lookup_attribute ("omp declare target link",
                                    DECL_ATTRIBUTES (decl)))
            break;
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
            {
              /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
                 not offloaded; there is nothing to map for those.  */
              if (!is_gimple_omp_offloaded (ctx->stmt)
                  && !POINTER_TYPE_P (TREE_TYPE (decl))
                  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
                break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                  || (OMP_CLAUSE_MAP_KIND (c)
                      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
            {
              if (TREE_CODE (decl) == COMPONENT_REF
                  || (TREE_CODE (decl) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
                      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
                          == REFERENCE_TYPE)))
                break;
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (DECL_P (decl))
            {
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_field (decl2, true, 3, ctx);
                  install_var_local (decl2, ctx);
                  install_var_local (decl, ctx);
                }
              else
                {
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                    install_var_field (decl, true, 7, ctx);
                  else
                    install_var_field (decl, true, 3, ctx);
                  if (is_gimple_omp_offloaded (ctx->stmt)
                      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
                    install_var_local (decl, ctx);
                }
            }
          else
            {
              tree base = get_base_address (decl);
              tree nc = OMP_CLAUSE_CHAIN (c);
              if (DECL_P (base)
                  && nc != NULL_TREE
                  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
                  && OMP_CLAUSE_DECL (nc) == base
                  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
                  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
                {
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
                }
              else
                {
                  if (ctx->outer)
                    {
                      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
                      decl = OMP_CLAUSE_DECL (c);
                    }
                  gcc_assert (!splay_tree_lookup (ctx->field_map,
                                                  (splay_tree_key) decl));
                  tree field
                    = build_decl (OMP_CLAUSE_LOCATION (c),
                                  FIELD_DECL, NULL_TREE, ptr_type_node);
                  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
                  insert_field_into_struct (ctx->record_type, field);
                  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
                                     (splay_tree_value) field);
                }
            }
          break;

        case OMP_CLAUSE__GRIDDIM_:
          if (ctx->outer)
            {
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
            }
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_NONTEMPORAL:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
        case OMP_CLAUSE_TASK_REDUCTION:
          break;

        case OMP_CLAUSE_ALIGNED:
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (decl)
              && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_LINEAR:
        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (is_variable_sized (decl))
            {
              if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
                   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
                  && is_gimple_omp_offloaded (ctx->stmt))
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                  fixup_remapped_decl (decl2, ctx, false);
                }
              install_var_local (decl, ctx);
            }
          fixup_remapped_decl (decl, ctx,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                               && OMP_CLAUSE_PRIVATE_DEBUG (c));
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
              && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) != MEM_REF)
            {
              if (is_variable_sized (decl))
                install_var_local (decl, ctx);
              fixup_remapped_decl (decl, ctx, false);
            }
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_TASK_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_SHARED:
          /* Ignore shared directives in teams construct inside of
             target construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
              && !is_host_teams_ctx (ctx))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
                                                                 ctx->outer)))
                break;
              bool by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 11, ctx);
              break;
            }
          fixup_remapped_decl (decl, ctx, false);
          break;

        case OMP_CLAUSE_MAP:
          if (!is_gimple_omp_offloaded (ctx->stmt))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable)
            break;
          if (DECL_P (decl))
            {
              if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
                  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
                  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
                {
                  tree new_decl = lookup_decl (decl, ctx);
                  TREE_TYPE (new_decl)
                    = remap_type (TREE_TYPE (decl), &ctx->cb);
                }
              else if (DECL_SIZE (decl)
                       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  fixup_remapped_decl (decl2, ctx, false);
                  fixup_remapped_decl (decl, ctx, true);
                }
              else
                fixup_remapped_decl (decl, ctx, false);
            }
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_ALIGNED:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE__REDUCTEMP_:
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_NONTEMPORAL:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__GRIDDIM_:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  gcc_checking_assert (!scan_array_reductions
                       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
        if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
             || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
             || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
            && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
          }
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
                 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
                                       task_copy ? "_omp_cpyfn" : "_omp_fn");
}

/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
                                     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
                       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
        a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
        if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
          *p = TREE_CHAIN (*p);
        else
          {
            tree chain = TREE_CHAIN (*p);
            *p = copy_node (*p);
            p = &TREE_CHAIN (*p);
            *p = chain;
          }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
        g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
                            DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
                                 ? "omp target entrypoint"
                                 : "omp declare target");
      DECL_ATTRIBUTES (decl)
        = tree_cons (get_identifier (target_attr),
                     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
                  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
                  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
                      PARM_DECL, get_identifier (".omp_data_o"),
                      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}

/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
                       bool *handled_ops_p,
                       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
          && gimple_omp_for_kind (stmt)
             == *(const enum gf_mask *) (wi->info))
        {
          wi->info = stmt;
          return integer_zero_node;
        }
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
                              omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
         and then (fd.collapse - 1) temporaries with the same
         type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
          && TREE_CODE (fd.loop.n2) != INTEGER_CST)
        {
          count += fd.collapse - 1;
          /* If there are lastprivate clauses on the inner
             GIMPLE_OMP_FOR, add one more temporary for the total number
             of iterations (product of count1 ... countN-1).  */
          if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
                               OMP_CLAUSE_LASTPRIVATE))
            count++;
          else if (msk == GF_OMP_FOR_KIND_FOR
                   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
                                       OMP_CLAUSE_LASTPRIVATE))
            count++;
        }
      for (i = 0; i < count; i++)
        {
          tree temp = create_tmp_var (type);
          tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
          insert_decl_map (&outer_ctx->cb, temp, temp);
          OMP_CLAUSE_DECL (c) = temp;
          OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
          gimple_omp_taskreg_set_clauses (stmt, c);
        }
    }
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
                          OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}

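/* For instance, for a combined '#pragma omp parallel for' the parallel
   carries two _looptemp_ temporaries so the computed iteration bounds
   (istart/iend) can be handed from the runtime scheduling call to the
   outlined loop body; the collapse > 1 cases above add more of them for
   the intermediate iteration counts.  (Informal summary of the code
   above.)  */
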
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
                          OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
                                 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
        tree type = build_pointer_type (pointer_sized_int_node);
        tree temp = create_tmp_var (type);
        tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
        if (outer_ctx)
          insert_decl_map (&outer_ctx->cb, temp, temp);
        OMP_CLAUSE_DECL (c) = temp;
        OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
        gimple_omp_parallel_set_clauses (stmt, c);
        break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
                     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}

1916 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1920 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
1922 /* Ignore task directives with empty bodies, unless they have depend
1925 && gimple_omp_body (stmt
)
1926 && empty_body_p (gimple_omp_body (stmt
))
1927 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
1929 gsi_replace (gsi
, gimple_build_nop (), false);
1933 if (gimple_omp_task_taskloop_p (stmt
))
1934 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
1936 ctx
= new_omp_context (stmt
, outer_ctx
);
1938 if (gimple_omp_task_taskwait_p (stmt
))
1940 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
1944 taskreg_contexts
.safe_push (ctx
);
1945 if (taskreg_nesting_level
> 1)
1946 ctx
->is_nested
= true;
1947 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1948 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1949 name
= create_tmp_var_name (".omp_data_s");
1950 name
= build_decl (gimple_location (stmt
),
1951 TYPE_DECL
, name
, ctx
->record_type
);
1952 DECL_ARTIFICIAL (name
) = 1;
1953 DECL_NAMELESS (name
) = 1;
1954 TYPE_NAME (ctx
->record_type
) = name
;
1955 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1956 create_omp_child_function (ctx
, false);
1957 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1959 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
1961 if (ctx
->srecord_type
)
1963 name
= create_tmp_var_name (".omp_data_a");
1964 name
= build_decl (gimple_location (stmt
),
1965 TYPE_DECL
, name
, ctx
->srecord_type
);
1966 DECL_ARTIFICIAL (name
) = 1;
1967 DECL_NAMELESS (name
) = 1;
1968 TYPE_NAME (ctx
->srecord_type
) = name
;
1969 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
1970 create_omp_child_function (ctx
, true);
1973 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
1975 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
1977 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
1978 t
= build_int_cst (long_integer_type_node
, 0);
1979 gimple_omp_task_set_arg_size (stmt
, t
);
1980 t
= build_int_cst (long_integer_type_node
, 1);
1981 gimple_omp_task_set_arg_align (stmt
, t
);
/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_context returns non-NULL for some
   tree, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
        {
          if (DECL_HAS_VALUE_EXPR_P (t))
            t = unshare_expr (DECL_VALUE_EXPR (t));
          *tp = t;
          *walk_subtrees = 0;
        }
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  There are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
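
/* Schematically, for a taskloop the reordering above yields a record
   that starts

       struct .omp_data_s { <looptemp1>; <looptemp2>; <reductemp>?; ... };

   because GOMP_taskloop fills the first two fields with the iteration
   bounds of each task before invoking the task body function.  (A
   sketch; the field types depend on the loop.)  */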
/* Find the enclosing offload context.  */

static omp_context *
enclosing_target_ctx (omp_context *ctx)
{
  for (; ctx; ctx = ctx->outer)
    if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
      break;

  return ctx;
}

/* Return true if ctx is part of an oacc kernels region.  */

static bool
ctx_in_oacc_kernels_region (omp_context *ctx)
{
  for (;ctx != NULL; ctx = ctx->outer)
    {
      gimple *stmt = ctx->stmt;
      if (gimple_code (stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
	return true;
    }

  return false;
}

/* Check the parallelism clauses inside a kernels regions.
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL,  ctx->outer);
  if (!stmt)
    {
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
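
/* For instance, this check rejects OpenACC kernels code such as
   (an illustrative example):

       #pragma acc kernels
       {
	 #pragma acc loop gang
	 for (int i = 0; i < n; i++)
	   #pragma acc loop gang
	   for (int j = 0; j < n; j++)
	     ;
       }

   with "inner loop uses same OpenACC parallelism as containing loop".  */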
/* Scan a GIMPLE_OMP_FOR.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      if (!tgt || is_oacc_parallel (tgt))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    char const *check = NULL;

	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		check = "gang";
		break;

	      case OMP_CLAUSE_WORKER:
		check = "worker";
		break;

	      case OMP_CLAUSE_VECTOR:
		check = "vector";
		break;

	      default:
		break;
	      }

	    if (check && OMP_CLAUSE_OPERAND (c, 0))
	      error_at (gimple_location (stmt),
			"argument not permitted on %qs clause in"
			" OpenACC %<parallel%>", check);
	  }

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	  check_oacc_kernel_gwv (stmt, ctx);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);

  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
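
/* The sequence built above is, schematically:

       cond = GOMP_USE_SIMT ();
       if (cond != 0) goto lab1; else goto lab2;
     lab1:
       <copy of the loop, with an added _simt_ clause>
       goto lab3;
     lab2:
       <original simd loop>
     lab3:

   so a later pass can keep whichever version matches the offload
   target and discard the other.  */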
/* Scan an OpenMP sections directive.  */

static void
scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;

  ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
}

/* Scan an OpenMP single directive.  */

static void
scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}
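
/* A rough example: for

       #pragma omp single copyprivate(x)
       x = compute ();

   the record built above (roughly struct .omp_copy_s { int *x; })
   broadcasts the value computed by the thread that executed the
   single region to all other threads.  (Illustrative only; the
   exact fields depend on the clauses.)  */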
/* Scan a GIMPLE_OMP_TARGET.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }
}
/* Scan an OpenMP teams directive.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  if (!gimple_omp_teams_host (stmt))
    {
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  taskreg_contexts.safe_push (ctx);
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Check nesting restrictions.  */

static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
    /* GRID_BODY is an artificial construct, nesting rules will be checked in
       the original copy of its contents.  */
    return true;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (!(is_gimple_omp (stmt)
	&& is_gimple_omp_oacc (stmt))
      /* Except for atomic codes that we share with OpenMP.  */
      && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	{
	  c = NULL_TREE;
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<for simd%> region");
		      return false;
		    }
		  return true;
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
	    return true;
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than %<#pragma omp ordered simd%>"
		    " or %<#pragma omp atomic%> may not be nested inside"
		    " %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
		   && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%> or %<parallel%> regions are "
			"allowed to be strictly nested inside %<teams%> "
			"region");
	      return false;
	    }
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		    ok = true;
		    break;

		  default:
		    break;
		  }

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "#pragma omp cancel"
	      : "#pragma omp cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "#pragma omp parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "#pragma omp for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<#pragma omp cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<#pragma omp cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "#pragma omp sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<#pragma omp cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<#pragma omp cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
		bad = "#pragma omp task";
	      else
		{
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<critical%>, %<ordered%>, "
			  "%<master%>, explicit %<task%> or %<taskloop%> "
			  "region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<critical%>, %<ordered%>, "
		      "%<master%>, explicit %<task%> or %<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%<master%> region may not be closely nested inside "
		      "of work-sharing, explicit %<task%> or %<taskloop%> "
		      "region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    {
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }
	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
	      || kind == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree oclause;
	      /* Look for containing ordered(N) loop.  */
	      if (ctx == NULL
		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || (oclause
			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside an %<ordered%> "
			    "loop");
		  return false;
		}
	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside a loop with "
			    "%<ordered%> clause with a parameter");
		  return false;
		}
	    }
	  else
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"invalid depend kind in omp %<ordered%> %<depend%>");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region");
	      return false;
	    }
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    tree o;
	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED);
	    if (o == NULL_TREE)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region without %<depend%> clause may "
			  "not be closely nested inside a loop region with "
			  "an %<ordered%> clause with a parameter");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
		= dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
	break;
      else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	       || (gimple_omp_target_kind (ctx->stmt)
		   != GF_OMP_TARGET_KIND_REGION))
	{
	  /* Teams construct can appear either strictly nested inside of
	     target construct with no intervening stmts, or can be encountered
	     only by initial task (so must not appear inside any OpenMP
	     construct).  */
	  error_at (gimple_location (stmt),
		    "%<teams%> construct must be closely nested inside of "
		    "%<target%> construct or not nested in any OpenMP "
		    "construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
	      stmt_name = "enter/exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		{
		  warning_at (gimple_location (stmt), 0,
			      "%qs construct inside of %qs region",
			      stmt_name, ctx_stmt_name);
		}
	    }
	}
      break;
    default:
      break;
    }

  return true;
}
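
/* For example, the work-sharing checks above reject

       #pragma omp critical
       {
	 #pragma omp barrier
       }

   with "barrier region may not be closely nested inside of
   work-sharing, %<critical%>, ... region".  */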
/* Helper function scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
/* Return true if FNDECL is a setjmp or a longjmp.  */

static bool
setjmp_or_longjmp_p (const_tree fndecl)
{
  if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
      || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
    return true;

  tree declname = DECL_NAME (fndecl);
  if (!declname || !TREE_PUBLIC (fndecl))
    return false;
  const char *name = IDENTIFIER_POINTER (declname);
  return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
}
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  if (setjmp_or_longjmp_p (fndecl)
	      && ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside simd construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	}
    }
  if (remove)
    {
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ())
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_GRID_BODY:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
/* Scan all the statements starting at the current statement.  CTX
   contains context information about the OMP directives and
   clauses found during the scan.  */

static void
scan_omp (gimple_seq *body_p, omp_context *ctx)
{
  location_t saved_location;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  saved_location = input_location;
  walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
  input_location = saved_location;
}
/* Re-gimplification and code generation routines.  */

/* Remove omp_member_access_dummy_var variables from gimple_bind_vars
   of BIND if in a method.  */

static void
maybe_remove_omp_member_access_dummy_vars (gbind *bind)
{
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    {
      tree vars = gimple_bind_vars (bind);
      for (tree *pvar = &vars; *pvar; )
	if (omp_member_access_dummy_var (*pvar))
	  *pvar = DECL_CHAIN (*pvar);
	else
	  pvar = &DECL_CHAIN (*pvar);
      gimple_bind_set_vars (bind, vars);
    }
}

/* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
   block and its subblocks.  */

static void
remove_member_access_dummy_vars (tree block)
{
  for (tree *pvar = &BLOCK_VARS (block); *pvar; )
    if (omp_member_access_dummy_var (*pvar))
      *pvar = DECL_CHAIN (*pvar);
    else
      pvar = &DECL_CHAIN (*pvar);

  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    remove_member_access_dummy_vars (block);
}
/* If a context was created for STMT when it was scanned, return it.  */

static omp_context *
maybe_lookup_ctx (gimple *stmt)
{
  splay_tree_node n;
  n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
  return n ? (omp_context *) n->value : NULL;
}
/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context.  Suppose that we have the
   following parallel nesting (variable UIDs showed for clarity):

   iD.1562 = 0;
	#omp parallel shared(iD.1562)		-> outer parallel
	  iD.1562 = iD.1562 + 1;

	  #omp parallel shared (iD.1562)	-> inner parallel
	     iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

	outer parallel		.omp_data_s.1.i -> iD.1562
	inner parallel		.omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it.  So, in
   this case we would have:

	iD.1562 = 0;
	.omp_data_o.1.i = iD.1562;
	#omp parallel shared(iD.1562)		-> outer parallel
	  .omp_data_i.1 = &.omp_data_o.1
	  .omp_data_i.1->i = .omp_data_i.1->i + 1;

	  .omp_data_o.2.i = iD.1562;		-> **
	  #omp parallel shared(iD.1562)		-> inner parallel
	    .omp_data_i.2 = &.omp_data_o.2
	    .omp_data_i.2->i = .omp_data_i.2->i - 1;

	    ** This is a problem.  The symbol iD.1562 cannot be referenced
	       inside the body of the outer parallel region.  But since we are
	       emitting this copy operation while expanding the inner parallel
	       directive, we need to access the CTX structure of the outer
	       parallel directive to get the correct mapping:

	  .omp_data_o.2.i = .omp_data_i.1->i

   Since there may be other workshare or parallel directives enclosing
   the parallel directive, it may be necessary to walk up the context
   parent chain.  This is not a problem in general because nested
   parallelism happens only rarely.  */

static tree
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  gcc_assert (!ctx->is_nested || t || is_global_var (decl));

  return t ? t : decl;
}


/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
   in outer contexts.  */

static tree
maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t = NULL;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  return t ? t : decl;
}
/* Construct the initialization value for reduction operation OP.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}

/* Construct the initialization value for reduction CLAUSE.  */

tree
omp_reduction_init (tree clause, tree type)
{
  return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
				OMP_CLAUSE_REDUCTION_CODE (clause), type);
}
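
/* For reference, the identity elements chosen above are:

       +, -, |, ^, ||, !=	0
       *, &&, ==		1
       &			~0 (all bits set)
       max			minimum value (-inf when infinities honored)
       min			maximum value (+inf when infinities honored)  */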
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_sizes sizes;
  targetm.vectorize.autovectorize_vector_sizes (&sizes);
  poly_uint64 vs = 0;
  for (unsigned int i = 0; i < sizes.length (); ++i)
    vs = ordered_max (vs, sizes[i]);
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	while (maybe_ne (vs, 0U)
	       && known_lt (GET_MODE_SIZE (vmode), vs)
	       && GET_MODE_2XWIDER_MODE (vmode).exists ())
	  vmode = GET_MODE_2XWIDER_MODE (vmode).require ();

	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
				       GET_MODE_SIZE (mode));
	type = build_vector_type (type, nelts);
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
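
/* Downstream, lower_rec_input_clauses uses this value roughly as

       x = __builtin_assume_aligned (x, <alignment>);

   for each variable in an aligned() clause, so e.g. aligned(p:32)
   lets the vectorizer assume 32-byte alignment of *p.  (A sketch;
   see the OMP_CLAUSE_ALIGNED handling below for the exact trees.)  */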
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

struct omplow_simd_context {
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  tree idx;
  tree lane;
  vec<tree, va_heap> simt_eargs;
  gimple_seq simt_dlist;
  poly_uint64_pod max_vf;
  bool is_simt;
};
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
{
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_constructor (type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
    }

  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
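
/* E.g. for "#pragma omp simd private(x)" with int x and max_vf 8, the
   non-SIMT path above creates, roughly,

       int avar[8] __attribute__((omp simd array));

   and rewrites uses of x inside the loop to avar[lane] via
   DECL_VALUE_EXPR; the vectorizer later turns the array into a vector
   with one element per SIMD lane.  (Names here are invented.)  */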
/* Helper function of lower_rec_input_clauses.  For a reference
   in simd reduction, add an underlying variable it will reference.  */

static void
handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
{
  tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
  if (TREE_CONSTANT (z))
    {
      z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
			      get_name (new_vard));
      gimple_add_tmp_var (z);
      TREE_ADDRESSABLE (z) = 1;
      z = build_fold_addr_expr_loc (loc, z);
      gimplify_assign (new_vard, z, ilist);
    }
}
/* Helper function for lower_rec_input_clauses.  Emit into ilist sequence
   code to emit (type) (tskred_temp[idx]).  */

static tree
task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
		     unsigned idx)
{
  unsigned HOST_WIDE_INT sz
    = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
  tree r = build2 (MEM_REF, pointer_sized_int_node,
		   tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
					       sz * idx));
  tree v = create_tmp_var (pointer_sized_int_node);
  gimple *g = gimple_build_assign (v, r);
  gimple_seq_add_stmt (ilist, g);
  if (!useless_type_conversion_p (type, pointer_sized_int_node))
    {
      v = create_tmp_var (type);
      g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
      gimple_seq_add_stmt (ilist, g);
    }
  return v;
}
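
/* Schematically, for IDX 1 this emits

       v.0 = MEM[(uintptr_t *) tskred_temp + sizeof (uintptr_t) * 1];
       v.1 = (TYPE) v.0;	(only when a conversion is needed)

   and returns the last temporary.  (A sketch; the temporaries are
   unnamed.)  */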
/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
   from the receiver (aka child) side and initializers for REFERENCE_TYPE
   private variables.  Initialization statements go in ILIST, while calls
   to destructors go in DLIST.  */

static void
lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
			 omp_context *ctx, struct omp_for_data *fd)
{
  tree c, dtor, copyin_seq, x, ptr;
  bool copyin_by_ref = false;
  bool lastprivate_firstprivate = false;
  bool reduction_omp_orig_ref = false;
  int pass;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
  omplow_simd_context sctx = omplow_simd_context ();
  tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
  tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
  gimple_seq llist[3] = { };

  copyin_seq = NULL;
  sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);

  /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
     with data sharing clauses referencing variable sized vars.  That
     is unnecessarily hard to support and very unlikely to result in
     vectorized code anyway.  */
  if (is_simd)
    for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
	    sctx.max_vf = 1;
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LASTPRIVATE:
	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
	    sctx.max_vf = 1;
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
	    sctx.max_vf = 1;
	  break;
	default:
	  continue;
	}

  /* Add a placeholder for simduid.  */
  if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
    sctx.simt_eargs.safe_push (NULL_TREE);

  unsigned task_reduction_cnt = 0;
  unsigned task_reduction_cntorig = 0;
  unsigned task_reduction_cnt_full = 0;
  unsigned task_reduction_cntorig_full = 0;
  unsigned task_reduction_other_cnt = 0;
  tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
  tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
  /* Do all the fixed sized types in the first pass, and the variable sized
     types in the second pass.  This makes sure that the scalar arguments to
     the variable sized types are processed before we use them in the
     variable sized operations.  For task reductions we use 4 passes, in the
     first two we ignore them, in the third one gather arguments for
     GOMP_task_reduction_remap call and in the last pass actually handle
     the task reductions.  */
  for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
			 ? 4 : 2); ++pass)
    {
      if (pass == 2 && task_reduction_cnt)
	{
	  tskred_atype
	    = build_array_type_nelts (ptr_type_node, task_reduction_cnt
						     + task_reduction_cntorig);
	  tskred_avar = create_tmp_var_raw (tskred_atype);
	  gimple_add_tmp_var (tskred_avar);
	  TREE_ADDRESSABLE (tskred_avar) = 1;
	  task_reduction_cnt_full = task_reduction_cnt;
	  task_reduction_cntorig_full = task_reduction_cntorig;
	}
      else if (pass == 3 && task_reduction_cnt)
	{
	  x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
	  gimple *g
	    = gimple_build_call (x, 3, size_int (task_reduction_cnt),
				 size_int (task_reduction_cntorig),
				 build_fold_addr_expr (tskred_avar));
	  gimple_seq_add_stmt (ilist, g);
	}
      if (pass == 3 && task_reduction_other_cnt)
	{
	  /* For reduction clauses, build
	     tskred_base = (void *) tskred_temp[2]
			   + omp_get_thread_num () * tskred_temp[1]
	     or if tskred_temp[1] is known to be constant, that constant
	     directly.  This is the start of the private reduction copy block
	     for the current thread.  */
	  tree v = create_tmp_var (integer_type_node);
	  x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
	  gimple *g = gimple_build_call (x, 0);
	  gimple_call_set_lhs (g, v);
	  gimple_seq_add_stmt (ilist, g);
	  c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tskred_temp = OMP_CLAUSE_DECL (c);
	  if (is_taskreg_ctx (ctx))
	    tskred_temp = lookup_decl (tskred_temp, ctx);
	  tree v2 = create_tmp_var (sizetype);
	  g = gimple_build_assign (v2, NOP_EXPR, v);
	  gimple_seq_add_stmt (ilist, g);
	  if (ctx->task_reductions[0])
	    v = fold_convert (sizetype, ctx->task_reductions[0]);
	  else
	    v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
	  tree v3 = create_tmp_var (sizetype);
	  g = gimple_build_assign (v3, MULT_EXPR, v2, v);
	  gimple_seq_add_stmt (ilist, g);
	  v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
	  tskred_base = create_tmp_var (ptr_type_node);
	  g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
	  gimple_seq_add_stmt (ilist, g);
	}
      task_reduction_cnt = 0;
      task_reduction_cntorig = 0;
      task_reduction_other_cnt = 0;
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
	{
	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
	  tree var, new_var;
	  bool by_ref;
	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	  bool task_reduction_p = false;
	  bool task_reduction_needs_orig_p = false;
	  tree cond = NULL_TREE;

	  switch (c_kind)
	    {
	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
		continue;
	      break;
	    case OMP_CLAUSE_SHARED:
	      /* Ignore shared directives in teams construct inside
		 of target construct.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && !is_host_teams_ctx (ctx))
		continue;
	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
		{
		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
			      || is_global_var (OMP_CLAUSE_DECL (c)));
		  continue;
		}
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_COPYIN:
	      break;
	    case OMP_CLAUSE_LINEAR:
	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		lastprivate_firstprivate = true;
	      break;
	    case OMP_CLAUSE_REDUCTION:
	    case OMP_CLAUSE_IN_REDUCTION:
	      if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
		{
		  task_reduction_p = true;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
		    {
		      task_reduction_other_cnt++;
		      if (pass == 2)
			continue;
		    }
		  else
		    task_reduction_cnt++;
		  if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		    {
		      var = OMP_CLAUSE_DECL (c);
		      /* If var is a global variable that isn't privatized
			 in outer contexts, we don't need to look up the
			 original address, it is always the address of the
			 global variable itself.  */
		      if (!DECL_P (var)
			  || omp_is_reference (var)
			  || !is_global_var
				(maybe_lookup_decl_in_outer_ctx (var, ctx)))
			{
			  task_reduction_needs_orig_p = true;
			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
			    task_reduction_cntorig++;
			}
		    }
		}
	      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		reduction_omp_orig_ref = true;
	      break;
	    case OMP_CLAUSE__REDUCTEMP_:
	      if (!is_taskreg_ctx (ctx))
		continue;
	      /* FALLTHRU */
	    case OMP_CLAUSE__LOOPTEMP_:
	      /* Handle _looptemp_/_reductemp_ clauses only on
		 parallel/task.  */
	      if (fd)
		continue;
	      break;
	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		{
		  lastprivate_firstprivate = true;
		  if (pass != 0 || is_taskloop_ctx (ctx))
		    continue;
		}
	      /* Even without corresponding firstprivate, if
		 decl is Fortran allocatable, it needs outer var
		 reference.  */
	      else if (pass == 0
		       && lang_hooks.decls.omp_private_outer_ref
							(OMP_CLAUSE_DECL (c)))
		lastprivate_firstprivate = true;
	      break;
	    case OMP_CLAUSE_ALIGNED:
	      if (pass != 1)
		continue;
	      var = OMP_CLAUSE_DECL (c);
	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
		  && !is_global_var (var))
		{
		  new_var = maybe_lookup_decl (var, ctx);
		  if (new_var == NULL_TREE)
		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
		  tree alarg = omp_clause_aligned_alignment (c);
		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
		  gimplify_and_add (x, ilist);
		}
	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		       && is_global_var (var))
		{
		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
		  new_var = lookup_decl (var, ctx);
		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  t = build_fold_addr_expr_loc (clause_loc, t);
		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
		  tree alarg = omp_clause_aligned_alignment (c);
		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
		  t = fold_convert_loc (clause_loc, ptype, t);
		  x = create_tmp_var (ptype);
		  t = build2 (MODIFY_EXPR, ptype, x, t);
		  gimplify_and_add (t, ilist);
		  t = build_simple_mem_ref_loc (clause_loc, x);
		  SET_DECL_VALUE_EXPR (new_var, t);
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      continue;
	    default:
	      continue;
	    }

	  if (task_reduction_p != (pass >= 2))
	    continue;

	  new_var = var = OMP_CLAUSE_DECL (c);
	  if ((c_kind == OMP_CLAUSE_REDUCTION
	       || c_kind == OMP_CLAUSE_IN_REDUCTION)
	      && TREE_CODE (var) == MEM_REF)
	    {
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == INDIRECT_REF
		  || TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      new_var = var;
	    }
	  if (c_kind != OMP_CLAUSE_COPYIN)
	    new_var = lookup_decl (var, ctx);

	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
	    {
	      if (pass != 0)
		continue;
	    }
	  /* C/C++ array section reductions.  */
	  else if ((c_kind == OMP_CLAUSE_REDUCTION
		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
		   && var != OMP_CLAUSE_DECL (c))
	    {
	      if (pass == 0)
		continue;

	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);

	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (orig_var, 1);
		  b = maybe_lookup_decl (b, ctx);
		  if (b == NULL)
		    {
		      b = TREE_OPERAND (orig_var, 1);
		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		    }
		  if (integer_zerop (bias))
		    bias = b;
		  else
		    {
		      bias = fold_convert_loc (clause_loc,
					       TREE_TYPE (b), bias);
		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					      TREE_TYPE (b), b, bias);
		    }
		  orig_var = TREE_OPERAND (orig_var, 0);
		}
	      if (pass == 2)
		{
		  tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  if (is_global_var (out)
		      && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
		      && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
			  || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
			      != POINTER_TYPE)))
		    x = var;
		  else
		    {
		      bool by_ref = use_pointer_for_field (var, NULL);
		      x = build_receiver_ref (var, by_ref, ctx);
		      if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
			  && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
			      == POINTER_TYPE))
			x = build_fold_addr_expr (x);
		    }
		  if (TREE_CODE (orig_var) == INDIRECT_REF)
		    x = build_simple_mem_ref (x);
		  else if (TREE_CODE (orig_var) == ADDR_EXPR)
		    {
		      if (var == TREE_OPERAND (orig_var, 0))
			x = build_fold_addr_expr (x);
		    }
		  bias = fold_convert (sizetype, bias);
		  x = fold_convert (ptr_type_node, x);
		  x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				       TREE_TYPE (x), x, bias);
		  unsigned cnt = task_reduction_cnt - 1;
		  if (!task_reduction_needs_orig_p)
		    cnt += (task_reduction_cntorig_full
			    - task_reduction_cntorig);
		  else
		    cnt = task_reduction_cntorig - 1;
		  tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				   size_int (cnt), NULL_TREE, NULL_TREE);
		  gimplify_assign (r, x, ilist);
		  continue;
		}

	      if (TREE_CODE (orig_var) == INDIRECT_REF
		  || TREE_CODE (orig_var) == ADDR_EXPR)
		orig_var = TREE_OPERAND (orig_var, 0);
	      tree d = OMP_CLAUSE_DECL (c);
	      tree type = TREE_TYPE (d);
	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      const char *name = get_name (orig_var);
	      if (pass == 3)
		{
		  tree xv = create_tmp_var (ptr_type_node);
		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		    {
		      unsigned cnt = task_reduction_cnt - 1;
		      if (!task_reduction_needs_orig_p)
			cnt += (task_reduction_cntorig_full
				- task_reduction_cntorig);
		      else
			cnt = task_reduction_cntorig - 1;
		      x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				  size_int (cnt), NULL_TREE, NULL_TREE);

		      gimple *g = gimple_build_assign (xv, x);
		      gimple_seq_add_stmt (ilist, g);
		    }
		  else
		    {
		      unsigned int idx = *ctx->task_reduction_map->get (c);
		      tree off;
		      if (ctx->task_reductions[1 + idx])
			off = fold_convert (sizetype,
					    ctx->task_reductions[1 + idx]);
		      else
			off = task_reduction_read (ilist, tskred_temp, sizetype,
						   7 + 3 * idx + 1);
		      gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
						       tskred_base, off);
		      gimple_seq_add_stmt (ilist, g);
		    }
		  x = fold_convert (build_pointer_type (boolean_type_node),
				    xv);
		  if (TREE_CONSTANT (v))
		    x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
				     TYPE_SIZE_UNIT (type));
		  else
		    {
		      tree t = maybe_lookup_decl (v, ctx);
		      if (t)
			v = t;
		      else
			v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		      gimplify_expr (&v, ilist, NULL, is_gimple_val,
				     fb_rvalue);
		      t = fold_build2_loc (clause_loc, PLUS_EXPR,
					   TREE_TYPE (v), v,
					   build_int_cst (TREE_TYPE (v), 1));
		      t = fold_build2_loc (clause_loc, MULT_EXPR,
					   TREE_TYPE (v), t,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		      x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
		    }
		  cond = create_tmp_var (TREE_TYPE (x));
		  gimplify_assign (cond, x, ilist);
		  x = xv;
		}
	      else if (TREE_CONSTANT (v))
		{
		  x = create_tmp_var_raw (type, name);
		  gimple_add_tmp_var (x);
		  TREE_ADDRESSABLE (x) = 1;
		  x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else
		{
		  tree atmp
		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  tree t = maybe_lookup_decl (v, ctx);
		  if (t)
		    v = t;
		  else
		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
				       TREE_TYPE (v), v,
				       build_int_cst (TREE_TYPE (v), 1));
		  t = fold_build2_loc (clause_loc, MULT_EXPR,
				       TREE_TYPE (v), t,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
		  x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
		}

	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      x = fold_convert_loc (clause_loc, ptype, x);
	      tree y = create_tmp_var (ptype, name);
	      gimplify_assign (y, x, ilist);
	      x = y;
	      tree yb = y;

	      if (!integer_zerop (bias))
		{
		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node
4018 new_var
= var
= OMP_CLAUSE_DECL (c
);
4019 if ((c_kind
== OMP_CLAUSE_REDUCTION
4020 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4021 && TREE_CODE (var
) == MEM_REF
)
4023 var
= TREE_OPERAND (var
, 0);
4024 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
4025 var
= TREE_OPERAND (var
, 0);
4026 if (TREE_CODE (var
) == INDIRECT_REF
4027 || TREE_CODE (var
) == ADDR_EXPR
)
4028 var
= TREE_OPERAND (var
, 0);
4029 if (is_variable_sized (var
))
4031 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
4032 var
= DECL_VALUE_EXPR (var
);
4033 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
4034 var
= TREE_OPERAND (var
, 0);
4035 gcc_assert (DECL_P (var
));
4039 if (c_kind
!= OMP_CLAUSE_COPYIN
)
4040 new_var
= lookup_decl (var
, ctx
);
4042 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
	  /* C/C++ array section reductions.  */
	  else if ((c_kind == OMP_CLAUSE_REDUCTION
		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
		   && var != OMP_CLAUSE_DECL (c))
	    {
	      if (pass == 0)
		continue;

	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);

	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (orig_var, 1);
		  b = maybe_lookup_decl (b, ctx);
		  if (b == NULL)
		    {
		      b = TREE_OPERAND (orig_var, 1);
		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		    }
		  if (integer_zerop (bias))
		    bias = b;
		  else
		    {
		      bias = fold_convert_loc (clause_loc,
					       TREE_TYPE (b), bias);
		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					      TREE_TYPE (b), b, bias);
		    }
		  orig_var = TREE_OPERAND (orig_var, 0);
		}
	      if (pass == 2)
		{
		  tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  if (is_global_var (out)
		      && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
		      && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
			  || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
			      != POINTER_TYPE)))
		    x = var;
		  else
		    {
		      bool by_ref = use_pointer_for_field (var, NULL);
		      x = build_receiver_ref (var, by_ref, ctx);
		      if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
			  && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
			      == POINTER_TYPE))
			x = build_fold_addr_expr (x);
		    }
		  if (TREE_CODE (orig_var) == INDIRECT_REF)
		    x = build_simple_mem_ref (x);
		  else if (TREE_CODE (orig_var) == ADDR_EXPR)
		    {
		      if (var == TREE_OPERAND (orig_var, 0))
			x = build_fold_addr_expr (x);
		    }
		  bias = fold_convert (sizetype, bias);
		  x = fold_convert (ptr_type_node, x);
		  x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				       TREE_TYPE (x), x, bias);
		  unsigned cnt = task_reduction_cnt - 1;
		  if (!task_reduction_needs_orig_p)
		    cnt += (task_reduction_cntorig_full
			    - task_reduction_cntorig);
		  else
		    cnt = task_reduction_cntorig - 1;
		  tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				   size_int (cnt), NULL_TREE, NULL_TREE);
		  gimplify_assign (r, x, ilist);
		  continue;
		}
	      if (TREE_CODE (orig_var) == INDIRECT_REF
		  || TREE_CODE (orig_var) == ADDR_EXPR)
		orig_var = TREE_OPERAND (orig_var, 0);
	      tree d = OMP_CLAUSE_DECL (c);
	      tree type = TREE_TYPE (d);
	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      const char *name = get_name (orig_var);
	      if (pass == 3)
		{
		  tree xv = create_tmp_var (ptr_type_node);
		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		    {
		      unsigned cnt = task_reduction_cnt - 1;
		      if (!task_reduction_needs_orig_p)
			cnt += (task_reduction_cntorig_full
				- task_reduction_cntorig);
		      else
			cnt = task_reduction_cntorig - 1;
		      x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				  size_int (cnt), NULL_TREE, NULL_TREE);

		      gimple *g = gimple_build_assign (xv, x);
		      gimple_seq_add_stmt (ilist, g);
		    }
		  else
		    {
		      unsigned int idx = *ctx->task_reduction_map->get (c);
		      tree off;
		      if (ctx->task_reductions[1 + idx])
			off = fold_convert (sizetype,
					    ctx->task_reductions[1 + idx]);
		      else
			off = task_reduction_read (ilist, tskred_temp, sizetype,
						   7 + 3 * idx + 1);
		      gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
						       tskred_base, off);
		      gimple_seq_add_stmt (ilist, g);
		    }
		  x = fold_convert (build_pointer_type (boolean_type_node),
				    xv);
		  if (TREE_CONSTANT (v))
		    x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
				     TYPE_SIZE_UNIT (type));
		  else
		    {
		      tree t = maybe_lookup_decl (v, ctx);
		      if (t)
			v = t;
		      else
			v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		      gimplify_expr (&v, ilist, NULL, is_gimple_val,
				     fb_rvalue);
		      t = fold_build2_loc (clause_loc, PLUS_EXPR,
					   TREE_TYPE (v), v,
					   build_int_cst (TREE_TYPE (v), 1));
		      t = fold_build2_loc (clause_loc, MULT_EXPR,
					   TREE_TYPE (v), t,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		      x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
		    }
		  cond = create_tmp_var (TREE_TYPE (x));
		  gimplify_assign (cond, x, ilist);
		  x = xv;
		}
	      else if (TREE_CONSTANT (v))
		{
		  x = create_tmp_var_raw (type, name);
		  gimple_add_tmp_var (x);
		  TREE_ADDRESSABLE (x) = 1;
		  x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else
		{
		  tree atmp
		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  tree t = maybe_lookup_decl (v, ctx);
		  if (t)
		    v = t;
		  else
		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
				       TREE_TYPE (v), v,
				       build_int_cst (TREE_TYPE (v), 1));
		  t = fold_build2_loc (clause_loc, MULT_EXPR,
				       TREE_TYPE (v), t,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
		  x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
		}

	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      x = fold_convert_loc (clause_loc, ptype, x);
	      tree y = create_tmp_var (ptype, name);
	      gimplify_assign (y, x, ilist);
	      x = y;
	      tree yb = y;

	      if (!integer_zerop (bias))
		{
		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
					   bias);
		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
					 y);
		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
					pointer_sized_int_node, yb, bias);
		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
		  yb = create_tmp_var (ptype, name);
		  gimplify_assign (yb, x, ilist);
		}
	      d = TREE_OPERAND (d, 0);
	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
		d = TREE_OPERAND (d, 0);
	      if (TREE_CODE (d) == ADDR_EXPR)
		{
		  if (orig_var != var)
		    {
		      gcc_assert (is_variable_sized (orig_var));
		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
					    x);
		      gimplify_assign (new_var, x, ilist);
		      tree new_orig_var = lookup_decl (orig_var, ctx);
		      tree t = build_fold_indirect_ref (new_var);
		      DECL_IGNORED_P (new_var) = 0;
		      TREE_THIS_NOTRAP (t) = 1;
		      SET_DECL_VALUE_EXPR (new_orig_var, t);
		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
		    }
		  else
		    {
		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
				  build_int_cst (ptype, 0));
		      SET_DECL_VALUE_EXPR (new_var, x);
		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		    }
		}
	      else
		{
		  gcc_assert (orig_var == var);
		  if (TREE_CODE (d) == INDIRECT_REF)
		    {
		      x = create_tmp_var (ptype, name);
		      TREE_ADDRESSABLE (x) = 1;
		      gimplify_assign (x, yb, ilist);
		      x = build_fold_addr_expr_loc (clause_loc, x);
		    }
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  gimplify_assign (new_var, x, ilist);
		}
	      /* GOMP_taskgroup_reduction_register memsets the whole
		 array to zero.  If the initializer is zero, we don't
		 need to initialize it again, just mark it as ever
		 used unconditionally, i.e. cond = true.  */
	      if (cond
		  && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
		  && initializer_zerop (omp_reduction_init (c,
							    TREE_TYPE (type))))
		{
		  gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
						   boolean_true_node);
		  gimple_seq_add_stmt (ilist, g);
		  continue;
		}
	      tree end = create_artificial_label (UNKNOWN_LOCATION);
	      if (cond)
		{
		  gimple *g;
		  if (!is_parallel_ctx (ctx))
		    {
		      tree condv = create_tmp_var (boolean_type_node);
		      g = gimple_build_assign (condv,
					       build_simple_mem_ref (cond));
		      gimple_seq_add_stmt (ilist, g);
		      tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
		      g = gimple_build_cond (NE_EXPR, condv,
					     boolean_false_node, end, lab1);
		      gimple_seq_add_stmt (ilist, g);
		      gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
		    }
		  g = gimple_build_assign (build_simple_mem_ref (cond),
					   boolean_true_node);
		  gimple_seq_add_stmt (ilist, g);
		}

	      tree y1 = create_tmp_var (ptype);
	      gimplify_assign (y1, y, ilist);
	      tree i2 = NULL_TREE, y2 = NULL_TREE;
	      tree body2 = NULL_TREE, end2 = NULL_TREE;
	      tree y3 = NULL_TREE, y4 = NULL_TREE;
	      if (task_reduction_needs_orig_p)
		{
		  y3 = create_tmp_var (ptype);
		  tree ref;
		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		    ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
				  size_int (task_reduction_cnt_full
					    + task_reduction_cntorig - 1),
				  NULL_TREE, NULL_TREE);
		  else
		    {
		      unsigned int idx = *ctx->task_reduction_map->get (c);
		      ref = task_reduction_read (ilist, tskred_temp, ptype,
						 7 + 3 * idx);
		    }
		  gimplify_assign (y3, ref, ilist);
		}
	      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
		{
		  y2 = create_tmp_var (ptype);
		  gimplify_assign (y2, y, ilist);

		  if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		    {
		      tree ref = build_outer_var_ref (var, ctx);
		      /* For ref build_outer_var_ref already performs this.  */
		      if (TREE_CODE (d) == INDIRECT_REF)
			gcc_assert (omp_is_reference (var));
		      else if (TREE_CODE (d) == ADDR_EXPR)
			ref = build_fold_addr_expr (ref);
		      else if (omp_is_reference (var))
			ref = build_fold_addr_expr (ref);
		      ref = fold_convert_loc (clause_loc, ptype, ref);
		      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
			  && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
			{
			  y3 = create_tmp_var (ptype);
			  gimplify_assign (y3, unshare_expr (ref), ilist);
			}
		      if (is_simd)
			{
			  y4 = create_tmp_var (ptype);
			  gimplify_assign (y4, ref, dlist);
			}
		    }
		}
	      tree i = create_tmp_var (TREE_TYPE (v));
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
	      if (y2)
		{
		  i2 = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
		  body2 = create_artificial_label (UNKNOWN_LOCATION);
		  end2 = create_artificial_label (UNKNOWN_LOCATION);
		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
		}
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  SET_DECL_VALUE_EXPR (decl_placeholder,
				       build_simple_mem_ref (y1));
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  SET_DECL_VALUE_EXPR (placeholder,
				       y3 ? build_simple_mem_ref (y3)
					  : error_mark_node);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  x = lang_hooks.decls.omp_clause_default_ctor
			(c, build_simple_mem_ref (y1),
			 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
		  if (x)
		    gimplify_and_add (x, ilist);
		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		    {
		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (ilist, tseq);
		    }
		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		  if (is_simd)
		    {
		      SET_DECL_VALUE_EXPR (decl_placeholder,
					   build_simple_mem_ref (y2));
		      SET_DECL_VALUE_EXPR (placeholder,
					   build_simple_mem_ref (y4));
		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (dlist, tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    }
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
		  if (y2)
		    {
		      x = lang_hooks.decls.omp_clause_dtor
			    (c, build_simple_mem_ref (y2));
		      if (x)
			{
			  gimple_seq tseq = NULL;
			  tree dtor = x;
			  gimplify_stmt (&dtor, &tseq);
			  gimple_seq_add_seq (dlist, tseq);
			}
		    }
		}
	      else
		{
		  x = omp_reduction_init (c, TREE_TYPE (type));
		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);

		  /* reduction(-:var) sums up the partial results, so it
		     acts identically to reduction(+:var).  */
		  if (code == MINUS_EXPR)
		    code = PLUS_EXPR;

		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
		  if (is_simd)
		    {
		      x = build2 (code, TREE_TYPE (type),
				  build_simple_mem_ref (y4),
				  build_simple_mem_ref (y2));
		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
		    }
		}
	      gimple *g
		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (ilist, g);
	      if (y3)
		{
		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  gimple_seq_add_stmt (ilist, g);
		}
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (ilist, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
	      gimple_seq_add_stmt (ilist, g);
	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
	      if (y2)
		{
		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
		  gimple_seq_add_stmt (dlist, g);
		  if (y4)
		    {
		      g = gimple_build_assign
			    (y4, POINTER_PLUS_EXPR, y4,
			     TYPE_SIZE_UNIT (TREE_TYPE (type)));
		      gimple_seq_add_stmt (dlist, g);
		    }
		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
					   build_int_cst (TREE_TYPE (i2), 1));
		  gimple_seq_add_stmt (dlist, g);
		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
		  gimple_seq_add_stmt (dlist, g);
		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
		}
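	      /* Illustrative shape of the two loops built above for a
		 non-UDR array section reduction such as
		 "reduction(+:a[:n])" (sketch only):

		   i = 0;
		 body:
		   *y1 = <reduction init>;   // ILIST: init private elements
		   y1 += elemsize;  i++;
		   if (i <= v) goto body;
		 end:

		 with a matching merge loop added to DLIST that combines
		 each private element back into the original array.  */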
	      continue;
	    }
	  else if (pass == 2)
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
		x = var;
	      else
		{
		  bool by_ref = use_pointer_for_field (var, ctx);
		  x = build_receiver_ref (var, by_ref, ctx);
		}
	      if (!omp_is_reference (var))
		x = build_fold_addr_expr (x);
	      x = fold_convert (ptr_type_node, x);
	      unsigned cnt = task_reduction_cnt - 1;
	      if (!task_reduction_needs_orig_p)
		cnt += task_reduction_cntorig_full - task_reduction_cntorig;
	      else
		cnt = task_reduction_cntorig - 1;
	      tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
			       size_int (cnt), NULL_TREE, NULL_TREE);
	      gimplify_assign (r, x, ilist);
	      continue;
	    }
	  else if (pass == 3)
	    {
	      tree type = TREE_TYPE (new_var);
	      if (!omp_is_reference (var))
		type = build_pointer_type (type);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		{
		  unsigned cnt = task_reduction_cnt - 1;
		  if (!task_reduction_needs_orig_p)
		    cnt += (task_reduction_cntorig_full
			    - task_reduction_cntorig);
		  else
		    cnt = task_reduction_cntorig - 1;
		  x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
			      size_int (cnt), NULL_TREE, NULL_TREE);
		}
	      else
		{
		  unsigned int idx = *ctx->task_reduction_map->get (c);
		  tree off;
		  if (ctx->task_reductions[1 + idx])
		    off = fold_convert (sizetype,
					ctx->task_reductions[1 + idx]);
		  else
		    off = task_reduction_read (ilist, tskred_temp, sizetype,
					       7 + 3 * idx + 1);
		  x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
				   tskred_base, off);
		}
	      x = fold_convert (type, x);
	      tree t;
	      if (omp_is_reference (var))
		{
		  gimplify_assign (new_var, x, ilist);
		  t = new_var;
		  new_var = build_simple_mem_ref (new_var);
		}
	      else
		{
		  t = create_tmp_var (type);
		  gimplify_assign (t, x, ilist);
		  SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      t = fold_convert (build_pointer_type (boolean_type_node), t);
	      t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
			       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      cond = create_tmp_var (TREE_TYPE (t));
	      gimplify_assign (cond, t, ilist);
	    }
	  else if (is_variable_sized (var))
	    {
	      /* For variable sized types, we need to allocate the
		 actual storage here.  Call alloca and store the
		 result in the pointer decl that we created elsewhere.  */
	      if (pass == 0)
		continue;

	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
		{
		  tree tmp, atmp;
		  gcall *stmt;

		  ptr = DECL_VALUE_EXPR (new_var);
		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
		  ptr = TREE_OPERAND (ptr, 0);
		  gcc_assert (DECL_P (ptr));
		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));

		  /* void *tmp = __builtin_alloca */
		  atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  stmt = gimple_build_call (atmp, 2, x,
					    size_int (DECL_ALIGN (var)));
		  tmp = create_tmp_var_raw (ptr_type_node);
		  gimple_add_tmp_var (tmp);
		  gimple_call_set_lhs (stmt, tmp);

		  gimple_seq_add_stmt (ilist, stmt);

		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
		  gimplify_assign (ptr, x, ilist);
		}
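	      /* Roughly, for a privatized VLA "int a[n]" this emits
		 (illustrative form only; "a$ptr" is a stand-in name for
		 the pointer decl whose dereference is the privatized
		 VLA's DECL_VALUE_EXPR):

		   tmp = __builtin_alloca_with_align (n * sizeof (int),
						      DECL_ALIGN (a));
		   a$ptr = (int (*)[n]) tmp;  */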
	    }
	  else if (omp_is_reference (var)
		   && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
		       || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
	    {
	      /* For references that are being privatized for Fortran,
		 allocate new backing storage for the new pointer
		 variable.  This allows us to avoid changing all the
		 code that expects a pointer to something that expects
		 a direct variable.  */
	      if (pass == 0)
		continue;

	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
		{
		  x = build_receiver_ref (var, false, ctx);
		  x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else if (TREE_CONSTANT (x))
		{
		  /* For reduction in SIMD loop, defer adding the
		     initialization of the reference, because if we decide
		     to use SIMD array for it, the initialization could cause
		     expansion ICE.  */
		  if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
		    x = NULL_TREE;
		  else
		    {
		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					      get_name (var));
		      gimple_add_tmp_var (x);
		      TREE_ADDRESSABLE (x) = 1;
		      x = build_fold_addr_expr_loc (clause_loc, x);
		    }
		}
	      else
		{
		  tree atmp
		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
		  tree al = size_int (TYPE_ALIGN (rtype));
		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		}

	      if (x)
		{
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  gimplify_assign (new_var, x, ilist);
		}

	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
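	      /* Sketch for the Fortran case: a dummy argument is a
		 reference (pointer to the data), so privatization
		 allocates fresh backing storage and rebinds the pointer
		 (illustrative form):

		   T priv_storage;       // or alloca for non-constant size
		   var = &priv_storage;

		 leaving all reference-style accesses in the body
		 unchanged.  */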
	    }
	  else if ((c_kind == OMP_CLAUSE_REDUCTION
		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      if (pass == 0)
		continue;
	    }
	  else if (pass != 0)
	    continue;

	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	      /* Ignore shared directives in teams construct inside
		 target construct.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && !is_host_teams_ctx (ctx))
		continue;
	      /* Shared global vars are just accessed directly.  */
	      if (is_global_var (new_var))
		break;
	      /* For taskloop firstprivate/lastprivate, represented
		 as firstprivate and shared clause on the task, new_var
		 is the firstprivate var.  */
	      if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
		break;
	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
		 needs to be delayed until after fixup_child_record_type so
		 that we get the correct type during the dereference.  */
	      by_ref = use_pointer_for_field (var, ctx);
	      x = build_receiver_ref (var, by_ref, ctx);
	      SET_DECL_VALUE_EXPR (new_var, x);
	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;

	      /* ??? If VAR is not passed by reference, and the variable
		 hasn't been initialized yet, then we'll get a warning for
		 the store into the omp_data_s structure.  Ideally, we'd be
		 able to notice this and not store anything at all, but
		 we're generating code too early.  Suppress the warning.  */
	      if (!by_ref)
		TREE_NO_WARNING (var) = 1;
	      break;
	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		break;
	      /* FALLTHRU */

	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
		x = build_outer_var_ref (var, ctx);
	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
		{
		  if (is_task_ctx (ctx))
		    x = build_receiver_ref (var, false, ctx);
		  else
		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
		}
	      else
		x = NULL;
	    do_private:
	      tree nx;
	      nx = lang_hooks.decls.omp_clause_default_ctor
		     (c, unshare_expr (new_var), x);
	      if (is_simd)
		{
		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
		  if ((TREE_ADDRESSABLE (new_var) || nx || y
		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar))
		    {
		      if (nx)
			x = lang_hooks.decls.omp_clause_default_ctor
			      (c, unshare_expr (ivar), x);
		      if (nx && x)
			gimplify_and_add (x, &llist[0]);
		      if (y)
			{
			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
			  if (y)
			    {
			      gimple_seq tseq = NULL;
			      tree dtor = y;
			      gimplify_stmt (&dtor, &tseq);
			      gimple_seq_add_seq (&llist[1], tseq);
			    }
			}
		      break;
		    }
		}
	      if (nx)
		gimplify_and_add (nx, ilist);
	      /* FALLTHRU */

	    do_dtor:
	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;
		  tree dtor = x;
		  gimplify_stmt (&dtor, &tseq);
		  gimple_seq_add_seq (dlist, tseq);
		}
	      break;
	    case OMP_CLAUSE_LINEAR:
	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		goto do_firstprivate;
	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		x = NULL;
	      else
		x = build_outer_var_ref (var, ctx);
	      goto do_private;

	    case OMP_CLAUSE_FIRSTPRIVATE:
	      if (is_task_ctx (ctx))
		{
		  if ((omp_is_reference (var)
		       && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
		      || is_variable_sized (var))
		    goto do_dtor;
		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
									  ctx))
			   || use_pointer_for_field (var, NULL))
		    {
		      x = build_receiver_ref (var, false, ctx);
		      SET_DECL_VALUE_EXPR (new_var, x);
		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		      goto do_dtor;
		    }
		}
	      if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
		  && omp_is_reference (var))
		{
		  x = build_outer_var_ref (var, ctx);
		  gcc_assert (TREE_CODE (x) == MEM_REF
			      && integer_zerop (TREE_OPERAND (x, 1)));
		  x = TREE_OPERAND (x, 0);
		  x = lang_hooks.decls.omp_clause_copy_ctor
			(c, unshare_expr (new_var), x);
		  gimplify_and_add (x, ilist);
		  goto do_dtor;
		}
	    do_firstprivate:
	      x = build_outer_var_ref (var, ctx);
	      if (is_simd)
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		      && gimple_omp_for_combined_into_p (ctx->stmt))
		    {
		      tree t = OMP_CLAUSE_LINEAR_STEP (c);
		      tree stept = TREE_TYPE (t);
		      tree ct = omp_find_clause (clauses,
						 OMP_CLAUSE__LOOPTEMP_);
		      gcc_assert (ct);
		      tree l = OMP_CLAUSE_DECL (ct);
		      tree n1 = fd->loop.n1;
		      tree step = fd->loop.step;
		      tree itype = TREE_TYPE (l);
		      if (POINTER_TYPE_P (itype))
			itype = signed_type_for (itype);
		      l = fold_build2 (MINUS_EXPR, itype, l, n1);
		      if (TYPE_UNSIGNED (itype)
			  && fd->loop.cond_code == GT_EXPR)
			l = fold_build2 (TRUNC_DIV_EXPR, itype,
					 fold_build1 (NEGATE_EXPR, itype, l),
					 fold_build1 (NEGATE_EXPR,
						      itype, step));
		      else
			l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
		      t = fold_build2 (MULT_EXPR, stept,
				       fold_convert (stept, l), t);

		      if (OMP_CLAUSE_LINEAR_ARRAY (c))
			{
			  x = lang_hooks.decls.omp_clause_linear_ctor
				(c, new_var, x, t);
			  gimplify_and_add (x, ilist);
			  goto do_dtor;
			}

		      if (POINTER_TYPE_P (TREE_TYPE (x)))
			x = fold_build2 (POINTER_PLUS_EXPR,
					 TREE_TYPE (x), x, t);
		      else
			x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
		    }

		  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
		       || TREE_ADDRESSABLE (new_var))
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar))
		    {
		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
			{
			  tree iv = create_tmp_var (TREE_TYPE (new_var));
			  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
			  gimplify_and_add (x, ilist);
			  gimple_stmt_iterator gsi
			    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
			  gassign *g
			    = gimple_build_assign (unshare_expr (lvar), iv);
			  gsi_insert_before_without_update (&gsi, g,
							    GSI_SAME_STMT);
			  tree t = OMP_CLAUSE_LINEAR_STEP (c);
			  enum tree_code code = PLUS_EXPR;
			  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
			    code = POINTER_PLUS_EXPR;
			  g = gimple_build_assign (iv, code, iv, t);
			  gsi_insert_before_without_update (&gsi, g,
							    GSI_SAME_STMT);
			  break;
			}
		      x = lang_hooks.decls.omp_clause_copy_ctor
			    (c, unshare_expr (ivar), x);
		      gimplify_and_add (x, &llist[0]);
		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
		      if (x)
			{
			  gimple_seq tseq = NULL;
			  tree dtor = x;
			  gimplify_stmt (&dtor, &tseq);
			  gimple_seq_add_seq (&llist[1], tseq);
			}
		      break;
		    }
		}
	      x = lang_hooks.decls.omp_clause_copy_ctor
		    (c, unshare_expr (new_var), x);
	      gimplify_and_add (x, ilist);
	      goto do_dtor;
	    case OMP_CLAUSE__LOOPTEMP_:
	    case OMP_CLAUSE__REDUCTEMP_:
	      gcc_assert (is_taskreg_ctx (ctx));
	      x = build_outer_var_ref (var, ctx);
	      x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
	      gimplify_and_add (x, ilist);
	      break;

	    case OMP_CLAUSE_COPYIN:
	      by_ref = use_pointer_for_field (var, NULL);
	      x = build_receiver_ref (var, by_ref, ctx);
	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
	      append_to_statement_list (x, &copyin_seq);
	      copyin_by_ref |= by_ref;
	      break;
	    case OMP_CLAUSE_REDUCTION:
	    case OMP_CLAUSE_IN_REDUCTION:
	      /* OpenACC reductions are initialized using the
		 GOACC_REDUCTION internal function.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		break;
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  gimple_seq tseq;
		  tree ptype = TREE_TYPE (placeholder);
		  if (cond)
		    {
		      x = error_mark_node;
		      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
			  && !task_reduction_needs_orig_p)
			x = var;
		      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
			{
			  tree pptype = build_pointer_type (ptype);
			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
			    x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
					size_int (task_reduction_cnt_full
						  + task_reduction_cntorig - 1),
					NULL_TREE, NULL_TREE);
			  else
			    {
			      unsigned int idx
				= *ctx->task_reduction_map->get (c);
			      x = task_reduction_read (ilist, tskred_temp,
						       pptype, 7 + 3 * idx);
			    }
			  x = fold_convert (pptype, x);
			  x = build_simple_mem_ref (x);
			}
		    }
		  else
		    {
		      x = build_outer_var_ref (var, ctx);

		      if (omp_is_reference (var)
			  && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
			x = build_fold_addr_expr_loc (clause_loc, x);
		    }
		  SET_DECL_VALUE_EXPR (placeholder, x);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  tree new_vard = new_var;
		  if (omp_is_reference (var))
		    {
		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
		      new_vard = TREE_OPERAND (new_var, 0);
		      gcc_assert (DECL_P (new_vard));
		    }
		  if (is_simd
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar))
		    {
		      if (new_vard == new_var)
			{
			  gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
			  SET_DECL_VALUE_EXPR (new_var, ivar);
			}
		      else
			{
			  SET_DECL_VALUE_EXPR (new_vard,
					       build_fold_addr_expr (ivar));
			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
			}
		      x = lang_hooks.decls.omp_clause_default_ctor
			    (c, unshare_expr (ivar),
			     build_outer_var_ref (var, ctx));
		      if (x)
			gimplify_and_add (x, &llist[0]);
		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
			{
			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			  lower_omp (&tseq, ctx);
			  gimple_seq_add_seq (&llist[0], tseq);
			}
		      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (&llist[1], tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		      if (new_vard == new_var)
			SET_DECL_VALUE_EXPR (new_var, lvar);
		      else
			SET_DECL_VALUE_EXPR (new_vard,
					     build_fold_addr_expr (lvar));
		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
		      if (x)
			{
			  tseq = NULL;
			  tree dtor = x;
			  gimplify_stmt (&dtor, &tseq);
			  gimple_seq_add_seq (&llist[1], tseq);
			}
		      break;
		    }
		  /* If this is a reference to constant size reduction var
		     with placeholder, we haven't emitted the initializer
		     for it because it is undesirable if SIMD arrays are used.
		     But if they aren't used, we need to emit the deferred
		     initialization now.  */
		  else if (omp_is_reference (var) && is_simd)
		    handle_simd_reference (clause_loc, new_vard, ilist);
		  tree lab2 = NULL_TREE;
		  if (cond)
		    {
		      if (!is_parallel_ctx (ctx))
			{
			  tree condv = create_tmp_var (boolean_type_node);
			  tree m = build_simple_mem_ref (cond);
			  g = gimple_build_assign (condv, m);
			  gimple_seq_add_stmt (ilist, g);
			  tree lab1
			    = create_artificial_label (UNKNOWN_LOCATION);
			  lab2 = create_artificial_label (UNKNOWN_LOCATION);
			  g = gimple_build_cond (NE_EXPR, condv,
						 boolean_false_node,
						 lab2, lab1);
			  gimple_seq_add_stmt (ilist, g);
			  gimple_seq_add_stmt (ilist,
					       gimple_build_label (lab1));
			}
		      g = gimple_build_assign (build_simple_mem_ref (cond),
					       boolean_true_node);
		      gimple_seq_add_stmt (ilist, g);
		    }
		  x = lang_hooks.decls.omp_clause_default_ctor
			(c, unshare_expr (new_var),
			 cond ? NULL_TREE
			      : build_outer_var_ref (var, ctx));
		  if (x)
		    gimplify_and_add (x, ilist);
		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		    {
		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (ilist, tseq);
		    }
		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		  if (is_simd)
		    {
		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (dlist, tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    }
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		  if (lab2)
		    gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
		  break;
		}
	      else
		{
		  x = omp_reduction_init (c, TREE_TYPE (new_var));
		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);

		  if (cond)
		    {
		      tree lab2 = NULL_TREE;
		      /* GOMP_taskgroup_reduction_register memsets the whole
			 array to zero.  If the initializer is zero, we don't
			 need to initialize it again, just mark it as ever
			 used unconditionally, i.e. cond = true.  */
		      if (initializer_zerop (x))
			{
			  g = gimple_build_assign (build_simple_mem_ref (cond),
						   boolean_true_node);
			  gimple_seq_add_stmt (ilist, g);
			  break;
			}

		      /* Otherwise, emit
			 if (!cond) { cond = true; new_var = x; }  */
		      if (!is_parallel_ctx (ctx))
			{
			  tree condv = create_tmp_var (boolean_type_node);
			  tree m = build_simple_mem_ref (cond);
			  g = gimple_build_assign (condv, m);
			  gimple_seq_add_stmt (ilist, g);
			  tree lab1
			    = create_artificial_label (UNKNOWN_LOCATION);
			  lab2 = create_artificial_label (UNKNOWN_LOCATION);
			  g = gimple_build_cond (NE_EXPR, condv,
						 boolean_false_node,
						 lab2, lab1);
			  gimple_seq_add_stmt (ilist, g);
			  gimple_seq_add_stmt (ilist,
					       gimple_build_label (lab1));
			}
		      g = gimple_build_assign (build_simple_mem_ref (cond),
					       boolean_true_node);
		      gimple_seq_add_stmt (ilist, g);
		      gimplify_assign (new_var, x, ilist);
		      if (lab2)
			gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
		      break;
		    }

		  /* reduction(-:var) sums up the partial results, so it
		     acts identically to reduction(+:var).  */
		  if (code == MINUS_EXPR)
		    code = PLUS_EXPR;

		  tree new_vard = new_var;
		  if (is_simd && omp_is_reference (var))
		    {
		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
		      new_vard = TREE_OPERAND (new_var, 0);
		      gcc_assert (DECL_P (new_vard));
		    }
		  if (is_simd
		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
						       ivar, lvar))
		    {
		      tree ref = build_outer_var_ref (var, ctx);

		      gimplify_assign (unshare_expr (ivar), x, &llist[0]);

		      if (sctx.is_simt)
			{
			  if (!simt_lane)
			    simt_lane = create_tmp_var (unsigned_type_node);
			  x = build_call_expr_internal_loc
			    (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
			     TREE_TYPE (ivar), 2, ivar, simt_lane);
			  x = build2 (code, TREE_TYPE (ivar), ivar, x);
			  gimplify_assign (ivar, x, &llist[2]);
			}
		      x = build2 (code, TREE_TYPE (ref), ref, ivar);
		      ref = build_outer_var_ref (var, ctx);
		      gimplify_assign (ref, x, &llist[1]);

		      if (new_vard != new_var)
			{
			  SET_DECL_VALUE_EXPR (new_vard,
					       build_fold_addr_expr (lvar));
			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
			}
		    }
		  else
		    {
		      if (omp_is_reference (var) && is_simd)
			handle_simd_reference (clause_loc, new_vard, ilist);
		      gimplify_assign (new_var, x, ilist);
		      if (is_simd)
			{
			  tree ref = build_outer_var_ref (var, ctx);

			  x = build2 (code, TREE_TYPE (ref), ref, new_var);
			  ref = build_outer_var_ref (var, ctx);
			  gimplify_assign (ref, x, dlist);
			}
		    }
		}
	      break;

	    default:
	      gcc_unreachable ();
	    }
	}
      if (tskred_avar)
	{
	  tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
	  TREE_THIS_VOLATILE (clobber) = 1;
	  gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar,
							   clobber));
	}

      if (known_eq (sctx.max_vf, 1U))
	sctx.is_simt = false;

      if (sctx.lane || sctx.is_simt)
	{
	  uid = create_tmp_var (ptr_type_node, "simduid");
	  /* Don't want uninit warnings on simduid, it is always uninitialized,
	     but we use it not for the value, but for the DECL_UID only.  */
	  TREE_NO_WARNING (uid) = 1;
	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
	  OMP_CLAUSE__SIMDUID__DECL (c) = uid;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
	  gimple_omp_for_set_clauses (ctx->stmt, c);
	}
      /* Emit calls denoting privatized variables and initializing a pointer to
	 structure that holds private variables as fields after ompdevlow pass.  */
      if (sctx.is_simt)
	{
	  sctx.simt_eargs[0] = uid;
	  gimple *g
	    = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER,
					      sctx.simt_eargs);
	  gimple_call_set_lhs (g, uid);
	  gimple_seq_add_stmt (ilist, g);
	  sctx.simt_eargs.release ();

	  simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
	  g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
	  gimple_call_set_lhs (g, simtrec);
	  gimple_seq_add_stmt (ilist, g);
	}
      if (sctx.lane)
	{
	  gimple *g
	    = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
	  gimple_call_set_lhs (g, sctx.lane);
	  gimple_stmt_iterator gsi
	    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
	  gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (sctx.lane, INTEGER_CST,
				   build_int_cst (unsigned_type_node, 0));
	  gimple_seq_add_stmt (ilist, g);
	  /* Emit reductions across SIMT lanes in log_2 (simt_vf) steps.  */
	  if (llist[2])
	    {
	      tree simt_vf = create_tmp_var (unsigned_type_node);
	      g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
	      gimple_call_set_lhs (g, simt_vf);
	      gimple_seq_add_stmt (dlist, g);

	      tree t = build_int_cst (unsigned_type_node, 1);
	      g = gimple_build_assign (simt_lane, INTEGER_CST, t);
	      gimple_seq_add_stmt (dlist, g);

	      t = build_int_cst (unsigned_type_node, 0);
	      g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
	      gimple_seq_add_stmt (dlist, g);

	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      tree header = create_artificial_label (UNKNOWN_LOCATION);
	      tree end = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (dlist, gimple_build_goto (header));
	      gimple_seq_add_stmt (dlist, gimple_build_label (body));

	      gimple_seq_add_seq (dlist, llist[2]);

	      g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane,
				       integer_one_node);
	      gimple_seq_add_stmt (dlist, g);

	      gimple_seq_add_stmt (dlist, gimple_build_label (header));
	      g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
	      gimple_seq_add_stmt (dlist, g);

	      gimple_seq_add_stmt (dlist, gimple_build_label (end));
	    }
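	  /* Illustrative form of the SIMT reduction loop built above,
	     combining per-lane partial values in log2 (simt_vf) butterfly
	     exchange steps (sketch only):

	       simt_lane = 1;
	       goto header;
	     body:
	       ivar = ivar <op> .GOMP_SIMT_XCHG_BFLY (ivar, simt_lane);
	       simt_lane <<= 1;
	     header:
	       if (simt_lane < simt_vf) goto body;
	     end:
	  */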
	  for (int i = 0; i < 2; i++)
	    if (llist[i])
	      {
		tree vf = create_tmp_var (unsigned_type_node);
		g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
		gimple_call_set_lhs (g, vf);
		gimple_seq *seq = i == 0 ? ilist : dlist;
		gimple_seq_add_stmt (seq, g);
		tree t = build_int_cst (unsigned_type_node, 0);
		g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
		gimple_seq_add_stmt (seq, g);
		tree body = create_artificial_label (UNKNOWN_LOCATION);
		tree header = create_artificial_label (UNKNOWN_LOCATION);
		tree end = create_artificial_label (UNKNOWN_LOCATION);
		gimple_seq_add_stmt (seq, gimple_build_goto (header));
		gimple_seq_add_stmt (seq, gimple_build_label (body));
		gimple_seq_add_seq (seq, llist[i]);
		t = build_int_cst (unsigned_type_node, 1);
		g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
		gimple_seq_add_stmt (seq, g);
		gimple_seq_add_stmt (seq, gimple_build_label (header));
		g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
		gimple_seq_add_stmt (seq, g);
		gimple_seq_add_stmt (seq, gimple_build_label (end));
	      }
	}
      if (sctx.is_simt)
	{
	  gimple_seq_add_seq (dlist, sctx.simt_dlist);
	  gimple *g
	    = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
	  gimple_seq_add_stmt (dlist, g);
	}
      /* The copyin sequence is not to be executed by the main thread, since
	 that would result in self-copies.  Perhaps not visible to scalars,
	 but it certainly is to C++ operator=.  */
      if (copyin_seq)
	{
	  x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
			       0);
	  x = build2 (NE_EXPR, boolean_type_node, x,
		      build_int_cst (TREE_TYPE (x), 0));
	  x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
	  gimplify_and_add (x, ilist);
	}

      /* If any copyin variable is passed by reference, we must ensure the
	 master thread doesn't modify it before it is copied over in all
	 threads.  Similarly for variables in both firstprivate and
	 lastprivate clauses we need to ensure the lastprivate copying
	 happens after firstprivate copying in all threads.  And similarly
	 for UDRs if initializer expression refers to omp_orig.  */
      if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
	{
	  /* Don't add any barrier for #pragma omp simd or
	     #pragma omp distribute.  */
	  if (!is_task_ctx (ctx)
	      && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
	    gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
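	  /* Illustrative source-level form of what the two fragments
	     above emit (sketch only):

	       if (omp_get_thread_num () != 0)
		 <copyin_seq>;          // copy master's threadprivate vars
	       #pragma omp barrier      // only in the cases listed above

	     so non-master threads copy in, and the barrier keeps the
	     master from modifying the originals too early.  */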
	}

      /* If max_vf is non-zero, then we can use only a vectorization factor
	 up to the max_vf we chose.  So stick it into the safelen clause.  */
      if (maybe_ne (sctx.max_vf, 0U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  poly_uint64 safe_len;
	  if (c == NULL_TREE
	      || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  && maybe_gt (safe_len, sctx.max_vf)))
	    {
	      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
	      OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
							   sctx.max_vf);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
	      gimple_omp_for_set_clauses (ctx->stmt, c);
	    }
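	  /* E.g. if the pass settled on max_vf == 16 and no smaller
	     safelen was already present, the loop is effectively treated
	     as "#pragma omp simd safelen(16)", capping the vectorization
	     factor the vectorizer may assume.  */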
	}
}

/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  */

static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
    }

  bool maybe_simt = false;
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
    {
      maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
      simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
      if (simduid)
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
    }
  if (predicate)
    {
      gcond *stmt;
      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	{
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	}
      else
	{
	  arm1 = predicate;
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	}
      if (maybe_simt)
	{
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
						 1, simtcond);
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (g, c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
				    label_true, label);
	}
      else
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }
  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	    {
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx->outer);
	    }
	  else
	    {
	      new_var = lookup_decl (var, ctx);
	      /* Avoid uninitialized warnings for lastprivate and
		 for linear iterators.  */
	      if (predicate
		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
		TREE_NO_WARNING (new_var) = 1;
	    }

	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node);
		      gcall *g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		}
	    }
	  else if (maybe_simt)
	    {
	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
			  ? DECL_VALUE_EXPR (new_var)
			  : new_var);
	      if (simtlast == NULL)
		{
		  simtlast = create_tmp_var (unsigned_type_node);
		  gcall *g = gimple_build_call_internal
		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		  gimple_call_set_lhs (g, simtlast);
		  gimple_seq_add_stmt (stmt_list, g);
		}
	      x = build_call_expr_internal_loc
		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		 TREE_TYPE (val), 2, val, simtlast);
	      new_var = unshare_expr (new_var);
	      gimplify_assign (new_var, x, stmt_list);
	      new_var = unshare_expr (new_var);
	    }

	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
	    }

	  x = NULL_TREE;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
	    {
	      gcc_checking_assert (is_taskloop_ctx (ctx));
	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
							  ctx->outer->outer);
	      if (is_global_var (ovar))
		x = ovar;
	    }
	  if (!x)
	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
	  if (omp_is_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, stmt_list);
	}
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
}
/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
   (which might be a placeholder).  INNER is true if this is an inner
   axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
   join markers.  Generate the before-loop forking sequence in
   FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
   general form of these sequences is

     GOACC_REDUCTION_SETUP
     GOACC_FORK
     GOACC_REDUCTION_INIT
     ...
     GOACC_REDUCTION_FINI
     GOACC_JOIN
     GOACC_REDUCTION_TEARDOWN.  */

static void
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *join, gimple_seq *fork_seq,
		       gimple_seq *join_seq, omp_context *ctx)
{
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	  rcode = PLUS_EXPR;
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	if (!var)
	  var = orig;

	incoming = outgoing = var;
	if (!inner)
	  {
	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
	      {
		enum gimple_code type = gimple_code (probe->stmt);
		tree cls;

		switch (type)
		  {
		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);
		    break;

		  case GIMPLE_OMP_TARGET:
		    if (gimple_omp_target_kind (probe->stmt)
			!= GF_OMP_TARGET_KIND_OACC_PARALLEL)
		      goto do_lookup;

		    cls = gimple_omp_target_clauses (probe->stmt);
		    break;

		  default:
		    goto do_lookup;
		  }

		outer = probe;
		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		    {
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		    }
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))
		    {
		      is_private = true;
		      goto do_lookup;
		    }
	      }

	  do_lookup:
	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
	      {
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_is_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		outgoing = var;
		incoming = omp_reduction_init_op (loc, rcode, type);
	      }
	    else
	      {
		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		tree t = NULL_TREE;
		omp_context *c = ctx->outer;
		while (c && !t)
		  {
		    t = maybe_lookup_decl (orig, c);
		    c = c->outer;
		  }
		incoming = outgoing = (t ? t : orig);
	      }

	  has_outer_reduction:;
	  }

	if (!ref_to_res)
	  ref_to_res = integer_zero_node;
	if (omp_is_reference (orig))
	  {
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	    if (!inner)
	      {
		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
	      }

	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);
	  }
	else
	  v1 = v2 = v3 = var;

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);
	if (!init_code)
	  {
	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);
	  }

	tree setup_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  incoming, level, op, off);
	tree init_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  v1, level, op, off);
	tree fini_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  v2, level, op, off);
	tree teardown_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, v3, level, op, off);

	gimplify_assign (v1, setup_call, &before_fork);
	gimplify_assign (v2, init_call, &after_fork);
	gimplify_assign (v3, fini_call, &before_join);
	gimplify_assign (outgoing, teardown_call, &after_join);
      }

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
  if (fork)
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
  if (join)
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
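  /* Illustrative shape of the stitched result for one reduction
     variable (sketch only; the internal calls are later expanded for
     the actual target by the OpenACC device-lowering pass):

       v1 = .GOACC_REDUCTION (SETUP, ref_to_res, incoming, level, op, off);
       GOACC_FORK
       v2 = .GOACC_REDUCTION (INIT, ref_to_res, v1, level, op, off);
       ... loop body ...
       v3 = .GOACC_REDUCTION (FINI, ref_to_res, v2, level, op, off);
       GOACC_JOIN
       outgoing = .GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level,
				    op, off);  */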
}

/* Generate code to implement the REDUCTION clauses.  */

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;
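/* Illustrative contrast of the two strategies chosen above, for
   "reduction(+:s)" with private copy s_priv (sketch of the generated
   code in source form only):

     exactly one clause:    #pragma omp atomic
			    s = s + s_priv;

     several clauses / -1:  GOMP_atomic_start ();
			    s = s + s_priv;  t = t * t_priv;  ...
			    GOMP_atomic_end ();  */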
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	{
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer- or reference-based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      if (count == 1)
	{
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_is_reference (var) && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_is_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      x = build2 (code, TREE_TYPE (out), out, priv);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_is_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
/* Generate code to implement the COPYPRIVATE clauses.  */

static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
			   omp_context *ctx)
{
  tree c;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
	{
	  x = build_fold_addr_expr_loc (clause_loc, new_var);
	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
	}
      gimplify_assign (ref, x, slist);

      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
	{
	  ref = fold_convert_loc (clause_loc,
				  build_pointer_type (TREE_TYPE (new_var)),
				  ref);
	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
	}
      if (omp_is_reference (var))
	{
	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
	  ref = build_simple_mem_ref_loc (clause_loc, ref);
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	}
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}
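
/* Rough sketch of the effect (field and variable names illustrative
   only): for "#pragma omp single copyprivate (a)" the sender side
   stores

	.omp_copy_o.a = a;		// or &a when passed by reference

   into SLIST, while the receiver side loads

	a = .omp_copy_i->a;		// with the matching dereferences

   into RLIST.  lower_omp_single_copy decides which thread runs which
   half, around the GOMP_single_copy_start/end calls.  */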
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    TREE_NO_WARNING (var) = 1;
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_is_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
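
/* Illustrative example (names made up): for
   "#pragma omp task firstprivate (x)" the loop above appends

	.omp_data_o.x = x;		// or &x when BY_REF

   to ILIST, ahead of the runtime call that launches the region, and
   for an outgoing lastprivate-style value it appends the reverse copy

	x = .omp_data_o.x;

   to OLIST, to run after the region completes.  */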
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      /* If CTX is a nested parallel directive, find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  x = build_sender_ref (ovar, ctx);
	  var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
		 or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */

static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels = MAX (levels, 1);
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels = MAX (levels, 2);
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels = MAX (levels, 3);
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel (tgt))
    tag |= OLF_INDEPENDENT;

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning.  */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
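
/* For instance (a sketch; the constants are symbolic, not the literal
   encoding), a "#pragma acc loop gang vector" inside a parallel region
   would be marked roughly as

	.data_dep = IFN_UNIQUE (OACC_HEAD_MARK, .data_dep, 3,
				OLF_DIM_GANG | OLF_DIM_VECTOR
				| OLF_INDEPENDENT);

   and this function would return 3, the vector level.  */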
/* Emit an OpenACC loop head or tail marker to SEQ.  TOFOLLOW, if
   non-null, carries the partitioning level of the enclosed region.  */

static void
lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
			tree tofollow, gimple_seq *seq)
{
  int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
		     : IFN_UNIQUE_OACC_TAIL_MARK);
  tree marker = build_int_cst (integer_type_node, marker_kind);
  int nargs = 2 + (tofollow != NULL_TREE);
  gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
					    marker, ddvar, tofollow);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);
}
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  */

static void
lower_oacc_head_tail (location_t loc, tree clauses,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
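
/* Schematically (one level shown; deeper levels nest inside), HEAD and
   TAIL end up bracketing the loop body like:

	HEAD:	.data_dep = IFN_UNIQUE (OACC_HEAD_MARK, ...);
		.data_dep = IFN_UNIQUE (OACC_FORK, .data_dep, -1);
		... reduction setup ...
	  <loop body>
	TAIL:	... reduction teardown ...
		.data_dep = IFN_UNIQUE (OACC_JOIN, .data_dep, -1);
		.data_dep = IFN_UNIQUE (OACC_TAIL_MARK, .data_dep);

   so that a later device-specific lowering pass can match the markers
   up again and assign the actual partitioning.  */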
/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
   catch handler and return it.  This prevents programs from violating the
   structured block semantics with throws.  */

static gimple_seq
maybe_catch_exception (gimple_seq body)
{
  gimple *g;
  tree decl;

  if (!flag_exceptions)
    return body;

  if (lang_hooks.eh_protect_cleanup_actions != NULL)
    decl = lang_hooks.eh_protect_cleanup_actions ();
  else
    decl = builtin_decl_explicit (BUILT_IN_TRAP);

  g = gimple_build_eh_must_not_throw (decl);
  g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
			GIMPLE_TRY_CATCH);

  return gimple_seq_alloc_with_stmt (g);
}
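
/* In effect, for C++ with -fexceptions the region body becomes
   (sketch):

	try { BODY } catch (...) { <handler> (); }

   where the handler is whatever eh_protect_cleanup_actions returns
   for the language (a terminate-style function for C++), or
   __builtin_trap () as the fallback.  */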
/* Routines to lower OMP directives into OMP-GIMPLE.  */

/* If CTX is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add a LHS to its GIMPLE_OMP_RETURN and
   a conditional branch to the parallel's cancel_label, to handle
   cancellation in the implicit barrier.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
      return;
}
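
/* The generated check looks roughly like (sketch; LHS receives the
   result of the cancellation-aware barrier):

	lhs = GIMPLE_OMP_RETURN;	// barrier reports cancellation
	if (lhs != 0) goto <cancel_label>; else goto <fallthru>;
	<fallthru>:

   so a cancelled parallel skips from the implicit barrier straight to
   the parallel's cancellation label.  */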
/* Find the first task_reduction or reduction clause or return NULL
   if there are none.  */

static inline tree
omp_task_reductions_find_first (tree clauses, enum tree_code code,
				enum omp_clause_code ccode)
{
  while (1)
    {
      clauses = omp_find_clause (clauses, ccode);
      if (clauses == NULL_TREE)
	return NULL_TREE;
      if (ccode != OMP_CLAUSE_REDUCTION
	  || code == OMP_TASKLOOP
	  || OMP_CLAUSE_REDUCTION_TASK (clauses))
	return clauses;
      clauses = OMP_CLAUSE_CHAIN (clauses);
    }
}

static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
				       gimple_seq *, gimple_seq *);
/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &l, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  control = create_tmp_var (unsigned_type_node, ".section");
  t = gimple_build_omp_continue (control, control);
  gimple_omp_sections_set_control (stmt, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
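
/* After this function, the directive has roughly this shape (details
   elided):

	<ilist>				// data-sharing setup
	GIMPLE_OMP_SECTIONS <.section>
	GIMPLE_OMP_SECTIONS_SWITCH
	bind { <section 1> OMP_RETURN ... <section N + lastprivate> }
	GIMPLE_OMP_CONTINUE <.section, .section>
	<olist>				// reductions
	<dlist>				// destructors
	GIMPLE_OMP_RETURN		// implicit barrier unless nowait

   pass_expand_omp later turns this into a dispatch loop driven by the
   GOMP_sections_start/GOMP_sections_next runtime calls.  */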
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, without a copyprivate clause:

	if (GOMP_single_start ())
	  BODY;
	[ GOMP_barrier (); ]	-> unless 'nowait' is present.

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  for a synchronization analysis pass.  */

static void
lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
{
  location_t loc = gimple_location (single_stmt);
  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *call, *cond;
  tree lhs, decl;

  decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
  lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
  call = gimple_build_call (decl, 0);
  gimple_call_set_lhs (call, lhs);
  gimple_seq_add_stmt (pre_p, call);

  cond = gimple_build_cond (EQ_EXPR, lhs,
			    fold_convert_loc (loc, TREE_TYPE (lhs),
					      boolean_true_node),
			    tlabel, flabel);
  gimple_seq_add_stmt (pre_p, cond);
  gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
  gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
}
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

	{
	  if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	    {
	      BODY;
	      copyout.a = a;
	      copyout.b = b;
	      copyout.c = c;
	      GOMP_single_copy_end (&copyout);
	    }
	  else
	    {
	      a = copyout_p->a;
	      b = copyout_p->b;
	      c = copyout_p->c;
	    }
	  GOMP_barrier ();
	}

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  for a synchronization analysis pass.  */

static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			     &copyin_seq, ctx);

  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
/* Expand code for an OpenMP single directive.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_constructor (ctx->record_type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
/* Expand code for an OpenMP master directive.  */

static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, lab = NULL, x, bfn_decl;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tseq;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  x = build_call_expr_loc (loc, bfn_decl, 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  tseq = NULL;
  gimplify_and_add (x, &tseq);
  gimple_bind_add_seq (bind, tseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_label (lab));

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
/* Helper function for lower_omp_task_reductions.  For a specific PASS,
   find the next clause that should be processed, or return false if
   all have been processed already.  */

static inline bool
omp_task_reduction_iterate (int pass, enum tree_code code,
			    enum omp_clause_code ccode, tree *c, tree *decl,
			    tree *type, tree *next)
{
  for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
    {
      if (ccode == OMP_CLAUSE_REDUCTION
	  && code != OMP_TASKLOOP
	  && !OMP_CLAUSE_REDUCTION_TASK (*c))
	continue;
      *decl = OMP_CLAUSE_DECL (*c);
      *type = TREE_TYPE (*decl);
      if (TREE_CODE (*decl) == MEM_REF)
	{
	  if (pass != 1)
	    continue;
	}
      else
	{
	  if (omp_is_reference (*decl))
	    *type = TREE_TYPE (*type);
	  if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
	    continue;
	}
      *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
      return true;
    }
  *decl = NULL_TREE;
  *type = NULL_TREE;
  *next = NULL_TREE;
  return false;
}
/* Lower task_reduction and reduction clauses (the latter, unless CODE
   is OMP_TASKGROUP, only with the task modifier).  Register the mapping
   of those in the START sequence; reduce and unregister them in the END
   sequence.  */

static void
lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
			   gimple_seq *start, gimple_seq *end)
{
  enum omp_clause_code ccode
    = (code == OMP_TASKGROUP
       ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
  tree cancellable = NULL_TREE;
  clauses = omp_task_reductions_find_first (clauses, code, ccode);
  if (clauses == NULL_TREE)
    return;
  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
	if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	    && outer->cancellable)
	  {
	    cancellable = error_mark_node;
	    break;
	  }
	else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
	  break;
    }
  tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree *last = &TYPE_FIELDS (record_type);
  unsigned cnt = 0;
  if (cancellable)
    {
      tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			       ptr_type_node);
      tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
				integer_type_node);
      *last = field;
      DECL_CHAIN (field) = ifield;
      last = &DECL_CHAIN (ifield);
    }
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  ++cnt;
	  tree new_type = type;
	  if (ctx->outer)
	    new_type = remap_type (type, &ctx->outer->cb);
	  tree field
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
			  new_type);
	  if (DECL_P (decl) && type == TREE_TYPE (decl))
	    {
	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	    }
	  else
	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
	  DECL_CONTEXT (field) = record_type;
	  *last = field;
	  last = &DECL_CHAIN (field);
	  tree bfield
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
			  boolean_type_node);
	  DECL_CONTEXT (bfield) = record_type;
	  *last = bfield;
	  last = &DECL_CHAIN (bfield);
	}
    }
  *last = NULL_TREE;
  layout_type (record_type);

  /* Build up an array which registers with the runtime all the reductions
     and deregisters them at the end.  Format documented in libgomp/task.c.  */
  tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
  tree avar = create_tmp_var_raw (atype);
  gimple_add_tmp_var (avar);
  TREE_ADDRESSABLE (avar) = 1;
  tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
		   NULL_TREE, NULL_TREE);
  tree t = build_int_cst (pointer_sized_int_node, cnt);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  gimple_seq seq = NULL;
  tree sz = fold_convert (pointer_sized_int_node,
			  TYPE_SIZE_UNIT (record_type));
  int cachesz = 64;
  sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, cachesz - 1));
  sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
  ctx->task_reductions.create (1 + cnt);
  ctx->task_reduction_map = new hash_map<tree, unsigned>;
  ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
				   ? sz : NULL_TREE);
  sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
  gimple_seq_add_seq (start, seq);
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
	      NULL_TREE, NULL_TREE);
  gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node,
		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, -1);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, 0);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

  /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
     and for each task reduction checks a bool right after the private variable
     within that thread's chunk; if the bool is clear, it hasn't been
     initialized and thus isn't going to be reduced nor destructed, otherwise
     reduce and destruct it.  */
  tree idx = create_tmp_var (size_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
  tree num_thr_sz = create_tmp_var (size_type_node);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = NULL_TREE;
  gimple *g;
  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      /* For worksharing constructs, only perform it in the master thread,
	 with the exception of cancelled implicit barriers - then only handle
	 the current thread.  */
      tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
      tree thr_num = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, thr_num);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	{
	  tree c;
	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  if (code == OMP_FOR)
	    c = gimple_omp_for_clauses (ctx->stmt);
	  else if (code == OMP_SECTIONS)
	    c = gimple_omp_sections_clauses (ctx->stmt);
	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
	  cancellable = c;
	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
				 lab5, lab6);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab5));
	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
	  gimple_seq_add_stmt (end, g);
	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
				   build_one_cst (TREE_TYPE (idx)));
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_goto (lab3));
	  gimple_seq_add_stmt (end, gimple_build_label (lab6));
	}
      g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
      gimple_seq_add_stmt (end, g);
      gimple_seq_add_stmt (end, gimple_build_label (lab4));
    }
  if (code != OMP_PARALLEL)
    {
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
      tree num_thr = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, num_thr);
      gimple_seq_add_stmt (end, g);
      g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	gimple_seq_add_stmt (end, gimple_build_label (lab3));
    }
  else
    {
      tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
      t = fold_convert (size_type_node, t);
      gimplify_assign (num_thr_sz, t, end);
    }
  t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  tree data = create_tmp_var (pointer_sized_int_node);
  gimple_seq_add_stmt (end, gimple_build_assign (data, t));
  gimple_seq_add_stmt (end, gimple_build_label (lab1));
  tree ptr;
  if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
    ptr = create_tmp_var (build_pointer_type (record_type));
  else
    ptr = create_tmp_var (ptr_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));

  tree field = TYPE_FIELDS (record_type);
  cnt = 0;
  if (cancellable)
    field = DECL_CHAIN (DECL_CHAIN (field));
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  tree var = decl, ref;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      tree v = var;
	      if (TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      else if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	      tree orig_var = var;
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (orig_var != var)
		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
	      else if (TREE_CODE (v) == ADDR_EXPR)
		t = build_fold_addr_expr (t);
	      else if (TREE_CODE (v) == INDIRECT_REF)
		t = build_fold_indirect_ref (t);
	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
		}
	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
				 fold_convert (size_type_node,
					       TREE_OPERAND (decl, 1)));
	    }
	  else
	    {
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (!omp_is_reference (decl))
		t = build_fold_addr_expr (t);
	    }
	  t = fold_convert (pointer_sized_int_node, t);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
	  t = unshare_expr (byte_position (field));
	  t = fold_convert (pointer_sized_int_node, t);
	  ctx->task_reduction_map->put (c, cnt);
	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
					   ? t : NULL_TREE);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

	  tree bfield = DECL_CHAIN (field);
	  tree cond;
	  if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
	    /* In parallel or worksharing all threads unconditionally
	       initialize all their task reduction private variables.  */
	    cond = boolean_true_node;
	  else if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
			     unshare_expr (byte_position (bfield)));
	      seq = NULL;
	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
			     build_int_cst (pbool, 0));
	    }
	  else
	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
	  tree condv = create_tmp_var (boolean_type_node);
	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
				 lab3, lab4);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab3));
	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
	    {
	      /* If this reduction doesn't need destruction and parallel
		 has been cancelled, there is nothing to do for this
		 reduction, so jump around the merge operation.  */
	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	      g = gimple_build_cond (NE_EXPR, cancellable,
				     build_zero_cst (TREE_TYPE (cancellable)),
				     lab4, lab5);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (lab5));
	    }

	  tree new_var;
	  if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
				unshare_expr (byte_position (field)));
	      seq = NULL;
	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (field));
	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
				build_int_cst (pbool, 0));
	    }
	  else
	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
			      build_simple_mem_ref (ptr), field, NULL_TREE);

	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	  if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
	    ref = build_simple_mem_ref (ref);
	  /* reduction(-:var) sums up the partial results, so it acts
	     identically to reduction(+:var).  */
	  if (rcode == MINUS_EXPR)
	    rcode = PLUS_EXPR;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree type = TREE_TYPE (new_var);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      tree i = create_tmp_var (TREE_TYPE (v));
	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      if (DECL_P (v))
		{
		  v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  tree vv = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (vv, v, start);
		  v = vv;
		}
	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	      new_var = build_fold_addr_expr (new_var);
	      new_var = fold_convert (ptype, new_var);
	      ref = fold_convert (ptype, ref);
	      tree m = create_tmp_var (ptype);
	      gimplify_assign (m, new_var, end);
	      new_var = m;
	      m = create_tmp_var (ptype);
	      gimplify_assign (m, ref, end);
	      ref = m;
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (end, gimple_build_label (body));
	      tree priv = build_simple_mem_ref (new_var);
	      tree out = build_simple_mem_ref (ref);
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  tree lab6 = NULL_TREE;
		  if (cancellable)
		    {
		      /* If this reduction needs destruction and parallel
			 has been cancelled, jump around the merge operation
			 to the destruction.  */
		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
					     lab6, lab5);
		      gimple_seq_add_stmt (end, g);
		      gimple_seq_add_stmt (end, gimple_build_label (lab5));
		    }
		  SET_DECL_VALUE_EXPR (placeholder, out);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
		  gimple_seq_add_seq (end,
				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    {
		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
		    }
		  if (lab6)
		    gimple_seq_add_stmt (end, gimple_build_label (lab6));
		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
		  if (x)
		    {
		      gimple_seq tseq = NULL;
		      gimplify_stmt (&x, &tseq);
		      gimple_seq_add_seq (end, tseq);
		    }
		}
	      else
		{
		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
		  out = unshare_expr (out);
		  gimplify_assign (out, x, end);
		}
	      g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (endl));
	    }
	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree oldv = NULL_TREE;
	      tree lab6 = NULL_TREE;
	      if (cancellable)
		{
		  /* If this reduction needs destruction and parallel
		     has been cancelled, jump around the merge operation
		     to the destruction.  */
		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
					 lab6, lab5);
		  gimple_seq_add_stmt (end, g);
		  gimple_seq_add_stmt (end, gimple_build_label (lab5));
		}
	      if (omp_is_reference (decl)
		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
						 TREE_TYPE (ref)))
		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      tree refv = create_tmp_var (TREE_TYPE (ref));
	      gimplify_assign (refv, ref, end);
	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
	      SET_DECL_VALUE_EXPR (placeholder, ref);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      tree d = maybe_lookup_decl (decl, ctx);
	      gcc_assert (d);
	      if (DECL_HAS_VALUE_EXPR_P (d))
		oldv = DECL_VALUE_EXPR (d);
	      if (omp_is_reference (var))
		{
		  tree v = fold_convert (TREE_TYPE (d),
					 build_fold_addr_expr (new_var));
		  SET_DECL_VALUE_EXPR (d, v);
		}
	      else
		SET_DECL_VALUE_EXPR (d, new_var);
	      DECL_HAS_VALUE_EXPR_P (d) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      if (oldv)
		SET_DECL_VALUE_EXPR (d, oldv);
	      else
		{
		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
		  DECL_HAS_VALUE_EXPR_P (d) = 0;
		}
	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      if (lab6)
		gimple_seq_add_stmt (end, gimple_build_label (lab6));
	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;
		  gimplify_stmt (&x, &tseq);
		  gimple_seq_add_seq (end, tseq);
		}
	    }
	  else
	    {
	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
	      ref = unshare_expr (ref);
	      gimplify_assign (ref, x, end);
	    }
	  gimple_seq_add_stmt (end, gimple_build_label (lab4));
	  ++cnt;
	  field = DECL_CHAIN (bfield);
	}
    }

  if (code == OMP_TASKGROUP)
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
      gimple_seq_add_stmt (start, g);
    }
  else
    {
      tree c;
      if (code == OMP_FOR)
	c = gimple_omp_for_clauses (ctx->stmt);
      else if (code == OMP_SECTIONS)
	c = gimple_omp_sections_clauses (ctx->stmt);
      else
	c = gimple_omp_taskreg_clauses (ctx->stmt);
      c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
			build_fold_addr_expr (avar));
      gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
    }

  gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
  gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
						 size_one_node));
  g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
  gimple_seq_add_stmt (end, g);
  gimple_seq_add_stmt (end, gimple_build_label (lab2));
  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      enum built_in_function bfn
	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
      t = builtin_decl_explicit (bfn);
      tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
      tree arg;
      if (cancellable)
	{
	  arg = create_tmp_var (c_bool_type);
	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
							 cancellable));
	}
      else
	arg = build_int_cst (c_bool_type, 0);
      g = gimple_build_call (t, 1, arg);
    }
  else
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
    }
  gimple_seq_add_stmt (end, g);
  t = build_constructor (atype, NULL);
  TREE_THIS_VOLATILE (t) = 1;
  gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
}
/* Expand code for an OpenMP taskgroup directive.  */

static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  gcall *x;
  gbind *bind;
  gimple_seq dseq = NULL;
  tree block = make_node (BLOCK);

  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  push_gimplify_context ();

  x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
			 0);
  gimple_bind_add_stmt (bind, x);

  lower_omp_task_reductions (ctx, OMP_TASKGROUP,
			     gimple_omp_taskgroup_clauses (stmt),
			     gimple_bind_body_ptr (bind), &dseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
  gimple_bind_add_seq (bind, dseq);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
  */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      tree vec;
      unsigned int i;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp ordered threads, while we
     want a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
7802 /* Expand code for an OpenMP ordered directive. */
7805 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7808 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
7809 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
7812 bool simd
    = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
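
/* A sketch of the control flow constructed above for the MAYBE_SIMT case
   (illustrative only, not emitted verbatim):

       counter = GOMP_SIMT_LANE ();
     body:
       if (GOMP_SIMT_ORDERED_PRED (counter) == 0)
	 ... ordered body ...
     test:
       counter = counter - 1;
       if (GOMP_SIMT_VOTE_ANY (counter >= 0))
	 goto body;
     end:

   Lane 0 runs the body on the first trip, and each following trip retires
   one more lane until the vote reports that no lane still has a
   non-negative counter.  */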
/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case, it
   requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */
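
/* For illustration (a sketch, not emitted verbatim): a named critical

       #pragma omp critical (foo)
	 x++;

   is lowered into calls bracketing the body,

       GOMP_critical_name_start (&.gomp_critical_user_foo);
       x++;
       GOMP_critical_name_end (&.gomp_critical_user_foo);

   with .gomp_critical_user_foo the common symbol created below; the
   unnamed form uses GOMP_critical_start/GOMP_critical_end instead.  */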
static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  */
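
/* A sketch of the effect: for

       #pragma omp for lastprivate (x)
       for (i = 0; i < N; i++)
	 x = f (i);

   each thread's epilogue is gated on the negated control predicate, here
   (i >= N) -- strength-reduced to (i == N) when the step is +-1 -- so only
   the thread that ran the sequentially last iteration copies its private x
   back to the original variable.  */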
static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
      || gimple_omp_for_grid_phony (fd->for_stmt))
    cond = omp_grid_lastprivate_predicate (fd);
  else
    {
      tree n2 = fd->loop.n2;
      if (fd->collapse > 1
	  && TREE_CODE (n2) != INTEGER_CST
	  && gimple_omp_for_combined_into_p (fd->for_stmt))
	{
	  struct omp_context *taskreg_ctx = NULL;
	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	    {
	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
		{
		  if (gimple_omp_for_combined_into_p (gfor))
		    {
		      gcc_assert (ctx->outer->outer
				  && is_parallel_ctx (ctx->outer->outer));
		      taskreg_ctx = ctx->outer->outer;
		    }
		  else
		    {
		      struct omp_for_data outer_fd;
		      omp_extract_for_data (gfor, &outer_fd, NULL);
		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		    }
		}
	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
		taskreg_ctx = ctx->outer->outer;
	    }
	  else if (is_taskreg_ctx (ctx->outer))
	    taskreg_ctx = ctx->outer;
	  if (taskreg_ctx)
	    {
	      int i;
	      tree taskreg_clauses
		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	      tree innerc = omp_find_clause (taskreg_clauses,
					     OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	      for (i = 0; i < fd->collapse; i++)
		{
		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					    OMP_CLAUSE__LOOPTEMP_);
		  gcc_assert (innerc);
		}
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      if (innerc)
		n2 = fold_convert (TREE_TYPE (n2),
				   lookup_decl (OMP_CLAUSE_DECL (innerc),
						taskreg_ctx));
	    }
	}
      cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
    }

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
/* Lower code for an OMP loop directive.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and its block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
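
  /* For instance (illustrative only), a non-invariant bound as in

	 #pragma omp for
	 for (i = 0; i < p->n; i++)

     has "p->n" replaced by a compiler-generated formal temporary computed
     in CNT_LIST ahead of the directive, so later expansion sees a simple
     variable instead of the original expression.  */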
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt),
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);

  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
		     && gimple_omp_for_grid_phony (stmt));
  if (!phony_loop)
    gimple_seq_add_stmt (&body, stmt);
  gimple_seq_add_seq (&body, gimple_omp_body (stmt));

  if (!phony_loop)
    gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							   fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  if (rclauses)
    {
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  if (!phony_loop)
    {
      /* Region exit marker goes at the end of the loop body.  */
      gimple *g = gimple_build_omp_return (fd.have_nowait);
      gimple_seq_add_stmt (&body, g);

      gimple_seq_add_seq (&body, tred_dlist);

      maybe_add_implicit_barrier_cancel (ctx, g, &body);

      if (rclauses)
	OMP_CLAUSE_DECL (rclauses) = rtmp;
    }

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
/* Callback for walk_stmts.  Check if the current statement only contains
   GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */
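
/* As context (a sketch): this walk is what detects the combined form

       #pragma omp parallel
       #pragma omp for
       for (...) ...

   where the parallel body is a single worksharing construct, so that
   expansion can use a combined runtime entry point (e.g. one of the
   GOMP_parallel_loop_* family) instead of a plain GOMP_parallel call.  */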
static tree
check_combined_parallel (gimple_stmt_iterator *gsi_p,
			 bool *handled_ops_p,
			 struct walk_stmt_info *wi)
{
  int *info = (int *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_DEBUG:
      break;
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
      *info = *info == 0 ? 1 : -1;
      break;
    default:
      *info = -1;
      break;
    }
  return NULL;
}
struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  omp_context *ctx;
};

static tree
task_copyfn_copy_decl (tree var, copy_body_data *cb)
{
  struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;

  if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
    return create_tmp_var (TREE_TYPE (var));

  return var;
}

static tree
task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
{
  tree name, new_fields = NULL, type, f;

  type = lang_hooks.types.make_type (RECORD_TYPE);
  name = DECL_NAME (TYPE_NAME (orig_type));
  name = build_decl (gimple_location (tcctx->ctx->stmt),
		     TYPE_DECL, name, type);
  TYPE_NAME (type) = name;

  for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
    {
      tree new_f = copy_node (f);
      DECL_CONTEXT (new_f) = type;
      TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
      TREE_CHAIN (new_f) = new_fields;
      walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		 &tcctx->cb, NULL);
      new_fields = new_f;
      tcctx->cb.decl_map->put (f, new_f);
    }
  TYPE_FIELDS (type) = nreverse (new_fields);
  layout_type (type);
  return type;
}
/* Create task copyfn.  */
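
/* A sketch of the helper this builds (struct and function names here are
   illustrative): for

       #pragma omp task firstprivate (x)

   GOMP_task is handed a copy function shaped like

       void .omp_task_copyfn (struct .omp_data_t *dst,
			      struct .omp_data_s *src)
       {
	 dst->x = src->x;      (or the language's copy constructor)
       }

   moving firstprivate data from the spawning thread's record
   (SRECORD_TYPE) into the task block allocated by GOMP_task
   (RECORD_TYPE).  */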
static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    tcctx.cb.decl_map = NULL;

  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	decl = OMP_CLAUSE_DECL (c);
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
	if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
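
/* A sketch of the dependence array built below (illustrative): for

       #pragma omp task depend (out: a) depend (in: b, c)

   no mutexinoutset or depobj clauses occur, so IDX stays 2 and the
   runtime sees

       { (void *) 3,    total number of depend addresses
	 (void *) 1,    how many of them are out/inout
	 &a, &b, &c }   out/inout entries first, then the rest

   When mutexinoutset or depobj clauses are present, the five-slot header
   form is used instead: a zero, the total, and then the three per-kind
   counts before the addresses.  */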
static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  gcc_unreachable ();
	}
  if (cnt[1] || cnt[3])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  tree type = build_array_type_nelts (ptr_type_node, total + idx);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  for (i = 0; i < 4; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  tree clobber = build_constructor (type, NULL);
  TREE_THIS_VOLATILE (clobber) = 1;
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  */
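
/* A data-sharing sketch (field names are illustrative): for

       #pragma omp parallel shared (s) firstprivate (x)

   the sender side fills a local record and passes its address,

       .omp_data_o.s = &s;
       .omp_data_o.x = x;
       GOMP_parallel (child_fn, &.omp_data_o, 0, 0);

   while inside CHILD_FN the receiver decl points back at that record, so
   uses of s and x are rewritten into field accesses off it.  */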
static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (dep_bind, dep_ilist);
	  gimple_bind_add_stmt (dep_bind, stmt);
	  gimple_bind_add_seq (dep_bind, dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			      OMP_CLAUSE__REDUCTEMP_)))
    {
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
							: OMP_PARALLEL,
				 gimple_omp_taskreg_clauses (ctx->stmt),
				 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
    && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
  if (phony_construct && ctx->record_type)
    {
      gcc_checking_assert (!ctx->receiver_decl);
      ctx->receiver_decl = create_tmp_var
	(build_reference_type (ctx->record_type), ".omp_rec");
    }
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
    lower_reduction_clauses (clauses, &par_rlist, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  if (!phony_construct)
    {
      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }

  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  if (!phony_construct)
    gimple_bind_add_stmt (bind, stmt);
  else
    gimple_bind_add_seq (bind, new_body);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
/* Lower the GIMPLE_OMP_TARGET in the current statement
   in GSI_P.  CTX holds context information for the directive.  */
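
/* An offloading sketch (a simplification of what follows): for

       #pragma omp target map (tofrom: a)

   the host side is lowered roughly into

       .omp_data_arr.a = &a;
       GOMP_target_ext (device, child_fn, 1, &.omp_data_arr,
			.omp_data_sizes, .omp_data_kinds, ...);

   where .omp_data_sizes holds each mapped object's size in bytes and
   .omp_data_kinds packs the GOMP_MAP_* kind with the alignment, one entry
   per mapped object (the remaining GOMP_target_ext arguments are elided
   here).  */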
9045 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9048 tree child_fn
, t
, c
;
9049 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
9050 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
9051 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
9052 location_t loc
= gimple_location (stmt
);
9053 bool offloaded
, data_region
;
9054 unsigned int map_cnt
= 0;
9056 offloaded
= is_gimple_omp_offloaded (stmt
);
9057 switch (gimple_omp_target_kind (stmt
))
9059 case GF_OMP_TARGET_KIND_REGION
:
9060 case GF_OMP_TARGET_KIND_UPDATE
:
9061 case GF_OMP_TARGET_KIND_ENTER_DATA
:
9062 case GF_OMP_TARGET_KIND_EXIT_DATA
:
9063 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
9064 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
9065 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
9066 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
9067 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
9068 data_region
= false;
9070 case GF_OMP_TARGET_KIND_DATA
:
9071 case GF_OMP_TARGET_KIND_OACC_DATA
:
9072 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
9079 clauses
= gimple_omp_target_clauses (stmt
);
9081 gimple_seq dep_ilist
= NULL
;
9082 gimple_seq dep_olist
= NULL
;
9083 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
9085 push_gimplify_context ();
9086 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
9087 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
9088 &dep_ilist
, &dep_olist
);
9095 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
9096 tgt_body
= gimple_bind_body (tgt_bind
);
9098 else if (data_region
)
9099 tgt_body
= gimple_omp_body (stmt
);
9100 child_fn
= ctx
->cb
.dst_fn
;
9102 push_gimplify_context ();
9105 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
9106 switch (OMP_CLAUSE_CODE (c
))
9112 case OMP_CLAUSE_MAP
:
9114 /* First check what we're prepared to handle in the following. */
9115 switch (OMP_CLAUSE_MAP_KIND (c
))
9117 case GOMP_MAP_ALLOC
:
9120 case GOMP_MAP_TOFROM
:
9121 case GOMP_MAP_POINTER
:
9122 case GOMP_MAP_TO_PSET
:
9123 case GOMP_MAP_DELETE
:
9124 case GOMP_MAP_RELEASE
:
9125 case GOMP_MAP_ALWAYS_TO
:
9126 case GOMP_MAP_ALWAYS_FROM
:
9127 case GOMP_MAP_ALWAYS_TOFROM
:
9128 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
9129 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
9130 case GOMP_MAP_STRUCT
:
9131 case GOMP_MAP_ALWAYS_POINTER
:
9133 case GOMP_MAP_FORCE_ALLOC
:
9134 case GOMP_MAP_FORCE_TO
:
9135 case GOMP_MAP_FORCE_FROM
:
9136 case GOMP_MAP_FORCE_TOFROM
:
9137 case GOMP_MAP_FORCE_PRESENT
:
9138 case GOMP_MAP_FORCE_DEVICEPTR
:
9139 case GOMP_MAP_DEVICE_RESIDENT
:
9141 gcc_assert (is_gimple_omp_oacc (stmt
));
9149 case OMP_CLAUSE_FROM
:
9151 var
= OMP_CLAUSE_DECL (c
);
9154 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
9155 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
9156 && (OMP_CLAUSE_MAP_KIND (c
)
9157 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
9163 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
9165 tree var2
= DECL_VALUE_EXPR (var
);
9166 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
9167 var2
= TREE_OPERAND (var2
, 0);
9168 gcc_assert (DECL_P (var2
));
9173 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9174 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9175 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9177 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
9179 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
9180 && varpool_node::get_create (var
)->offloadable
)
9183 tree type
= build_pointer_type (TREE_TYPE (var
));
9184 tree new_var
= lookup_decl (var
, ctx
);
9185 x
= create_tmp_var_raw (type
, get_name (new_var
));
9186 gimple_add_tmp_var (x
);
9187 x
= build_simple_mem_ref (x
);
9188 SET_DECL_VALUE_EXPR (new_var
, x
);
9189 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9194 if (!maybe_lookup_field (var
, ctx
))
9197 /* Don't remap oacc parallel reduction variables, because the
9198 intermediate result must be local to each gang. */
9199 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9200 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
9202 x
= build_receiver_ref (var
, true, ctx
);
9203 tree new_var
= lookup_decl (var
, ctx
);
9205 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9206 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
9207 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
9208 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
9209 x
= build_simple_mem_ref (x
);
9210 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
9212 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
9213 if (omp_is_reference (new_var
)
9214 && TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
)
9216 /* Create a local object to hold the instance
9218 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
9219 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
9220 tree inst
= create_tmp_var (type
, id
);
9221 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
9222 x
= build_fold_addr_expr (inst
);
9224 gimplify_assign (new_var
, x
, &fplist
);
9226 else if (DECL_P (new_var
))
9228 SET_DECL_VALUE_EXPR (new_var
, x
);
9229 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9237 case OMP_CLAUSE_FIRSTPRIVATE
:
9238 if (is_oacc_parallel (ctx
))
9239 goto oacc_firstprivate
;
9241 var
= OMP_CLAUSE_DECL (c
);
9242 if (!omp_is_reference (var
)
9243 && !is_gimple_reg_type (TREE_TYPE (var
)))
9245 tree new_var
= lookup_decl (var
, ctx
);
9246 if (is_variable_sized (var
))
9248 tree pvar
= DECL_VALUE_EXPR (var
);
9249 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
9250 pvar
= TREE_OPERAND (pvar
, 0);
9251 gcc_assert (DECL_P (pvar
));
9252 tree new_pvar
= lookup_decl (pvar
, ctx
);
9253 x
= build_fold_indirect_ref (new_pvar
);
9254 TREE_THIS_NOTRAP (x
) = 1;
9257 x
= build_receiver_ref (var
, true, ctx
);
9258 SET_DECL_VALUE_EXPR (new_var
, x
);
9259 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9263 case OMP_CLAUSE_PRIVATE
:
9264 if (is_gimple_omp_oacc (ctx
->stmt
))
9266 var
= OMP_CLAUSE_DECL (c
);
9267 if (is_variable_sized (var
))
9269 tree new_var
= lookup_decl (var
, ctx
);
9270 tree pvar
= DECL_VALUE_EXPR (var
);
9271 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
9272 pvar
= TREE_OPERAND (pvar
, 0);
9273 gcc_assert (DECL_P (pvar
));
9274 tree new_pvar
= lookup_decl (pvar
, ctx
);
9275 x
= build_fold_indirect_ref (new_pvar
);
9276 TREE_THIS_NOTRAP (x
) = 1;
9277 SET_DECL_VALUE_EXPR (new_var
, x
);
9278 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9282 case OMP_CLAUSE_USE_DEVICE_PTR
:
9283 case OMP_CLAUSE_IS_DEVICE_PTR
:
9284 var
= OMP_CLAUSE_DECL (c
);
9286 if (is_variable_sized (var
))
9288 tree new_var
= lookup_decl (var
, ctx
);
9289 tree pvar
= DECL_VALUE_EXPR (var
);
9290 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
9291 pvar
= TREE_OPERAND (pvar
, 0);
9292 gcc_assert (DECL_P (pvar
));
9293 tree new_pvar
= lookup_decl (pvar
, ctx
);
9294 x
= build_fold_indirect_ref (new_pvar
);
9295 TREE_THIS_NOTRAP (x
) = 1;
9296 SET_DECL_VALUE_EXPR (new_var
, x
);
9297 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9299 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
9301 tree new_var
= lookup_decl (var
, ctx
);
9302 tree type
= build_pointer_type (TREE_TYPE (var
));
9303 x
= create_tmp_var_raw (type
, get_name (new_var
));
9304 gimple_add_tmp_var (x
);
9305 x
= build_simple_mem_ref (x
);
9306 SET_DECL_VALUE_EXPR (new_var
, x
);
9307 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9311 tree new_var
= lookup_decl (var
, ctx
);
9312 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
9313 gimple_add_tmp_var (x
);
9314 SET_DECL_VALUE_EXPR (new_var
, x
);
9315 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9322 target_nesting_level
++;
9323 lower_omp (&tgt_body
, ctx
);
9324 target_nesting_level
--;
9326 else if (data_region
)
9327 lower_omp (&tgt_body
, ctx
);
9331 /* Declare all the variables created by mapping and the variables
9332 declared in the scope of the target body. */
9333 record_vars_into (ctx
->block_vars
, child_fn
);
9334 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
9335 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
9340 if (ctx
->record_type
)
9343 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
9344 DECL_NAMELESS (ctx
->sender_decl
) = 1;
9345 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
9346 t
= make_tree_vec (3);
9347 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
9349 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
9351 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
9352 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
9353 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
9354 tree tkind_type
= short_unsigned_type_node
;
9355 int talign_shift
= 8;
9357 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
9359 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
9360 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
9361 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
9362 gimple_omp_target_set_data_arg (stmt
, t
);
9364 vec
<constructor_elt
, va_gc
> *vsize
;
9365 vec
<constructor_elt
, va_gc
> *vkind
;
9366 vec_alloc (vsize
, map_cnt
);
9367 vec_alloc (vkind
, map_cnt
);
9368 unsigned int map_idx
= 0;
9370 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
9371 switch (OMP_CLAUSE_CODE (c
))
9373 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
9374 unsigned int talign
;
9379 case OMP_CLAUSE_MAP
:
9381 case OMP_CLAUSE_FROM
:
9382 oacc_firstprivate_map
:
9384 ovar
= OMP_CLAUSE_DECL (c
);
9385 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9386 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9387 || (OMP_CLAUSE_MAP_KIND (c
)
9388 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
9392 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9393 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
9395 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
9396 == get_base_address (ovar
));
9397 nc
= OMP_CLAUSE_CHAIN (c
);
9398 ovar
= OMP_CLAUSE_DECL (nc
);
9402 tree x
= build_sender_ref (ovar
, ctx
);
9404 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
9405 gimplify_assign (x
, v
, &ilist
);
9411 if (DECL_SIZE (ovar
)
9412 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
9414 tree ovar2
= DECL_VALUE_EXPR (ovar
);
9415 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
9416 ovar2
= TREE_OPERAND (ovar2
, 0);
9417 gcc_assert (DECL_P (ovar2
));
9420 if (!maybe_lookup_field (ovar
, ctx
))
9424 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
9425 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
9426 talign
= DECL_ALIGN_UNIT (ovar
);
9429 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
9430 x
= build_sender_ref (ovar
, ctx
);
9432 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9433 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
9434 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
9435 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
9437 gcc_assert (offloaded
);
9439 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
9440 mark_addressable (avar
);
9441 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
9442 talign
= DECL_ALIGN_UNIT (avar
);
9443 avar
= build_fold_addr_expr (avar
);
9444 gimplify_assign (x
, avar
, &ilist
);
9446 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
9448 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
9449 if (!omp_is_reference (var
))
9451 if (is_gimple_reg (var
)
9452 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
9453 TREE_NO_WARNING (var
) = 1;
9454 var
= build_fold_addr_expr (var
);
9457 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
9458 gimplify_assign (x
, var
, &ilist
);
9460 else if (is_gimple_reg (var
))
9462 gcc_assert (offloaded
);
9463 tree avar
= create_tmp_var (TREE_TYPE (var
));
9464 mark_addressable (avar
);
9465 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
9466 if (GOMP_MAP_COPY_TO_P (map_kind
)
9467 || map_kind
== GOMP_MAP_POINTER
9468 || map_kind
== GOMP_MAP_TO_PSET
9469 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
9471 /* If we need to initialize a temporary
9472 with VAR because it is not addressable, and
9473 the variable hasn't been initialized yet, then
9474 we'll get a warning for the store to avar.
9475 Don't warn in that case, the mapping might
9477 TREE_NO_WARNING (var
) = 1;
9478 gimplify_assign (avar
, var
, &ilist
);
9480 avar
= build_fold_addr_expr (avar
);
9481 gimplify_assign (x
, avar
, &ilist
);
9482 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
9483 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
9484 && !TYPE_READONLY (TREE_TYPE (var
)))
9486 x
= unshare_expr (x
);
9487 x
= build_simple_mem_ref (x
);
9488 gimplify_assign (var
, x
, &olist
);
9493 var
= build_fold_addr_expr (var
);
9494 gimplify_assign (x
, var
, &ilist
);
9498 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
9500 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
9501 s
= TREE_TYPE (ovar
);
9502 if (TREE_CODE (s
) == REFERENCE_TYPE
)
9504 s
= TYPE_SIZE_UNIT (s
);
9507 s
= OMP_CLAUSE_SIZE (c
);
9509 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
9510 s
= fold_convert (size_type_node
, s
);
9511 purpose
= size_int (map_idx
++);
9512 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
9513 if (TREE_CODE (s
) != INTEGER_CST
)
9514 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
9516 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
9517 switch (OMP_CLAUSE_CODE (c
))
9519 case OMP_CLAUSE_MAP
:
9520 tkind
= OMP_CLAUSE_MAP_KIND (c
);
9522 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
9525 case GOMP_MAP_ALLOC
:
9528 case GOMP_MAP_TOFROM
:
9529 case GOMP_MAP_ALWAYS_TO
:
9530 case GOMP_MAP_ALWAYS_FROM
:
9531 case GOMP_MAP_ALWAYS_TOFROM
:
9532 case GOMP_MAP_RELEASE
:
9533 case GOMP_MAP_FORCE_TO
:
9534 case GOMP_MAP_FORCE_FROM
:
9535 case GOMP_MAP_FORCE_TOFROM
:
9536 case GOMP_MAP_FORCE_PRESENT
:
9537 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
9539 case GOMP_MAP_DELETE
:
9540 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
9544 if (tkind_zero
!= tkind
)
9546 if (integer_zerop (s
))
9548 else if (integer_nonzerop (s
))
9552 case OMP_CLAUSE_FIRSTPRIVATE
:
9553 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
9554 tkind
= GOMP_MAP_TO
;
9558 tkind
= GOMP_MAP_TO
;
9561 case OMP_CLAUSE_FROM
:
9562 tkind
= GOMP_MAP_FROM
;
9568 gcc_checking_assert (tkind
9569 < (HOST_WIDE_INT_C (1U) << talign_shift
));
9570 gcc_checking_assert (tkind_zero
9571 < (HOST_WIDE_INT_C (1U) << talign_shift
));
9572 talign
= ceil_log2 (talign
);
9573 tkind
|= talign
<< talign_shift
;
9574 tkind_zero
|= talign
<< talign_shift
;
9575 gcc_checking_assert (tkind
9576 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
9577 gcc_checking_assert (tkind_zero
9578 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
9579 if (tkind
== tkind_zero
)
9580 x
= build_int_cstu (tkind_type
, tkind
);
9583 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
9584 x
= build3 (COND_EXPR
, tkind_type
,
9585 fold_build2 (EQ_EXPR
, boolean_type_node
,
9586 unshare_expr (s
), size_zero_node
),
9587 build_int_cstu (tkind_type
, tkind_zero
),
9588 build_int_cstu (tkind_type
, tkind
));
9590 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
9595 case OMP_CLAUSE_FIRSTPRIVATE
:
9596 if (is_oacc_parallel (ctx
))
9597 goto oacc_firstprivate_map
;
9598 ovar
= OMP_CLAUSE_DECL (c
);
9599 if (omp_is_reference (ovar
))
9600 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
9602 talign
= DECL_ALIGN_UNIT (ovar
);
9603 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
9604 x
= build_sender_ref (ovar
, ctx
);
9605 tkind
= GOMP_MAP_FIRSTPRIVATE
;
9606 type
= TREE_TYPE (ovar
);
9607 if (omp_is_reference (ovar
))
9608 type
= TREE_TYPE (type
);
9609 if ((INTEGRAL_TYPE_P (type
)
9610 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
9611 || TREE_CODE (type
) == POINTER_TYPE
)
9613 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
9615 if (omp_is_reference (var
))
9616 t
= build_simple_mem_ref (var
);
9617 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
9618 TREE_NO_WARNING (var
) = 1;
9619 if (TREE_CODE (type
) != POINTER_TYPE
)
9620 t
= fold_convert (pointer_sized_int_node
, t
);
9621 t
= fold_convert (TREE_TYPE (x
), t
);
9622 gimplify_assign (x
, t
, &ilist
);
9624 else if (omp_is_reference (var
))
9625 gimplify_assign (x
, var
, &ilist
);
9626 else if (is_gimple_reg (var
))
9628 tree avar
= create_tmp_var (TREE_TYPE (var
));
9629 mark_addressable (avar
);
9630 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
9631 TREE_NO_WARNING (var
) = 1;
9632 gimplify_assign (avar
, var
, &ilist
);
9633 avar
= build_fold_addr_expr (avar
);
9634 gimplify_assign (x
, avar
, &ilist
);
9638 var
= build_fold_addr_expr (var
);
9639 gimplify_assign (x
, var
, &ilist
);
9641 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
9643 else if (omp_is_reference (ovar
))
9644 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
9646 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
9647 s
= fold_convert (size_type_node
, s
);
9648 purpose
= size_int (map_idx
++);
9649 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
9650 if (TREE_CODE (s
) != INTEGER_CST
)
9651 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
9653 gcc_checking_assert (tkind
9654 < (HOST_WIDE_INT_C (1U) << talign_shift
));
9655 talign
= ceil_log2 (talign
);
9656 tkind
|= talign
<< talign_shift
;
9657 gcc_checking_assert (tkind
9658 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
9659 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
9660 build_int_cstu (tkind_type
, tkind
));
9663 case OMP_CLAUSE_USE_DEVICE_PTR
:
9664 case OMP_CLAUSE_IS_DEVICE_PTR
:
9665 ovar
= OMP_CLAUSE_DECL (c
);
9666 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
9667 x
= build_sender_ref (ovar
, ctx
);
9668 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
9669 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
9671 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
9672 type
= TREE_TYPE (ovar
);
9673 if (TREE_CODE (type
) == ARRAY_TYPE
)
9674 var
= build_fold_addr_expr (var
);
9677 if (omp_is_reference (ovar
))
9679 type
= TREE_TYPE (type
);
9680 if (TREE_CODE (type
) != ARRAY_TYPE
)
9681 var
= build_simple_mem_ref (var
);
9682 var
= fold_convert (TREE_TYPE (x
), var
);
9685 gimplify_assign (x
, var
, &ilist
);
9687 purpose
= size_int (map_idx
++);
9688 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
9689 gcc_checking_assert (tkind
9690 < (HOST_WIDE_INT_C (1U) << talign_shift
));
9691 gcc_checking_assert (tkind
9692 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
9693 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
9694 build_int_cstu (tkind_type
, tkind
));
9698 gcc_assert (map_idx
== map_cnt
);
9700 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
9701 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
9702 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
9703 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
9704 for (int i
= 1; i
<= 2; i
++)
9705 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
9707 gimple_seq initlist
= NULL
;
9708 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
9709 TREE_VEC_ELT (t
, i
)),
9710 &initlist
, true, NULL_TREE
);
9711 gimple_seq_add_seq (&ilist
, initlist
);
9713 tree clobber
= build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, i
)),
9715 TREE_THIS_VOLATILE (clobber
) = 1;
9716 gimple_seq_add_stmt (&olist
,
9717 gimple_build_assign (TREE_VEC_ELT (t
, i
),
9721 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
9722 TREE_THIS_VOLATILE (clobber
) = 1;
9723 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
9727 /* Once all the expansions are done, sequence all the different
9728 fragments inside gimple_omp_body. */
9733 && ctx
->record_type
)
9735 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
9736 /* fixup_child_record_type might have changed receiver_decl's type. */
9737 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
9738 gimple_seq_add_stmt (&new_body
,
9739 gimple_build_assign (ctx
->receiver_decl
, t
));
9741 gimple_seq_add_seq (&new_body
, fplist
);
  if (offloaded || data_region)
    {
      tree prev = NULL_TREE;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var, x;
	  default:
	    break;
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (var)
		|| is_gimple_reg_type (TREE_TYPE (var)))
	      {
		tree new_var = lookup_decl (var, ctx);
		tree type;
		type = TREE_TYPE (var);
		if (omp_is_reference (var))
		  type = TREE_TYPE (type);
		if ((INTEGRAL_TYPE_P (type)
		     && TYPE_PRECISION (type) <= POINTER_SIZE)
		    || TREE_CODE (type) == POINTER_TYPE)
		  {
		    x = build_receiver_ref (var, false, ctx);
		    if (TREE_CODE (type) != POINTER_TYPE)
		      x = fold_convert (pointer_sized_int_node, x);
		    x = fold_convert (type, x);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    if (omp_is_reference (var))
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
		      }
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
		else
		  {
		    x = build_receiver_ref (var, !omp_is_reference (var), ctx);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
	      }
	    else if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_var = lookup_decl (pvar, ctx);
		x = build_receiver_ref (var, false, ctx);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  {
		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					    get_name (var));
		    gimple_add_tmp_var (x);
		    TREE_ADDRESSABLE (x) = 1;
		    x = build_fold_addr_expr_loc (clause_loc, x);
		  }
		else
		  break;

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_USE_DEVICE_PTR:
	  case OMP_CLAUSE_IS_DEVICE_PTR:
	    var = OMP_CLAUSE_DECL (c);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	      x = build_sender_ref (var, ctx);
	    else
	      x = build_receiver_ref (var, false, ctx);
	    if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_var = lookup_decl (pvar, ctx);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      {
		tree new_var = lookup_decl (var, ctx);
		new_var = DECL_VALUE_EXPR (new_var);
		gcc_assert (TREE_CODE (new_var) == MEM_REF);
		new_var = TREE_OPERAND (new_var, 0);
		gcc_assert (DECL_P (new_var));
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    else
	      {
		tree type = TREE_TYPE (var);
		tree new_var = lookup_decl (var, ctx);
		if (omp_is_reference (var))
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) != ARRAY_TYPE)
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			x = fold_convert (type, x);
			gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				       fb_rvalue);
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
		      }
		  }
		new_var = DECL_VALUE_EXPR (new_var);
		x = fold_convert (TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  }
      /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
	 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
	 are already handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs
	 or references to VLAs.  */
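      /* An illustrative sketch of this second pass (identifiers invented
	 for the example): for

	   int *p;
	   #pragma omp target firstprivate (p)

	 the front end chains a GOMP_MAP_FIRSTPRIVATE_POINTER clause after
	 the clause mapping the data p points to, and the code below
	 initializes the device-side copy of p from that preceding clause's
	 receiver field, subtracting the recorded OMP_CLAUSE_SIZE bias when
	 the mapped section does not start exactly at *p.  */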
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var;
	  default:
	    break;
	  case OMP_CLAUSE_MAP:
	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		poly_int64 offset = 0;
		gcc_assert (prev);
		var = OMP_CLAUSE_DECL (c);
		if (DECL_P (var)
		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
								      ctx))
		    && varpool_node::get_create (var)->offloadable)
		  break;
		if (TREE_CODE (var) == INDIRECT_REF
		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
		  var = TREE_OPERAND (var, 0);
		if (TREE_CODE (var) == COMPONENT_REF)
		  {
		    var = get_addr_base_and_unit_offset (var, &offset);
		    gcc_assert (var != NULL_TREE && DECL_P (var));
		  }
		else if (DECL_SIZE (var)
			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
		  {
		    tree var2 = DECL_VALUE_EXPR (var);
		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
		    var2 = TREE_OPERAND (var2, 0);
		    gcc_assert (DECL_P (var2));
		    var = var2;
		  }
		tree new_var = lookup_decl (var, ctx), x;
		tree type = TREE_TYPE (new_var);
		bool is_ref;
		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			== COMPONENT_REF))
		  {
		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
		    is_ref = true;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
		  {
		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else
		  is_ref = omp_is_reference (var);
		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		  is_ref = false;
		bool ref_to_array = false;
		if (is_ref)
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) == ARRAY_TYPE)
		      {
			type = build_pointer_type (type);
			ref_to_array = true;
		      }
		  }
		else if (TREE_CODE (type) == ARRAY_TYPE)
		  {
		    tree decl2 = DECL_VALUE_EXPR (new_var);
		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
		    decl2 = TREE_OPERAND (decl2, 0);
		    gcc_assert (DECL_P (decl2));
		    new_var = decl2;
		    type = TREE_TYPE (new_var);
		  }
		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
		x = fold_convert_loc (clause_loc, type, x);
		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
		  {
		    tree bias = OMP_CLAUSE_SIZE (c);
		    if (DECL_P (bias))
		      bias = lookup_decl (bias, ctx);
		    bias = fold_convert_loc (clause_loc, sizetype, bias);
		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
					    bias);
		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (x), x, bias);
		  }
		if (ref_to_array)
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		if (is_ref && !ref_to_array)
		  {
		    tree t = create_tmp_var_raw (type, get_name (var));
		    gimple_add_tmp_var (t);
		    TREE_ADDRESSABLE (t) = 1;
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (t, x));
		    x = build_fold_addr_expr_loc (clause_loc, t);
		  }
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
		prev = NULL_TREE;
	      }
	    else if (OMP_CLAUSE_CHAIN (c)
		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
			== OMP_CLAUSE_MAP
		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			 == GOMP_MAP_FIRSTPRIVATE_POINTER
			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	      prev = c;
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    var = OMP_CLAUSE_DECL (c);
	    if (is_variable_sized (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_pvar = lookup_decl (pvar, ctx);
		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		tree al = size_int (DECL_ALIGN (var));
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_pvar, x));
	      }
	    else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  break;
		else
		  {
		    tree atmp
		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
		    tree al = size_int (TYPE_ALIGN (rtype));
		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		  }

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  }
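      /* For instance (an illustrative sketch, not verbatim dump output):
	 a private VLA in an offloaded region,

	   int vla[n];
	   #pragma omp target private (vla)

	 gets its storage materialized in the child function by the
	 OMP_CLAUSE_PRIVATE handling above, roughly as

	   vla$ptr = __builtin_alloca_with_align (n * 4, align);

	 for 4-byte int, where vla$ptr stands for the pointer behind the
	 VLA's DECL_VALUE_EXPR and is not a real identifier.  */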
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      if (is_oacc_parallel (ctx))
	{
	  /* If there are reductions on the offloaded region itself, treat
	     them as a dummy GANG loop.  */
	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);

	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
				 false, NULL, NULL, &fork_seq, &join_seq, ctx);
	}

      gimple_seq_add_seq (&new_body, fork_seq);
      gimple_seq_add_seq (&new_body, tgt_body);
      gimple_seq_add_seq (&new_body, join_seq);

      if (offloaded)
	new_body = maybe_catch_exception (new_body);

      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }

  bind = gimple_build_bind (NULL, NULL,
			    tgt_bind ? gimple_bind_block (tgt_bind)
				     : NULL_TREE);
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
/* Expand code for an OpenMP teams directive.  */
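/* For example (illustrative), a directive such as

     #pragma omp teams num_teams (4) thread_limit (32)

   is lowered below to, roughly,

     __builtin_GOMP_teams (4, 32);
     <teams body>

   with an omitted clause passed as 0 so the runtime picks a default.  */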
static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
/* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */

static void
lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
		       gimple_build_omp_return (false));
}
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
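/* Illustrative example of why the ADDR_EXPR handling above matters:
   given

     static int g;
     #pragma omp parallel private (g)
       use (&g);

   (use being a placeholder), privatization rewrites the operand of the
   ADDR_EXPR, so a TREE_CONSTANT flag computed while &g still named the
   global can be stale and must be recomputed before regimplification.  */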
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  omp_context *ctx;
  vec<tree> *decls;
};

/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
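/* A hypothetical illustration: for a non-static data member n used in
   a clause inside a member function, e.g.

     #pragma omp parallel firstprivate (n)

   the scan phase created an artificial dummy VAR_DECL whose
   DECL_VALUE_EXPR reads this->n.  The wrapper above temporarily remaps
   that value expression to the copy visible in CTX while the statement
   operands are regimplified, then restores the original from DECLS.  */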
static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    {
	      omp_context *cctx = ctx;
	      if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
		cctx = cctx->outer;
	      gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	      if (!cctx->cancellable)
		{
		  if (DECL_FUNCTION_CODE (fndecl)
		      == BUILT_IN_GOMP_CANCELLATION_POINT)
		    {
		      stmt = gimple_build_nop ();
		      gsi_replace (gsi_p, stmt, false);
		    }
		  break;
		}
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
		{
		  fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		  gimple_call_set_fndecl (call_stmt, fndecl);
		  gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
		}
	      tree lhs;
	      lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	      gimple_call_set_lhs (call_stmt, lhs);
	      tree fallthru_label;
	      fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	      gimple *g;
	      g = gimple_build_label (fallthru_label);
	      gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      g = gimple_build_cond (NE_EXPR, lhs,
				     fold_convert (TREE_TYPE (lhs),
						   boolean_false_node),
				     cctx->cancel_label, fallthru_label);
	      gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    }
	    break;
	  default:
	    break;
	  }
      /* FALLTHRU */
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}
namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */
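/* For example (illustrative), these passes reject

     #pragma omp parallel
     {
       goto out;
     }
     out:;

   with "invalid branch to/from OpenMP structured block", replacing the
   offending jump with a GIMPLE_NOP.  */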
static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    /* Otherwise, be vague and lazy, but efficient.  */
    error ("invalid branch to/from %s structured block", kind);

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
10666 where each label is found. */
10669 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
10670 struct walk_stmt_info
*wi
)
10672 gimple
*context
= (gimple
*) wi
->info
;
10673 gimple
*inner_context
;
10674 gimple
*stmt
= gsi_stmt (*gsi_p
);
10676 *handled_ops_p
= true;
10678 switch (gimple_code (stmt
))
10682 case GIMPLE_OMP_PARALLEL
:
10683 case GIMPLE_OMP_TASK
:
10684 case GIMPLE_OMP_SECTIONS
:
10685 case GIMPLE_OMP_SINGLE
:
10686 case GIMPLE_OMP_SECTION
:
10687 case GIMPLE_OMP_MASTER
:
10688 case GIMPLE_OMP_ORDERED
:
10689 case GIMPLE_OMP_CRITICAL
:
10690 case GIMPLE_OMP_TARGET
:
10691 case GIMPLE_OMP_TEAMS
:
10692 case GIMPLE_OMP_TASKGROUP
:
10693 /* The minimal context here is just the current OMP construct. */
10694 inner_context
= stmt
;
10695 wi
->info
= inner_context
;
10696 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
10697 wi
->info
= context
;
10700 case GIMPLE_OMP_FOR
:
10701 inner_context
= stmt
;
10702 wi
->info
= inner_context
;
10703 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10705 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
10706 diagnose_sb_1
, NULL
, wi
);
10707 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
10708 wi
->info
= context
;
10712 splay_tree_insert (all_labels
,
10713 (splay_tree_key
) gimple_label_label (
10714 as_a
<glabel
*> (stmt
)),
10715 (splay_tree_value
) context
);
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}
namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}

#include "gt-omp-low.h"