1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2023 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "gimple-expr.h"
30 #include "stringpool.h"
31 #include "fold-const.h"
32 #include "gimplify.h" /* For create_tmp_var_raw. */
33 #include "trans-stmt.h"
34 #include "trans-types.h"
35 #include "trans-array.h"
36 #include "trans-const.h"
38 #include "constructor.h"
39 #include "gomp-constants.h"
40 #include "omp-general.h"
42 #include "memmodel.h" /* For MEMMODEL_ enums. */
45 #define GCC_DIAG_STYLE __gcc_tdiag__
46 #include "diagnostic-core.h"
48 #define GCC_DIAG_STYLE __gcc_gfc__
54 /* True if OpenMP should regard this DECL as being a scalar which has Fortran's
55 allocatable or pointer attribute. */
58 gfc_omp_is_allocatable_or_ptr (const_tree decl
)
61 && (GFC_DECL_GET_SCALAR_POINTER (decl
)
62 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)));
65 /* True if the argument is an optional argument; except that false is also
66 returned for arguments with the value attribute (nonpointers) and for
67 assumed-shape variables (decl is a local variable containing arg->data).
68 Note that for 'procedure(), optional' the value false is used as that's
69 always a pointer and no additional indirection is used.
70 Note that pvoid_type_node is for 'type(c_ptr), value' (and c_funloc). */
73 gfc_omp_is_optional_argument (const_tree decl
)
75 /* Note: VAR_DECL can occur with BIND(C) and array descriptors. */
76 return ((TREE_CODE (decl
) == PARM_DECL
|| VAR_P (decl
))
77 && DECL_LANG_SPECIFIC (decl
)
78 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
79 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
)))
80 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) != FUNCTION_TYPE
81 && GFC_DECL_OPTIONAL_ARGUMENT (decl
));
84 /* Check whether this DECL belongs to a Fortran optional argument.
85 With 'for_present_check' set to false, decls which are optional parameters
86 themselves are returned as tree - or a NULL_TREE otherwise. Those decls are
87 always pointers. With 'for_present_check' set to true, the decl for checking
88 whether an argument is present is returned; for arguments with value
89 attribute this is the hidden argument and of BOOLEAN_TYPE. If the decl is
90 unrelated to optional arguments, NULL_TREE is returned. */
93 gfc_omp_check_optional_argument (tree decl
, bool for_present_check
)
95 if (!for_present_check
)
96 return gfc_omp_is_optional_argument (decl
) ? decl
: NULL_TREE
;
98 if (!DECL_LANG_SPECIFIC (decl
))
101 tree orig_decl
= decl
;
103 /* For assumed-shape arrays, a local decl with arg->data is used. */
104 if (TREE_CODE (decl
) != PARM_DECL
105 && (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
))
106 || GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))))
107 decl
= GFC_DECL_SAVED_DESCRIPTOR (decl
);
109 /* Note: With BIND(C), array descriptors are converted to a VAR_DECL. */
110 if (decl
== NULL_TREE
111 || (TREE_CODE (decl
) != PARM_DECL
&& TREE_CODE (decl
) != VAR_DECL
)
112 || !DECL_LANG_SPECIFIC (decl
)
113 || !GFC_DECL_OPTIONAL_ARGUMENT (decl
))
116 /* Scalars with VALUE attribute which are passed by value use a hidden
117 argument to denote the present status. They are passed as nonpointer type
118 with one exception: 'type(c_ptr), value' as 'void*'. */
119 /* Cf. trans-expr.cc's gfc_conv_expr_present. */
120 if (TREE_CODE (TREE_TYPE (decl
)) != POINTER_TYPE
121 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
123 char name
[GFC_MAX_SYMBOL_LEN
+ 2];
127 strcpy (&name
[1], IDENTIFIER_POINTER (DECL_NAME (decl
)));
128 tree_name
= get_identifier (name
);
130 /* Walk function argument list to find the hidden arg. */
131 decl
= DECL_ARGUMENTS (DECL_CONTEXT (decl
));
132 for ( ; decl
!= NULL_TREE
; decl
= TREE_CHAIN (decl
))
133 if (DECL_NAME (decl
) == tree_name
134 && DECL_ARTIFICIAL (decl
))
141 return fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
142 orig_decl
, null_pointer_node
);
146 /* Returns tree with NULL if it is not an array descriptor and with the tree to
147 access the 'data' component otherwise. With type_only = true, it returns the
148 TREE_TYPE without creating a new tree. */
151 gfc_omp_array_data (tree decl
, bool type_only
)
153 tree type
= TREE_TYPE (decl
);
155 if (POINTER_TYPE_P (type
))
156 type
= TREE_TYPE (type
);
158 if (!GFC_DESCRIPTOR_TYPE_P (type
))
162 return GFC_TYPE_ARRAY_DATAPTR_TYPE (type
);
164 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
165 decl
= build_fold_indirect_ref (decl
);
167 decl
= gfc_conv_descriptor_data_get (decl
);
172 /* Return the byte-size of the passed array descriptor. */
175 gfc_omp_array_size (tree decl
, gimple_seq
*pre_p
)
178 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
179 decl
= build_fold_indirect_ref (decl
);
180 tree type
= TREE_TYPE (decl
);
181 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type
));
182 bool allocatable
= (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
183 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
184 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
);
185 gfc_init_block (&block
);
186 tree size
= gfc_full_array_size (&block
, decl
,
187 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)));
188 size
= fold_convert (size_type_node
, size
);
189 tree elemsz
= gfc_get_element_type (TREE_TYPE (decl
));
190 if (TREE_CODE (elemsz
) == ARRAY_TYPE
&& TYPE_STRING_FLAG (elemsz
))
191 elemsz
= gfc_conv_descriptor_elem_len (decl
);
193 elemsz
= TYPE_SIZE_UNIT (elemsz
);
194 size
= fold_build2 (MULT_EXPR
, size_type_node
, size
, elemsz
);
196 gimplify_and_add (gfc_finish_block (&block
), pre_p
);
199 tree var
= create_tmp_var (size_type_node
);
200 gfc_add_expr_to_block (&block
, build2 (MODIFY_EXPR
, sizetype
, var
, size
));
201 tree tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
202 gfc_conv_descriptor_data_get (decl
),
204 tmp
= build3_loc (input_location
, COND_EXPR
, void_type_node
, tmp
,
205 gfc_finish_block (&block
),
206 build2 (MODIFY_EXPR
, sizetype
, var
, size_zero_node
));
207 gimplify_and_add (tmp
, pre_p
);
214 /* True if OpenMP should privatize what this DECL points to rather
215 than the DECL itself. */
218 gfc_omp_privatize_by_reference (const_tree decl
)
220 tree type
= TREE_TYPE (decl
);
222 if (TREE_CODE (type
) == REFERENCE_TYPE
223 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
226 if (TREE_CODE (type
) == POINTER_TYPE
227 && gfc_omp_is_optional_argument (decl
))
230 if (TREE_CODE (type
) == POINTER_TYPE
)
232 while (TREE_CODE (decl
) == COMPONENT_REF
)
233 decl
= TREE_OPERAND (decl
, 1);
235 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
236 that have POINTER_TYPE type and aren't scalar pointers, scalar
237 allocatables, Cray pointees or C pointers are supposed to be
238 privatized by reference. */
239 if (GFC_DECL_GET_SCALAR_POINTER (decl
)
240 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
241 || GFC_DECL_CRAY_POINTEE (decl
)
242 || GFC_DECL_ASSOCIATE_VAR_P (decl
)
243 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
246 if (!DECL_ARTIFICIAL (decl
)
247 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
250 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
252 if (DECL_LANG_SPECIFIC (decl
)
253 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
260 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
261 of DECL is predetermined. */
263 enum omp_clause_default_kind
264 gfc_omp_predetermined_sharing (tree decl
)
266 /* Associate names preserve the association established during ASSOCIATE.
267 As they are implemented either as pointers to the selector or array
268 descriptor and shouldn't really change in the ASSOCIATE region,
269 this decl can be either shared or firstprivate. If it is a pointer,
270 use firstprivate, as it is cheaper that way, otherwise make it shared. */
271 if (GFC_DECL_ASSOCIATE_VAR_P (decl
))
273 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
274 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
276 return OMP_CLAUSE_DEFAULT_SHARED
;
279 if (DECL_ARTIFICIAL (decl
)
280 && ! GFC_DECL_RESULT (decl
)
281 && ! (DECL_LANG_SPECIFIC (decl
)
282 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
283 return OMP_CLAUSE_DEFAULT_SHARED
;
285 /* Cray pointees shouldn't be listed in any clauses and should be
286 gimplified to dereference of the corresponding Cray pointer.
287 Make them all private, so that they are emitted in the debug
289 if (GFC_DECL_CRAY_POINTEE (decl
))
290 return OMP_CLAUSE_DEFAULT_PRIVATE
;
292 /* Assumed-size arrays are predetermined shared. */
293 if (TREE_CODE (decl
) == PARM_DECL
294 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
295 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
296 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
297 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
299 return OMP_CLAUSE_DEFAULT_SHARED
;
301 /* Dummy procedures aren't considered variables by OpenMP, thus are
302 disallowed in OpenMP clauses. They are represented as PARM_DECLs
303 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
304 to avoid complaining about their uses with default(none). */
305 if (TREE_CODE (decl
) == PARM_DECL
306 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
307 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
308 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
310 /* COMMON and EQUIVALENCE decls are shared. They
311 are only referenced through DECL_VALUE_EXPR of the variables
312 contained in them. If those are privatized, they will not be
313 gimplified to the COMMON or EQUIVALENCE decls. */
314 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
315 return OMP_CLAUSE_DEFAULT_SHARED
;
317 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
318 return OMP_CLAUSE_DEFAULT_SHARED
;
320 /* These are either array or derived parameters, or vtables.
321 In the former cases, the OpenMP standard doesn't consider them to be
322 variables at all (they can't be redefined), but they can nevertheless appear
323 in parallel/task regions and for default(none) purposes treat them as shared.
324 For vtables likely the same handling is desirable. */
325 if (VAR_P (decl
) && TREE_READONLY (decl
)
326 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
327 return OMP_CLAUSE_DEFAULT_SHARED
;
329 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
333 /* OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED unless OpenMP mapping attribute
334 of DECL is predetermined. */
336 enum omp_clause_defaultmap_kind
337 gfc_omp_predetermined_mapping (tree decl
)
339 if (DECL_ARTIFICIAL (decl
)
340 && ! GFC_DECL_RESULT (decl
)
341 && ! (DECL_LANG_SPECIFIC (decl
)
342 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
343 return OMP_CLAUSE_DEFAULTMAP_TO
;
345 /* These are either array or derived parameters, or vtables. */
346 if (VAR_P (decl
) && TREE_READONLY (decl
)
347 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
348 return OMP_CLAUSE_DEFAULTMAP_TO
;
350 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
;
354 /* Return decl that should be used when reporting DEFAULT(NONE)
358 gfc_omp_report_decl (tree decl
)
360 if (DECL_ARTIFICIAL (decl
)
361 && DECL_LANG_SPECIFIC (decl
)
362 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
363 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
368 /* Return true if TYPE has any allocatable components. */
371 gfc_has_alloc_comps (tree type
, tree decl
)
375 if (POINTER_TYPE_P (type
))
377 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
378 type
= TREE_TYPE (type
);
379 else if (GFC_DECL_GET_SCALAR_POINTER (decl
))
383 if (GFC_DESCRIPTOR_TYPE_P (type
)
384 && (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
385 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
))
388 if (GFC_DESCRIPTOR_TYPE_P (type
) || GFC_ARRAY_TYPE_P (type
))
389 type
= gfc_get_element_type (type
);
391 if (TREE_CODE (type
) != RECORD_TYPE
)
394 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
396 ftype
= TREE_TYPE (field
);
397 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
399 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
400 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
402 if (gfc_has_alloc_comps (ftype
, field
))
408 /* Return true if TYPE is polymorphic but not with pointer attribute. */
411 gfc_is_polymorphic_nonptr (tree type
)
413 if (POINTER_TYPE_P (type
))
414 type
= TREE_TYPE (type
);
415 return GFC_CLASS_TYPE_P (type
);
418 /* Return true if TYPE is unlimited polymorphic but not with pointer attribute;
419 unlimited means also intrinsic types are handled and _len is used. */
422 gfc_is_unlimited_polymorphic_nonptr (tree type
)
424 if (POINTER_TYPE_P (type
))
425 type
= TREE_TYPE (type
);
426 if (!GFC_CLASS_TYPE_P (type
))
429 tree field
= TYPE_FIELDS (type
); /* _data */
431 field
= DECL_CHAIN (field
); /* _vptr */
433 field
= DECL_CHAIN (field
);
436 gcc_assert (strcmp ("_len", IDENTIFIER_POINTER (DECL_NAME (field
))) == 0);
440 /* Return true if the DECL is for an allocatable array or scalar. */
443 gfc_omp_allocatable_p (tree decl
)
448 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
451 tree type
= TREE_TYPE (decl
);
452 if (gfc_omp_privatize_by_reference (decl
))
453 type
= TREE_TYPE (type
);
455 if (GFC_DESCRIPTOR_TYPE_P (type
)
456 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
463 /* Return true if DECL in private clause needs
464 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
466 gfc_omp_private_outer_ref (tree decl
)
468 tree type
= TREE_TYPE (decl
);
470 if (gfc_omp_privatize_by_reference (decl
))
471 type
= TREE_TYPE (type
);
473 if (GFC_DESCRIPTOR_TYPE_P (type
)
474 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
477 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
480 if (gfc_has_alloc_comps (type
, decl
))
486 /* Callback for gfc_omp_unshare_expr. */
489 gfc_omp_unshare_expr_r (tree
*tp
, int *walk_subtrees
, void *)
492 enum tree_code code
= TREE_CODE (t
);
494 /* Stop at types, decls, constants like copy_tree_r. */
495 if (TREE_CODE_CLASS (code
) == tcc_type
496 || TREE_CODE_CLASS (code
) == tcc_declaration
497 || TREE_CODE_CLASS (code
) == tcc_constant
500 else if (handled_component_p (t
)
501 || TREE_CODE (t
) == MEM_REF
)
503 *tp
= unshare_expr (t
);
510 /* Unshare in expr anything that the FE which normally doesn't
511 care much about tree sharing (because during gimplification
512 everything is unshared) could cause problems with tree sharing
513 at omp-low.cc time. */
516 gfc_omp_unshare_expr (tree expr
)
518 walk_tree (&expr
, gfc_omp_unshare_expr_r
, NULL
, NULL
);
/* Kinds of per-allocatable-component actions gfc_walk_alloc_comps can
   generate: destruction, default construction, or copy construction.  */
enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,
  WALK_ALLOC_COMPS_DEFAULT_CTOR,
  WALK_ALLOC_COMPS_COPY_CTOR
};
529 /* Handle allocatable components in OpenMP clauses. */
532 gfc_walk_alloc_comps (tree decl
, tree dest
, tree var
,
533 enum walk_alloc_comps kind
)
535 stmtblock_t block
, tmpblock
;
536 tree type
= TREE_TYPE (decl
), then_b
, tem
, field
;
537 gfc_init_block (&block
);
539 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
541 if (GFC_DESCRIPTOR_TYPE_P (type
))
543 gfc_init_block (&tmpblock
);
544 tem
= gfc_full_array_size (&tmpblock
, decl
,
545 GFC_TYPE_ARRAY_RANK (type
));
546 then_b
= gfc_finish_block (&tmpblock
);
547 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (then_b
));
548 tem
= gfc_omp_unshare_expr (tem
);
549 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
550 gfc_array_index_type
, tem
,
555 bool compute_nelts
= false;
556 if (!TYPE_DOMAIN (type
)
557 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
558 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
559 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
560 compute_nelts
= true;
561 else if (VAR_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))))
563 tree a
= DECL_ATTRIBUTES (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)));
564 if (lookup_attribute ("omp dummy var", a
))
565 compute_nelts
= true;
569 tem
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
570 TYPE_SIZE_UNIT (type
),
571 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
572 tem
= size_binop (MINUS_EXPR
, tem
, size_one_node
);
575 tem
= array_type_nelts (type
);
576 tem
= fold_convert (gfc_array_index_type
, tem
);
579 tree nelems
= gfc_evaluate_now (tem
, &block
);
580 tree index
= gfc_create_var (gfc_array_index_type
, "S");
582 gfc_init_block (&tmpblock
);
583 tem
= gfc_conv_array_data (decl
);
584 tree declvar
= build_fold_indirect_ref_loc (input_location
, tem
);
585 tree declvref
= gfc_build_array_ref (declvar
, index
, NULL
);
586 tree destvar
, destvref
= NULL_TREE
;
589 tem
= gfc_conv_array_data (dest
);
590 destvar
= build_fold_indirect_ref_loc (input_location
, tem
);
591 destvref
= gfc_build_array_ref (destvar
, index
, NULL
);
593 gfc_add_expr_to_block (&tmpblock
,
594 gfc_walk_alloc_comps (declvref
, destvref
,
598 gfc_init_loopinfo (&loop
);
600 loop
.from
[0] = gfc_index_zero_node
;
601 loop
.loopvar
[0] = index
;
603 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
604 gfc_add_block_to_block (&block
, &loop
.pre
);
605 return gfc_finish_block (&block
);
607 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var
))
609 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
611 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
612 type
= TREE_TYPE (decl
);
615 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
);
616 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
618 tree ftype
= TREE_TYPE (field
);
619 tree declf
, destf
= NULL_TREE
;
620 bool has_alloc_comps
= gfc_has_alloc_comps (ftype
, field
);
621 if ((!GFC_DESCRIPTOR_TYPE_P (ftype
)
622 || GFC_TYPE_ARRAY_AKIND (ftype
) != GFC_ARRAY_ALLOCATABLE
)
623 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field
)
626 declf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
627 decl
, field
, NULL_TREE
);
629 destf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
630 dest
, field
, NULL_TREE
);
635 case WALK_ALLOC_COMPS_DTOR
:
637 case WALK_ALLOC_COMPS_DEFAULT_CTOR
:
638 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
639 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
641 gfc_add_modify (&block
, unshare_expr (destf
),
642 unshare_expr (declf
));
643 tem
= gfc_duplicate_allocatable_nocopy
644 (destf
, declf
, ftype
,
645 GFC_TYPE_ARRAY_RANK (ftype
));
647 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
648 tem
= gfc_duplicate_allocatable_nocopy (destf
, declf
, ftype
, 0);
650 case WALK_ALLOC_COMPS_COPY_CTOR
:
651 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
652 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
653 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
,
654 GFC_TYPE_ARRAY_RANK (ftype
),
656 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
657 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
, 0,
662 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
665 gfc_init_block (&tmpblock
);
666 gfc_add_expr_to_block (&tmpblock
,
667 gfc_walk_alloc_comps (declf
, destf
,
669 then_b
= gfc_finish_block (&tmpblock
);
670 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
671 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
672 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
673 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
674 tem
= unshare_expr (declf
);
679 tem
= fold_convert (pvoid_type_node
, tem
);
680 tem
= fold_build2_loc (input_location
, NE_EXPR
,
681 logical_type_node
, tem
,
683 then_b
= build3_loc (input_location
, COND_EXPR
, void_type_node
,
685 build_empty_stmt (input_location
));
687 gfc_add_expr_to_block (&block
, then_b
);
689 if (kind
== WALK_ALLOC_COMPS_DTOR
)
691 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
692 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
694 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
695 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
,
696 NULL_TREE
, NULL_TREE
, true,
698 GFC_CAF_COARRAY_NOCOARRAY
);
699 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
701 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
703 tem
= gfc_call_free (unshare_expr (declf
));
704 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
709 return gfc_finish_block (&block
);
712 /* Return code to initialize DECL with its default constructor, or
713 NULL if there's nothing to do. */
716 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
718 tree type
= TREE_TYPE (decl
), size
, ptr
, cond
, then_b
, else_b
;
719 stmtblock_t block
, cond_block
;
721 switch (OMP_CLAUSE_CODE (clause
))
723 case OMP_CLAUSE__LOOPTEMP_
:
724 case OMP_CLAUSE__REDUCTEMP_
:
725 case OMP_CLAUSE__CONDTEMP_
:
726 case OMP_CLAUSE__SCANTEMP_
:
728 case OMP_CLAUSE_PRIVATE
:
729 case OMP_CLAUSE_LASTPRIVATE
:
730 case OMP_CLAUSE_LINEAR
:
731 case OMP_CLAUSE_REDUCTION
:
732 case OMP_CLAUSE_IN_REDUCTION
:
733 case OMP_CLAUSE_TASK_REDUCTION
:
739 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
740 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
741 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
742 || !POINTER_TYPE_P (type
)))
744 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
747 gfc_start_block (&block
);
748 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
749 OMP_CLAUSE_DECL (clause
),
750 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
751 gfc_add_expr_to_block (&block
, tem
);
752 return gfc_finish_block (&block
);
757 gcc_assert (outer
!= NULL_TREE
);
759 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
760 "not currently allocated" allocation status if outer
761 array is "not currently allocated", otherwise should be allocated. */
762 gfc_start_block (&block
);
764 gfc_init_block (&cond_block
);
766 if (GFC_DESCRIPTOR_TYPE_P (type
))
768 gfc_add_modify (&cond_block
, decl
, outer
);
769 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
770 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
771 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
773 gfc_conv_descriptor_lbound_get (decl
, rank
));
774 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
775 size
, gfc_index_one_node
);
776 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
777 size
= fold_build2_loc (input_location
, MULT_EXPR
,
778 gfc_array_index_type
, size
,
779 gfc_conv_descriptor_stride_get (decl
, rank
));
780 tree esize
= fold_convert (gfc_array_index_type
,
781 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
782 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
784 size
= unshare_expr (size
);
785 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
789 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
790 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
791 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
792 if (GFC_DESCRIPTOR_TYPE_P (type
))
793 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
), ptr
);
795 gfc_add_modify (&cond_block
, unshare_expr (decl
),
796 fold_convert (TREE_TYPE (decl
), ptr
));
797 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
799 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
800 OMP_CLAUSE_DECL (clause
),
801 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
802 gfc_add_expr_to_block (&cond_block
, tem
);
804 then_b
= gfc_finish_block (&cond_block
);
806 /* Reduction clause requires allocated ALLOCATABLE. */
807 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_REDUCTION
808 && OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_IN_REDUCTION
809 && OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_TASK_REDUCTION
)
811 gfc_init_block (&cond_block
);
812 if (GFC_DESCRIPTOR_TYPE_P (type
))
813 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
),
816 gfc_add_modify (&cond_block
, unshare_expr (decl
),
817 build_zero_cst (TREE_TYPE (decl
)));
818 else_b
= gfc_finish_block (&cond_block
);
820 tree tem
= fold_convert (pvoid_type_node
,
821 GFC_DESCRIPTOR_TYPE_P (type
)
822 ? gfc_conv_descriptor_data_get (outer
) : outer
);
823 tem
= unshare_expr (tem
);
824 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
825 tem
, null_pointer_node
);
826 gfc_add_expr_to_block (&block
,
827 build3_loc (input_location
, COND_EXPR
,
828 void_type_node
, cond
, then_b
,
830 /* Avoid -W*uninitialized warnings. */
832 suppress_warning (decl
, OPT_Wuninitialized
);
835 gfc_add_expr_to_block (&block
, then_b
);
837 return gfc_finish_block (&block
);
840 /* Build and return code for a copy constructor from SRC to DEST. */
843 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
845 tree type
= TREE_TYPE (dest
), ptr
, size
, call
;
846 tree decl_type
= TREE_TYPE (OMP_CLAUSE_DECL (clause
));
847 tree cond
, then_b
, else_b
;
848 stmtblock_t block
, cond_block
;
850 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
851 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
853 /* Privatize pointer, only; cf. gfc_omp_predetermined_sharing. */
854 if (DECL_P (OMP_CLAUSE_DECL (clause
))
855 && GFC_DECL_ASSOCIATE_VAR_P (OMP_CLAUSE_DECL (clause
)))
856 return build2 (MODIFY_EXPR
, TREE_TYPE (dest
), dest
, src
);
858 if (DECL_ARTIFICIAL (OMP_CLAUSE_DECL (clause
))
859 && DECL_LANG_SPECIFIC (OMP_CLAUSE_DECL (clause
))
860 && GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)))
862 = TREE_TYPE (GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)));
864 if (gfc_is_polymorphic_nonptr (decl_type
))
866 if (POINTER_TYPE_P (decl_type
))
867 decl_type
= TREE_TYPE (decl_type
);
868 decl_type
= TREE_TYPE (TYPE_FIELDS (decl_type
));
869 if (GFC_DESCRIPTOR_TYPE_P (decl_type
) || GFC_ARRAY_TYPE_P (decl_type
))
870 fatal_error (input_location
,
871 "Sorry, polymorphic arrays not yet supported for "
874 tree nelems
= build_int_cst (size_type_node
, 1); /* Scalar. */
875 tree src_data
= gfc_class_data_get (unshare_expr (src
));
876 tree dest_data
= gfc_class_data_get (unshare_expr (dest
));
877 bool unlimited
= gfc_is_unlimited_polymorphic_nonptr (type
);
879 gfc_start_block (&block
);
880 gfc_add_modify (&block
, gfc_class_vptr_get (dest
),
881 gfc_class_vptr_get (src
));
882 gfc_init_block (&cond_block
);
886 src_len
= gfc_class_len_get (src
);
887 gfc_add_modify (&cond_block
, gfc_class_len_get (unshare_expr (dest
)), src_len
);
890 /* Use: size = class._vtab._size * (class._len > 0 ? class._len : 1). */
891 size
= fold_convert (size_type_node
, gfc_class_vtab_size_get (src
));
894 cond
= fold_build2_loc (input_location
, GT_EXPR
, boolean_type_node
,
895 unshare_expr (src_len
),
896 build_zero_cst (TREE_TYPE (src_len
)));
897 cond
= build3_loc (input_location
, COND_EXPR
, size_type_node
, cond
,
898 fold_convert (size_type_node
,
899 unshare_expr (src_len
)),
900 build_int_cst (size_type_node
, 1));
901 size
= fold_build2_loc (input_location
, MULT_EXPR
, size_type_node
,
905 /* Malloc memory + call class->_vpt->_copy. */
906 call
= builtin_decl_explicit (BUILT_IN_MALLOC
);
907 call
= build_call_expr_loc (input_location
, call
, 1, size
);
908 gfc_add_modify (&cond_block
, dest_data
,
909 fold_convert (TREE_TYPE (dest_data
), call
));
910 gfc_add_expr_to_block (&cond_block
,
911 gfc_copy_class_to_class (src
, dest
, nelems
,
914 gcc_assert (TREE_CODE (dest_data
) == COMPONENT_REF
);
915 if (!GFC_DECL_GET_SCALAR_ALLOCATABLE (TREE_OPERAND (dest_data
, 1)))
917 gfc_add_block_to_block (&block
, &cond_block
);
921 /* Create: if (class._data != 0) <cond_block> else class._data = NULL; */
922 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
923 src_data
, null_pointer_node
);
924 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
925 void_type_node
, cond
,
926 gfc_finish_block (&cond_block
),
927 fold_build2_loc (input_location
, MODIFY_EXPR
, void_type_node
,
928 unshare_expr (dest_data
), null_pointer_node
)));
930 return gfc_finish_block (&block
);
933 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
934 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
935 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
936 || !POINTER_TYPE_P (type
)))
938 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
940 gfc_start_block (&block
);
941 gfc_add_modify (&block
, dest
, src
);
942 tree tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
943 WALK_ALLOC_COMPS_COPY_CTOR
);
944 gfc_add_expr_to_block (&block
, tem
);
945 return gfc_finish_block (&block
);
948 return build2_v (MODIFY_EXPR
, dest
, src
);
951 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
952 and copied from SRC. */
953 gfc_start_block (&block
);
955 gfc_init_block (&cond_block
);
957 gfc_add_modify (&cond_block
, dest
, fold_convert (TREE_TYPE (dest
), src
));
958 if (GFC_DESCRIPTOR_TYPE_P (type
))
960 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
961 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
962 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
964 gfc_conv_descriptor_lbound_get (dest
, rank
));
965 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
966 size
, gfc_index_one_node
);
967 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
968 size
= fold_build2_loc (input_location
, MULT_EXPR
,
969 gfc_array_index_type
, size
,
970 gfc_conv_descriptor_stride_get (dest
, rank
));
971 tree esize
= fold_convert (gfc_array_index_type
,
972 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
973 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
975 size
= unshare_expr (size
);
976 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
980 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
981 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
982 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
983 if (GFC_DESCRIPTOR_TYPE_P (type
))
984 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
), ptr
);
986 gfc_add_modify (&cond_block
, unshare_expr (dest
),
987 fold_convert (TREE_TYPE (dest
), ptr
));
989 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
990 ? gfc_conv_descriptor_data_get (src
) : src
;
991 srcptr
= unshare_expr (srcptr
);
992 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
993 call
= build_call_expr_loc (input_location
,
994 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
996 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
997 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
999 tree tem
= gfc_walk_alloc_comps (src
, dest
,
1000 OMP_CLAUSE_DECL (clause
),
1001 WALK_ALLOC_COMPS_COPY_CTOR
);
1002 gfc_add_expr_to_block (&cond_block
, tem
);
1004 then_b
= gfc_finish_block (&cond_block
);
1006 gfc_init_block (&cond_block
);
1007 if (GFC_DESCRIPTOR_TYPE_P (type
))
1008 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
),
1011 gfc_add_modify (&cond_block
, unshare_expr (dest
),
1012 build_zero_cst (TREE_TYPE (dest
)));
1013 else_b
= gfc_finish_block (&cond_block
);
1015 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1016 unshare_expr (srcptr
), null_pointer_node
);
1017 gfc_add_expr_to_block (&block
,
1018 build3_loc (input_location
, COND_EXPR
,
1019 void_type_node
, cond
, then_b
, else_b
));
1020 /* Avoid -W*uninitialized warnings. */
1022 suppress_warning (dest
, OPT_Wuninitialized
);
1024 return gfc_finish_block (&block
);
1027 /* Similarly, except use an intrinsic or pointer assignment operator
1031 gfc_omp_clause_assign_op (tree clause
, tree dest
, tree src
)
1033 tree type
= TREE_TYPE (dest
), ptr
, size
, call
, nonalloc
;
1034 tree cond
, then_b
, else_b
;
1035 stmtblock_t block
, cond_block
, cond_block2
, inner_block
;
1037 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
1038 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
1039 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
1040 || !POINTER_TYPE_P (type
)))
1042 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1044 gfc_start_block (&block
);
1045 /* First dealloc any allocatable components in DEST. */
1046 tree tem
= gfc_walk_alloc_comps (dest
, NULL_TREE
,
1047 OMP_CLAUSE_DECL (clause
),
1048 WALK_ALLOC_COMPS_DTOR
);
1049 gfc_add_expr_to_block (&block
, tem
);
1050 /* Then copy over toplevel data. */
1051 gfc_add_modify (&block
, dest
, src
);
1052 /* Finally allocate any allocatable components and copy. */
1053 tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
1054 WALK_ALLOC_COMPS_COPY_CTOR
);
1055 gfc_add_expr_to_block (&block
, tem
);
1056 return gfc_finish_block (&block
);
1059 return build2_v (MODIFY_EXPR
, dest
, src
);
1062 gfc_start_block (&block
);
1064 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1066 then_b
= gfc_walk_alloc_comps (dest
, NULL_TREE
, OMP_CLAUSE_DECL (clause
),
1067 WALK_ALLOC_COMPS_DTOR
);
1068 tree tem
= fold_convert (pvoid_type_node
,
1069 GFC_DESCRIPTOR_TYPE_P (type
)
1070 ? gfc_conv_descriptor_data_get (dest
) : dest
);
1071 tem
= unshare_expr (tem
);
1072 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1073 tem
, null_pointer_node
);
1074 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1075 then_b
, build_empty_stmt (input_location
));
1076 gfc_add_expr_to_block (&block
, tem
);
1079 gfc_init_block (&cond_block
);
1081 if (GFC_DESCRIPTOR_TYPE_P (type
))
1083 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
1084 size
= gfc_conv_descriptor_ubound_get (src
, rank
);
1085 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
1087 gfc_conv_descriptor_lbound_get (src
, rank
));
1088 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
1089 size
, gfc_index_one_node
);
1090 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
1091 size
= fold_build2_loc (input_location
, MULT_EXPR
,
1092 gfc_array_index_type
, size
,
1093 gfc_conv_descriptor_stride_get (src
, rank
));
1094 tree esize
= fold_convert (gfc_array_index_type
,
1095 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
1096 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
1098 size
= unshare_expr (size
);
1099 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
1103 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
1104 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
1106 tree destptr
= GFC_DESCRIPTOR_TYPE_P (type
)
1107 ? gfc_conv_descriptor_data_get (dest
) : dest
;
1108 destptr
= unshare_expr (destptr
);
1109 destptr
= fold_convert (pvoid_type_node
, destptr
);
1110 gfc_add_modify (&cond_block
, ptr
, destptr
);
1112 nonalloc
= fold_build2_loc (input_location
, EQ_EXPR
, logical_type_node
,
1113 destptr
, null_pointer_node
);
1115 if (GFC_DESCRIPTOR_TYPE_P (type
))
1118 for (i
= 0; i
< GFC_TYPE_ARRAY_RANK (type
); i
++)
1120 tree rank
= gfc_rank_cst
[i
];
1121 tree tem
= gfc_conv_descriptor_ubound_get (src
, rank
);
1122 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
1123 gfc_array_index_type
, tem
,
1124 gfc_conv_descriptor_lbound_get (src
, rank
));
1125 tem
= fold_build2_loc (input_location
, PLUS_EXPR
,
1126 gfc_array_index_type
, tem
,
1127 gfc_conv_descriptor_lbound_get (dest
, rank
));
1128 tem
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1129 tem
, gfc_conv_descriptor_ubound_get (dest
,
1131 cond
= fold_build2_loc (input_location
, TRUTH_ORIF_EXPR
,
1132 logical_type_node
, cond
, tem
);
1136 gfc_init_block (&cond_block2
);
1138 if (GFC_DESCRIPTOR_TYPE_P (type
))
1140 gfc_init_block (&inner_block
);
1141 gfc_allocate_using_malloc (&inner_block
, ptr
, size
, NULL_TREE
);
1142 then_b
= gfc_finish_block (&inner_block
);
1144 gfc_init_block (&inner_block
);
1145 gfc_add_modify (&inner_block
, ptr
,
1146 gfc_call_realloc (&inner_block
, ptr
, size
));
1147 else_b
= gfc_finish_block (&inner_block
);
1149 gfc_add_expr_to_block (&cond_block2
,
1150 build3_loc (input_location
, COND_EXPR
,
1152 unshare_expr (nonalloc
),
1154 gfc_add_modify (&cond_block2
, dest
, src
);
1155 gfc_conv_descriptor_data_set (&cond_block2
, unshare_expr (dest
), ptr
);
1159 gfc_allocate_using_malloc (&cond_block2
, ptr
, size
, NULL_TREE
);
1160 gfc_add_modify (&cond_block2
, unshare_expr (dest
),
1161 fold_convert (type
, ptr
));
1163 then_b
= gfc_finish_block (&cond_block2
);
1164 else_b
= build_empty_stmt (input_location
);
1166 gfc_add_expr_to_block (&cond_block
,
1167 build3_loc (input_location
, COND_EXPR
,
1168 void_type_node
, unshare_expr (cond
),
1171 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
1172 ? gfc_conv_descriptor_data_get (src
) : src
;
1173 srcptr
= unshare_expr (srcptr
);
1174 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
1175 call
= build_call_expr_loc (input_location
,
1176 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
1178 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
1179 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1181 tree tem
= gfc_walk_alloc_comps (src
, dest
,
1182 OMP_CLAUSE_DECL (clause
),
1183 WALK_ALLOC_COMPS_COPY_CTOR
);
1184 gfc_add_expr_to_block (&cond_block
, tem
);
1186 then_b
= gfc_finish_block (&cond_block
);
1188 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_COPYIN
)
1190 gfc_init_block (&cond_block
);
1191 if (GFC_DESCRIPTOR_TYPE_P (type
))
1193 tree tmp
= gfc_conv_descriptor_data_get (unshare_expr (dest
));
1194 tmp
= gfc_deallocate_with_status (tmp
, NULL_TREE
, NULL_TREE
,
1195 NULL_TREE
, NULL_TREE
, true, NULL
,
1196 GFC_CAF_COARRAY_NOCOARRAY
);
1197 gfc_add_expr_to_block (&cond_block
, tmp
);
1201 destptr
= gfc_evaluate_now (destptr
, &cond_block
);
1202 gfc_add_expr_to_block (&cond_block
, gfc_call_free (destptr
));
1203 gfc_add_modify (&cond_block
, unshare_expr (dest
),
1204 build_zero_cst (TREE_TYPE (dest
)));
1206 else_b
= gfc_finish_block (&cond_block
);
1208 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1209 unshare_expr (srcptr
), null_pointer_node
);
1210 gfc_add_expr_to_block (&block
,
1211 build3_loc (input_location
, COND_EXPR
,
1212 void_type_node
, cond
,
1216 gfc_add_expr_to_block (&block
, then_b
);
1218 return gfc_finish_block (&block
);
1222 gfc_omp_linear_clause_add_loop (stmtblock_t
*block
, tree dest
, tree src
,
1223 tree add
, tree nelems
)
1225 stmtblock_t tmpblock
;
1226 tree desta
, srca
, index
= gfc_create_var (gfc_array_index_type
, "S");
1227 nelems
= gfc_evaluate_now (nelems
, block
);
1229 gfc_init_block (&tmpblock
);
1230 if (TREE_CODE (TREE_TYPE (dest
)) == ARRAY_TYPE
)
1232 desta
= gfc_build_array_ref (dest
, index
, NULL
);
1233 srca
= gfc_build_array_ref (src
, index
, NULL
);
1237 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest
)));
1238 tree idx
= fold_build2 (MULT_EXPR
, sizetype
,
1239 fold_convert (sizetype
, index
),
1240 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest
))));
1241 desta
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
1242 TREE_TYPE (dest
), dest
,
1244 srca
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
1245 TREE_TYPE (src
), src
,
1248 gfc_add_modify (&tmpblock
, desta
,
1249 fold_build2 (PLUS_EXPR
, TREE_TYPE (desta
),
1253 gfc_init_loopinfo (&loop
);
1255 loop
.from
[0] = gfc_index_zero_node
;
1256 loop
.loopvar
[0] = index
;
1257 loop
.to
[0] = nelems
;
1258 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
1259 gfc_add_block_to_block (block
, &loop
.pre
);
1262 /* Build and return code for a constructor of DEST that initializes
1263 it to SRC plus ADD (ADD is scalar integer). */
1266 gfc_omp_clause_linear_ctor (tree clause
, tree dest
, tree src
, tree add
)
1268 tree type
= TREE_TYPE (dest
), ptr
, size
, nelems
= NULL_TREE
;
1271 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
1273 gfc_start_block (&block
);
1274 add
= gfc_evaluate_now (add
, &block
);
1276 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
1277 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
1278 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
1279 || !POINTER_TYPE_P (type
)))
1281 bool compute_nelts
= false;
1282 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
1283 if (!TYPE_DOMAIN (type
)
1284 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
1285 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
1286 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
1287 compute_nelts
= true;
1288 else if (VAR_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))))
1290 tree a
= DECL_ATTRIBUTES (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)));
1291 if (lookup_attribute ("omp dummy var", a
))
1292 compute_nelts
= true;
1296 nelems
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
1297 TYPE_SIZE_UNIT (type
),
1298 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
1299 nelems
= size_binop (MINUS_EXPR
, nelems
, size_one_node
);
1302 nelems
= array_type_nelts (type
);
1303 nelems
= fold_convert (gfc_array_index_type
, nelems
);
1305 gfc_omp_linear_clause_add_loop (&block
, dest
, src
, add
, nelems
);
1306 return gfc_finish_block (&block
);
1309 /* Allocatable arrays in LINEAR clauses need to be allocated
1310 and copied from SRC. */
1311 gfc_add_modify (&block
, dest
, src
);
1312 if (GFC_DESCRIPTOR_TYPE_P (type
))
1314 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
1315 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
1316 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
1318 gfc_conv_descriptor_lbound_get (dest
, rank
));
1319 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
1320 size
, gfc_index_one_node
);
1321 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
1322 size
= fold_build2_loc (input_location
, MULT_EXPR
,
1323 gfc_array_index_type
, size
,
1324 gfc_conv_descriptor_stride_get (dest
, rank
));
1325 tree esize
= fold_convert (gfc_array_index_type
,
1326 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
1327 nelems
= gfc_evaluate_now (unshare_expr (size
), &block
);
1328 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
1329 nelems
, unshare_expr (esize
));
1330 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
1332 nelems
= fold_build2_loc (input_location
, MINUS_EXPR
,
1333 gfc_array_index_type
, nelems
,
1334 gfc_index_one_node
);
1337 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
1338 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
1339 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
1340 if (GFC_DESCRIPTOR_TYPE_P (type
))
1342 gfc_conv_descriptor_data_set (&block
, unshare_expr (dest
), ptr
);
1343 tree etype
= gfc_get_element_type (type
);
1344 ptr
= fold_convert (build_pointer_type (etype
), ptr
);
1345 tree srcptr
= gfc_conv_descriptor_data_get (unshare_expr (src
));
1346 srcptr
= fold_convert (build_pointer_type (etype
), srcptr
);
1347 gfc_omp_linear_clause_add_loop (&block
, ptr
, srcptr
, add
, nelems
);
1351 gfc_add_modify (&block
, unshare_expr (dest
),
1352 fold_convert (TREE_TYPE (dest
), ptr
));
1353 ptr
= fold_convert (TREE_TYPE (dest
), ptr
);
1354 tree dstm
= build_fold_indirect_ref (ptr
);
1355 tree srcm
= build_fold_indirect_ref (unshare_expr (src
));
1356 gfc_add_modify (&block
, dstm
,
1357 fold_build2 (PLUS_EXPR
, TREE_TYPE (add
), srcm
, add
));
1359 return gfc_finish_block (&block
);
1362 /* Build and return code destructing DECL. Return NULL if nothing
1366 gfc_omp_clause_dtor (tree clause
, tree decl
)
1368 tree type
= TREE_TYPE (decl
), tem
;
1369 tree decl_type
= TREE_TYPE (OMP_CLAUSE_DECL (clause
));
1371 /* Only pointer was privatized; cf. gfc_omp_clause_copy_ctor. */
1372 if (DECL_P (OMP_CLAUSE_DECL (clause
))
1373 && GFC_DECL_ASSOCIATE_VAR_P (OMP_CLAUSE_DECL (clause
)))
1376 if (DECL_ARTIFICIAL (OMP_CLAUSE_DECL (clause
))
1377 && DECL_LANG_SPECIFIC (OMP_CLAUSE_DECL (clause
))
1378 && GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)))
1380 = TREE_TYPE (GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)));
1381 if (gfc_is_polymorphic_nonptr (decl_type
))
1383 if (POINTER_TYPE_P (decl_type
))
1384 decl_type
= TREE_TYPE (decl_type
);
1385 decl_type
= TREE_TYPE (TYPE_FIELDS (decl_type
));
1386 if (GFC_DESCRIPTOR_TYPE_P (decl_type
) || GFC_ARRAY_TYPE_P (decl_type
))
1387 fatal_error (input_location
,
1388 "Sorry, polymorphic arrays not yet supported for "
1390 stmtblock_t block
, cond_block
;
1391 gfc_start_block (&block
);
1392 gfc_init_block (&cond_block
);
1393 tree final
= gfc_class_vtab_final_get (decl
);
1394 tree size
= fold_convert (size_type_node
, gfc_class_vtab_size_get (decl
));
1396 gfc_init_se (&se
, NULL
);
1397 symbol_attribute attr
= {};
1398 tree data
= gfc_class_data_get (decl
);
1399 tree desc
= gfc_conv_scalar_to_descriptor (&se
, data
, attr
);
1401 /* Call class->_vpt->_finalize + free. */
1402 tree call
= build_fold_indirect_ref (final
);
1403 call
= build_call_expr_loc (input_location
, call
, 3,
1404 gfc_build_addr_expr (NULL
, desc
),
1405 size
, boolean_false_node
);
1406 gfc_add_block_to_block (&cond_block
, &se
.pre
);
1407 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
1408 gfc_add_block_to_block (&cond_block
, &se
.post
);
1409 /* Create: if (_vtab && _final) <cond_block> */
1410 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1411 gfc_class_vptr_get (decl
),
1413 tree cond2
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1414 final
, null_pointer_node
);
1415 cond
= fold_build2_loc (input_location
, TRUTH_ANDIF_EXPR
,
1416 boolean_type_node
, cond
, cond2
);
1417 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1418 void_type_node
, cond
,
1419 gfc_finish_block (&cond_block
), NULL_TREE
));
1420 call
= builtin_decl_explicit (BUILT_IN_FREE
);
1421 call
= build_call_expr_loc (input_location
, call
, 1, data
);
1422 gfc_add_expr_to_block (&block
, fold_convert (void_type_node
, call
));
1423 return gfc_finish_block (&block
);
1426 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
1427 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
1428 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
1429 || !POINTER_TYPE_P (type
)))
1431 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1432 return gfc_walk_alloc_comps (decl
, NULL_TREE
,
1433 OMP_CLAUSE_DECL (clause
),
1434 WALK_ALLOC_COMPS_DTOR
);
1438 if (GFC_DESCRIPTOR_TYPE_P (type
))
1440 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
1441 to be deallocated if they were allocated. */
1442 tem
= gfc_conv_descriptor_data_get (decl
);
1443 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
, NULL_TREE
,
1444 NULL_TREE
, true, NULL
,
1445 GFC_CAF_COARRAY_NOCOARRAY
);
1448 tem
= gfc_call_free (decl
);
1449 tem
= gfc_omp_unshare_expr (tem
);
1451 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1456 gfc_init_block (&block
);
1457 gfc_add_expr_to_block (&block
,
1458 gfc_walk_alloc_comps (decl
, NULL_TREE
,
1459 OMP_CLAUSE_DECL (clause
),
1460 WALK_ALLOC_COMPS_DTOR
));
1461 gfc_add_expr_to_block (&block
, tem
);
1462 then_b
= gfc_finish_block (&block
);
1464 tem
= fold_convert (pvoid_type_node
,
1465 GFC_DESCRIPTOR_TYPE_P (type
)
1466 ? gfc_conv_descriptor_data_get (decl
) : decl
);
1467 tem
= unshare_expr (tem
);
1468 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1469 tem
, null_pointer_node
);
1470 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1471 then_b
, build_empty_stmt (input_location
));
1476 /* Build a conditional expression in BLOCK. If COND_VAL is not
1477 null, then the block THEN_B is executed, otherwise ELSE_VAL
1478 is assigned to VAL. */
1481 gfc_build_cond_assign (stmtblock_t
*block
, tree val
, tree cond_val
,
1482 tree then_b
, tree else_val
)
1484 stmtblock_t cond_block
;
1485 tree else_b
= NULL_TREE
;
1486 tree val_ty
= TREE_TYPE (val
);
1490 gfc_init_block (&cond_block
);
1491 gfc_add_modify (&cond_block
, val
, fold_convert (val_ty
, else_val
));
1492 else_b
= gfc_finish_block (&cond_block
);
1494 gfc_add_expr_to_block (block
,
1495 build3_loc (input_location
, COND_EXPR
, void_type_node
,
1496 cond_val
, then_b
, else_b
));
1499 /* Build a conditional expression in BLOCK, returning a temporary
1500 variable containing the result. If COND_VAL is not null, then
1501 THEN_VAL will be assigned to the variable, otherwise ELSE_VAL
1506 gfc_build_cond_assign_expr (stmtblock_t
*block
, tree cond_val
,
1507 tree then_val
, tree else_val
)
1510 tree val_ty
= TREE_TYPE (then_val
);
1511 stmtblock_t cond_block
;
1513 val
= create_tmp_var (val_ty
);
1515 gfc_init_block (&cond_block
);
1516 gfc_add_modify (&cond_block
, val
, then_val
);
1517 tree then_b
= gfc_finish_block (&cond_block
);
1519 gfc_build_cond_assign (block
, val
, cond_val
, then_b
, else_val
);
1525 gfc_omp_finish_clause (tree c
, gimple_seq
*pre_p
, bool openacc
)
1527 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
1530 tree decl
= OMP_CLAUSE_DECL (c
);
1532 /* Assumed-size arrays can't be mapped implicitly, they have to be
1533 mapped explicitly using array sections. */
1534 if (TREE_CODE (decl
) == PARM_DECL
1535 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
1536 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
1537 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
1538 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
1541 error_at (OMP_CLAUSE_LOCATION (c
),
1542 "implicit mapping of assumed size array %qD", decl
);
1546 tree c2
= NULL_TREE
, c3
= NULL_TREE
, c4
= NULL_TREE
;
1547 tree present
= gfc_omp_check_optional_argument (decl
, true);
1548 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
1550 if (!gfc_omp_privatize_by_reference (decl
)
1551 && !GFC_DECL_GET_SCALAR_POINTER (decl
)
1552 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1553 && !GFC_DECL_CRAY_POINTEE (decl
)
1554 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
1556 tree orig_decl
= decl
;
1558 c4
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1559 OMP_CLAUSE_SET_MAP_KIND (c4
, GOMP_MAP_POINTER
);
1560 OMP_CLAUSE_DECL (c4
) = decl
;
1561 OMP_CLAUSE_SIZE (c4
) = size_int (0);
1562 decl
= build_fold_indirect_ref (decl
);
1564 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1565 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1567 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1568 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_POINTER
);
1569 OMP_CLAUSE_DECL (c2
) = decl
;
1570 OMP_CLAUSE_SIZE (c2
) = size_int (0);
1573 gfc_start_block (&block
);
1575 ptr
= gfc_build_cond_assign_expr (&block
, present
, decl
,
1577 gimplify_and_add (gfc_finish_block (&block
), pre_p
);
1578 ptr
= build_fold_indirect_ref (ptr
);
1579 OMP_CLAUSE_DECL (c
) = ptr
;
1580 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
1584 OMP_CLAUSE_DECL (c
) = decl
;
1585 OMP_CLAUSE_SIZE (c
) = NULL_TREE
;
1587 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1588 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1589 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1591 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1592 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1593 OMP_CLAUSE_DECL (c3
) = unshare_expr (decl
);
1594 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1595 decl
= build_fold_indirect_ref (decl
);
1596 OMP_CLAUSE_DECL (c
) = decl
;
1599 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1602 gfc_start_block (&block
);
1603 tree type
= TREE_TYPE (decl
);
1604 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1606 /* OpenMP: automatically map pointer targets with the pointer;
1607 hence, always update the descriptor/pointer itself.
1608 NOTE: This also remaps the pointer for allocatable arrays with
1609 'target' attribute which also don't have the 'restrict' qualifier. */
1610 bool always_modifier
= false;
1613 && !(TYPE_QUALS (TREE_TYPE (ptr
)) & TYPE_QUAL_RESTRICT
))
1614 always_modifier
= true;
1617 ptr
= gfc_build_cond_assign_expr (&block
, present
, ptr
,
1619 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr
)));
1620 ptr
= build_fold_indirect_ref (ptr
);
1621 OMP_CLAUSE_DECL (c
) = ptr
;
1622 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1623 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_TO_PSET
);
1626 ptr
= create_tmp_var (TREE_TYPE (TREE_OPERAND (decl
, 0)));
1627 gfc_add_modify (&block
, ptr
, TREE_OPERAND (decl
, 0));
1629 OMP_CLAUSE_DECL (c2
) = build_fold_indirect_ref (ptr
);
1632 OMP_CLAUSE_DECL (c2
) = decl
;
1633 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (type
);
1634 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1635 OMP_CLAUSE_SET_MAP_KIND (c3
, always_modifier
? GOMP_MAP_ALWAYS_POINTER
1636 : GOMP_MAP_POINTER
);
1639 ptr
= gfc_conv_descriptor_data_get (decl
);
1640 ptr
= gfc_build_addr_expr (NULL
, ptr
);
1641 ptr
= gfc_build_cond_assign_expr (&block
, present
,
1642 ptr
, null_pointer_node
);
1643 ptr
= build_fold_indirect_ref (ptr
);
1644 OMP_CLAUSE_DECL (c3
) = ptr
;
1647 OMP_CLAUSE_DECL (c3
) = gfc_conv_descriptor_data_get (decl
);
1648 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1649 tree size
= create_tmp_var (gfc_array_index_type
);
1650 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
1651 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
1652 if (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
1653 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
1654 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
)
1656 stmtblock_t cond_block
;
1657 tree tem
, then_b
, else_b
, zero
, cond
;
1659 gfc_init_block (&cond_block
);
1660 tem
= gfc_full_array_size (&cond_block
, decl
,
1661 GFC_TYPE_ARRAY_RANK (type
));
1662 gfc_add_modify (&cond_block
, size
, tem
);
1663 gfc_add_modify (&cond_block
, size
,
1664 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1666 then_b
= gfc_finish_block (&cond_block
);
1667 gfc_init_block (&cond_block
);
1668 zero
= build_int_cst (gfc_array_index_type
, 0);
1669 gfc_add_modify (&cond_block
, size
, zero
);
1670 else_b
= gfc_finish_block (&cond_block
);
1671 tem
= gfc_conv_descriptor_data_get (decl
);
1672 tem
= fold_convert (pvoid_type_node
, tem
);
1673 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1674 boolean_type_node
, tem
, null_pointer_node
);
1677 cond
= fold_build2_loc (input_location
, TRUTH_ANDIF_EXPR
,
1678 boolean_type_node
, present
, cond
);
1680 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1681 void_type_node
, cond
,
1686 stmtblock_t cond_block
;
1689 gfc_init_block (&cond_block
);
1690 gfc_add_modify (&cond_block
, size
,
1691 gfc_full_array_size (&cond_block
, decl
,
1692 GFC_TYPE_ARRAY_RANK (type
)));
1693 gfc_add_modify (&cond_block
, size
,
1694 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1696 then_b
= gfc_finish_block (&cond_block
);
1698 gfc_build_cond_assign (&block
, size
, present
, then_b
,
1699 build_int_cst (gfc_array_index_type
, 0));
1703 gfc_add_modify (&block
, size
,
1704 gfc_full_array_size (&block
, decl
,
1705 GFC_TYPE_ARRAY_RANK (type
)));
1706 gfc_add_modify (&block
, size
,
1707 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1710 OMP_CLAUSE_SIZE (c
) = size
;
1711 tree stmt
= gfc_finish_block (&block
);
1712 gimplify_and_add (stmt
, pre_p
);
1715 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
1717 = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
1718 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1719 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
1720 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
1721 OMP_CLAUSE_SIZE (c
) = size_int (0);
1724 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (last
);
1725 OMP_CLAUSE_CHAIN (last
) = c2
;
1730 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (last
);
1731 OMP_CLAUSE_CHAIN (last
) = c3
;
1736 OMP_CLAUSE_CHAIN (c4
) = OMP_CLAUSE_CHAIN (last
);
1737 OMP_CLAUSE_CHAIN (last
) = c4
;
1742 /* Return true if DECL is a scalar variable (for the purpose of
1743 implicit firstprivatization/mapping). Only if 'ptr_alloc_ok.'
1744 is true, allocatables and pointers are permitted. */
1747 gfc_omp_scalar_p (tree decl
, bool ptr_alloc_ok
)
1749 tree type
= TREE_TYPE (decl
);
1750 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1751 type
= TREE_TYPE (type
);
1752 if (TREE_CODE (type
) == POINTER_TYPE
)
1754 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1755 || GFC_DECL_GET_SCALAR_POINTER (decl
))
1759 type
= TREE_TYPE (type
);
1761 if (GFC_ARRAY_TYPE_P (type
)
1762 || GFC_CLASS_TYPE_P (type
))
1765 if ((TREE_CODE (type
) == ARRAY_TYPE
|| TREE_CODE (type
) == INTEGER_TYPE
)
1766 && TYPE_STRING_FLAG (type
))
1768 if (INTEGRAL_TYPE_P (type
)
1769 || SCALAR_FLOAT_TYPE_P (type
)
1770 || COMPLEX_FLOAT_TYPE_P (type
))
1776 /* Return true if DECL is a scalar with target attribute but does not have the
1777 allocatable (or pointer) attribute (for the purpose of implicit mapping). */
1780 gfc_omp_scalar_target_p (tree decl
)
1782 return (DECL_P (decl
) && GFC_DECL_GET_SCALAR_TARGET (decl
)
1783 && gfc_omp_scalar_p (decl
, false));
1787 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1788 disregarded in OpenMP construct, because it is going to be
1789 remapped during OpenMP lowering. SHARED is true if DECL
1790 is going to be shared, false if it is going to be privatized. */
1793 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
1795 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1796 && DECL_HAS_VALUE_EXPR_P (decl
))
1798 tree value
= DECL_VALUE_EXPR (decl
);
1800 if (TREE_CODE (value
) == COMPONENT_REF
1801 && VAR_P (TREE_OPERAND (value
, 0))
1802 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1804 /* If variable in COMMON or EQUIVALENCE is privatized, return
1805 true, as just that variable is supposed to be privatized,
1806 not the whole COMMON or whole EQUIVALENCE.
1807 For shared variables in COMMON or EQUIVALENCE, let them be
1808 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1809 from the same COMMON or EQUIVALENCE just one sharing of the
1810 whole COMMON or EQUIVALENCE is enough. */
1815 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
1821 /* Return true if DECL that is shared iff SHARED is true should
1822 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1826 gfc_omp_private_debug_clause (tree decl
, bool shared
)
1828 if (GFC_DECL_CRAY_POINTEE (decl
))
1831 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1832 && DECL_HAS_VALUE_EXPR_P (decl
))
1834 tree value
= DECL_VALUE_EXPR (decl
);
1836 if (TREE_CODE (value
) == COMPONENT_REF
1837 && VAR_P (TREE_OPERAND (value
, 0))
1838 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1845 /* Register language specific type size variables as potentially OpenMP
1846 firstprivate variables. */
1849 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
1851 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
1855 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
1856 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
1858 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
1859 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
1860 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
1862 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
1863 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
1869 gfc_trans_add_clause (tree node
, tree tail
)
1871 OMP_CLAUSE_CHAIN (node
) = tail
;
1876 gfc_trans_omp_variable (gfc_symbol
*sym
, bool declare_simd
)
1881 gfc_symbol
*proc_sym
;
1882 gfc_formal_arglist
*f
;
1884 gcc_assert (sym
->attr
.dummy
);
1885 proc_sym
= sym
->ns
->proc_name
;
1886 if (proc_sym
->attr
.entry_master
)
1888 if (gfc_return_by_reference (proc_sym
))
1891 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1894 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1900 return build_int_cst (integer_type_node
, cnt
);
1903 tree t
= gfc_get_symbol_decl (sym
);
1907 bool alternate_entry
;
1910 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1911 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1912 && sym
->result
== sym
;
1913 entry_master
= sym
->attr
.result
1914 && sym
->ns
->proc_name
->attr
.entry_master
1915 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1916 parent_decl
= current_function_decl
1917 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1919 if ((t
== parent_decl
&& return_value
)
1920 || (sym
->ns
&& sym
->ns
->proc_name
1921 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1922 && (alternate_entry
|| entry_master
)))
1927 /* Special case for assigning the return value of a function.
1928 Self recursive functions must have an explicit return value. */
1929 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1930 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1932 /* Similarly for alternate entry points. */
1933 else if (alternate_entry
1934 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1937 gfc_entry_list
*el
= NULL
;
1939 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1942 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1947 else if (entry_master
1948 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1950 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1956 gfc_trans_omp_variable_list (enum omp_clause_code code
,
1957 gfc_omp_namelist
*namelist
, tree list
,
1960 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1961 if (namelist
->sym
->attr
.referenced
|| declare_simd
)
1963 tree t
= gfc_trans_omp_variable (namelist
->sym
, declare_simd
);
1964 if (t
!= error_mark_node
)
1967 node
= build_omp_clause (input_location
, code
);
1968 OMP_CLAUSE_DECL (node
) = t
;
1969 list
= gfc_trans_add_clause (node
, list
);
1971 if (code
== OMP_CLAUSE_LASTPRIVATE
1972 && namelist
->u
.lastprivate_conditional
)
1973 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (node
) = 1;
1979 struct omp_udr_find_orig_data
1981 gfc_omp_udr
*omp_udr
;
1986 omp_udr_find_orig (gfc_expr
**e
, int *walk_subtrees ATTRIBUTE_UNUSED
,
1989 struct omp_udr_find_orig_data
*cd
= (struct omp_udr_find_orig_data
*) data
;
1990 if ((*e
)->expr_type
== EXPR_VARIABLE
1991 && (*e
)->symtree
->n
.sym
== cd
->omp_udr
->omp_orig
)
1992 cd
->omp_orig_seen
= true;
1998 gfc_trans_omp_array_reduction_or_udr (tree c
, gfc_omp_namelist
*n
, locus where
)
2000 gfc_symbol
*sym
= n
->sym
;
2001 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
2002 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
2003 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
2004 gfc_symbol omp_var_copy
[4];
2005 gfc_expr
*e1
, *e2
, *e3
, *e4
;
2007 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
2008 locus old_loc
= gfc_current_locus
;
2011 gfc_omp_udr
*udr
= n
->u2
.udr
? n
->u2
.udr
->udr
: NULL
;
2013 decl
= OMP_CLAUSE_DECL (c
);
2014 gfc_current_locus
= where
;
2015 type
= TREE_TYPE (decl
);
2016 outer_decl
= create_tmp_var_raw (type
);
2017 if (TREE_CODE (decl
) == PARM_DECL
2018 && TREE_CODE (type
) == REFERENCE_TYPE
2019 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
2020 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
2022 decl
= build_fold_indirect_ref (decl
);
2023 type
= TREE_TYPE (type
);
2026 /* Create a fake symbol for init value. */
2027 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
2028 init_val_sym
.ns
= sym
->ns
;
2029 init_val_sym
.name
= sym
->name
;
2030 init_val_sym
.ts
= sym
->ts
;
2031 init_val_sym
.attr
.referenced
= 1;
2032 init_val_sym
.declared_at
= where
;
2033 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
2034 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
2035 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
2036 else if (udr
->initializer_ns
)
2037 backend_decl
= NULL
;
2039 switch (sym
->ts
.type
)
2045 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
2048 backend_decl
= NULL_TREE
;
2051 init_val_sym
.backend_decl
= backend_decl
;
2053 /* Create a fake symbol for the outer array reference. */
2056 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
2057 outer_sym
.attr
.dummy
= 0;
2058 outer_sym
.attr
.result
= 0;
2059 outer_sym
.attr
.flavor
= FL_VARIABLE
;
2060 outer_sym
.backend_decl
= outer_decl
;
2061 if (decl
!= OMP_CLAUSE_DECL (c
))
2062 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
2064 /* Create fake symtrees for it. */
2065 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
2066 symtree1
->n
.sym
= sym
;
2067 gcc_assert (symtree1
== root1
);
2069 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
2070 symtree2
->n
.sym
= &init_val_sym
;
2071 gcc_assert (symtree2
== root2
);
2073 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
2074 symtree3
->n
.sym
= &outer_sym
;
2075 gcc_assert (symtree3
== root3
);
2077 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
2080 omp_var_copy
[0] = *udr
->omp_out
;
2081 omp_var_copy
[1] = *udr
->omp_in
;
2082 *udr
->omp_out
= outer_sym
;
2083 *udr
->omp_in
= *sym
;
2084 if (udr
->initializer_ns
)
2086 omp_var_copy
[2] = *udr
->omp_priv
;
2087 omp_var_copy
[3] = *udr
->omp_orig
;
2088 *udr
->omp_priv
= *sym
;
2089 *udr
->omp_orig
= outer_sym
;
2093 /* Create expressions. */
2094 e1
= gfc_get_expr ();
2095 e1
->expr_type
= EXPR_VARIABLE
;
2097 e1
->symtree
= symtree1
;
2099 if (sym
->attr
.dimension
)
2101 e1
->ref
= ref
= gfc_get_ref ();
2102 ref
->type
= REF_ARRAY
;
2103 ref
->u
.ar
.where
= where
;
2104 ref
->u
.ar
.as
= sym
->as
;
2105 ref
->u
.ar
.type
= AR_FULL
;
2106 ref
->u
.ar
.dimen
= 0;
2108 t
= gfc_resolve_expr (e1
);
2112 if (backend_decl
!= NULL_TREE
)
2114 e2
= gfc_get_expr ();
2115 e2
->expr_type
= EXPR_VARIABLE
;
2117 e2
->symtree
= symtree2
;
2119 t
= gfc_resolve_expr (e2
);
2122 else if (udr
->initializer_ns
== NULL
)
2124 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
2125 e2
= gfc_default_initializer (&sym
->ts
);
2127 t
= gfc_resolve_expr (e2
);
2130 else if (n
->u2
.udr
->initializer
->op
== EXEC_ASSIGN
)
2132 e2
= gfc_copy_expr (n
->u2
.udr
->initializer
->expr2
);
2133 t
= gfc_resolve_expr (e2
);
2136 if (udr
&& udr
->initializer_ns
)
2138 struct omp_udr_find_orig_data cd
;
2140 cd
.omp_orig_seen
= false;
2141 gfc_code_walker (&n
->u2
.udr
->initializer
,
2142 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
2143 if (cd
.omp_orig_seen
)
2144 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
2147 e3
= gfc_copy_expr (e1
);
2148 e3
->symtree
= symtree3
;
2149 t
= gfc_resolve_expr (e3
);
2154 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
2158 e4
= gfc_add (e3
, e1
);
2161 e4
= gfc_multiply (e3
, e1
);
2163 case TRUTH_ANDIF_EXPR
:
2164 e4
= gfc_and (e3
, e1
);
2166 case TRUTH_ORIF_EXPR
:
2167 e4
= gfc_or (e3
, e1
);
2170 e4
= gfc_eqv (e3
, e1
);
2173 e4
= gfc_neqv (e3
, e1
);
2191 if (n
->u2
.udr
->combiner
->op
== EXEC_ASSIGN
)
2194 e3
= gfc_copy_expr (n
->u2
.udr
->combiner
->expr1
);
2195 e4
= gfc_copy_expr (n
->u2
.udr
->combiner
->expr2
);
2196 t
= gfc_resolve_expr (e3
);
2198 t
= gfc_resolve_expr (e4
);
2207 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
2208 intrinsic_sym
.ns
= sym
->ns
;
2209 intrinsic_sym
.name
= iname
;
2210 intrinsic_sym
.ts
= sym
->ts
;
2211 intrinsic_sym
.attr
.referenced
= 1;
2212 intrinsic_sym
.attr
.intrinsic
= 1;
2213 intrinsic_sym
.attr
.function
= 1;
2214 intrinsic_sym
.attr
.implicit_type
= 1;
2215 intrinsic_sym
.result
= &intrinsic_sym
;
2216 intrinsic_sym
.declared_at
= where
;
2218 symtree4
= gfc_new_symtree (&root4
, iname
);
2219 symtree4
->n
.sym
= &intrinsic_sym
;
2220 gcc_assert (symtree4
== root4
);
2222 e4
= gfc_get_expr ();
2223 e4
->expr_type
= EXPR_FUNCTION
;
2225 e4
->symtree
= symtree4
;
2226 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
2227 e4
->value
.function
.actual
->expr
= e3
;
2228 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
2229 e4
->value
.function
.actual
->next
->expr
= e1
;
2231 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
2233 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
2234 e1
= gfc_copy_expr (e1
);
2235 e3
= gfc_copy_expr (e3
);
2236 t
= gfc_resolve_expr (e4
);
2240 /* Create the init statement list. */
2243 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
2245 stmt
= gfc_trans_call (n
->u2
.udr
->initializer
, false,
2246 NULL_TREE
, NULL_TREE
, false);
2247 if (TREE_CODE (stmt
) != BIND_EXPR
)
2248 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
2251 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
2253 /* Create the merge statement list. */
2256 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
2258 stmt
= gfc_trans_call (n
->u2
.udr
->combiner
, false,
2259 NULL_TREE
, NULL_TREE
, false);
2260 if (TREE_CODE (stmt
) != BIND_EXPR
)
2261 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
2264 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
2266 /* And stick the placeholder VAR_DECL into the clause as well. */
2267 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
2269 gfc_current_locus
= old_loc
;
2282 gfc_free_array_spec (outer_sym
.as
);
2286 *udr
->omp_out
= omp_var_copy
[0];
2287 *udr
->omp_in
= omp_var_copy
[1];
2288 if (udr
->initializer_ns
)
2290 *udr
->omp_priv
= omp_var_copy
[2];
2291 *udr
->omp_orig
= omp_var_copy
[3];
2297 gfc_trans_omp_reduction_list (int kind
, gfc_omp_namelist
*namelist
, tree list
,
2298 locus where
, bool mark_addressable
)
2300 omp_clause_code clause
= OMP_CLAUSE_REDUCTION
;
2303 case OMP_LIST_REDUCTION
:
2304 case OMP_LIST_REDUCTION_INSCAN
:
2305 case OMP_LIST_REDUCTION_TASK
:
2307 case OMP_LIST_IN_REDUCTION
:
2308 clause
= OMP_CLAUSE_IN_REDUCTION
;
2310 case OMP_LIST_TASK_REDUCTION
:
2311 clause
= OMP_CLAUSE_TASK_REDUCTION
;
2316 for (; namelist
!= NULL
; namelist
= namelist
->next
)
2317 if (namelist
->sym
->attr
.referenced
)
2319 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
2320 if (t
!= error_mark_node
)
2322 tree node
= build_omp_clause (gfc_get_location (&namelist
->where
),
2324 OMP_CLAUSE_DECL (node
) = t
;
2325 if (mark_addressable
)
2326 TREE_ADDRESSABLE (t
) = 1;
2327 if (kind
== OMP_LIST_REDUCTION_INSCAN
)
2328 OMP_CLAUSE_REDUCTION_INSCAN (node
) = 1;
2329 if (kind
== OMP_LIST_REDUCTION_TASK
)
2330 OMP_CLAUSE_REDUCTION_TASK (node
) = 1;
2331 switch (namelist
->u
.reduction_op
)
2333 case OMP_REDUCTION_PLUS
:
2334 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
2336 case OMP_REDUCTION_MINUS
:
2337 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
2339 case OMP_REDUCTION_TIMES
:
2340 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
2342 case OMP_REDUCTION_AND
:
2343 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
2345 case OMP_REDUCTION_OR
:
2346 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
2348 case OMP_REDUCTION_EQV
:
2349 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
2351 case OMP_REDUCTION_NEQV
:
2352 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
2354 case OMP_REDUCTION_MAX
:
2355 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
2357 case OMP_REDUCTION_MIN
:
2358 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
2360 case OMP_REDUCTION_IAND
:
2361 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
2363 case OMP_REDUCTION_IOR
:
2364 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
2366 case OMP_REDUCTION_IEOR
:
2367 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
2369 case OMP_REDUCTION_USER
:
2370 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
2375 if (namelist
->sym
->attr
.dimension
2376 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
2377 || namelist
->sym
->attr
.allocatable
)
2378 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
2379 list
= gfc_trans_add_clause (node
, list
);
2386 gfc_convert_expr_to_tree (stmtblock_t
*block
, gfc_expr
*expr
)
2391 gfc_init_se (&se
, NULL
);
2392 gfc_conv_expr (&se
, expr
);
2393 gfc_add_block_to_block (block
, &se
.pre
);
2394 result
= gfc_evaluate_now (se
.expr
, block
);
2395 gfc_add_block_to_block (block
, &se
.post
);
2400 static vec
<tree
, va_heap
, vl_embed
> *doacross_steps
;
2403 /* Translate an array section or array element. */
2406 gfc_trans_omp_array_section (stmtblock_t
*block
, gfc_exec_op op
,
2407 gfc_omp_namelist
*n
, tree decl
, bool element
,
2408 gomp_map_kind ptr_kind
, tree
&node
, tree
&node2
,
2409 tree
&node3
, tree
&node4
)
2413 tree elemsz
= NULL_TREE
;
2415 gfc_init_se (&se
, NULL
);
2418 gfc_conv_expr_reference (&se
, n
->expr
);
2419 gfc_add_block_to_block (block
, &se
.pre
);
2424 gfc_conv_expr_descriptor (&se
, n
->expr
);
2425 ptr
= gfc_conv_array_data (se
.expr
);
2427 if (n
->expr
->ts
.type
== BT_CHARACTER
&& n
->expr
->ts
.deferred
)
2429 gcc_assert (se
.string_length
);
2430 tree len
= gfc_evaluate_now (se
.string_length
, block
);
2431 elemsz
= gfc_get_char_type (n
->expr
->ts
.kind
);
2432 elemsz
= TYPE_SIZE_UNIT (elemsz
);
2433 elemsz
= fold_build2 (MULT_EXPR
, size_type_node
,
2434 fold_convert (size_type_node
, len
), elemsz
);
2439 elemsz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ptr
)));
2440 OMP_CLAUSE_SIZE (node
) = elemsz
;
2444 tree type
= TREE_TYPE (se
.expr
);
2445 gfc_add_block_to_block (block
, &se
.pre
);
2446 OMP_CLAUSE_SIZE (node
) = gfc_full_array_size (block
, se
.expr
,
2447 GFC_TYPE_ARRAY_RANK (type
));
2449 elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2450 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2451 OMP_CLAUSE_SIZE (node
) = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2452 OMP_CLAUSE_SIZE (node
), elemsz
);
2454 gcc_assert (se
.post
.head
== NULL_TREE
);
2455 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr
)));
2456 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2457 ptr
= fold_convert (ptrdiff_type_node
, ptr
);
2459 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2460 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
)))
2461 && ptr_kind
== GOMP_MAP_POINTER
2462 && op
!= EXEC_OMP_TARGET_EXIT_DATA
2463 && OMP_CLAUSE_MAP_KIND (node
) != GOMP_MAP_RELEASE
2464 && OMP_CLAUSE_MAP_KIND (node
) != GOMP_MAP_DELETE
)
2467 node4
= build_omp_clause (input_location
,
2469 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2470 OMP_CLAUSE_DECL (node4
) = decl
;
2471 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2472 decl
= build_fold_indirect_ref (decl
);
2474 else if (ptr_kind
== GOMP_MAP_ALWAYS_POINTER
2475 && n
->expr
->ts
.type
== BT_CHARACTER
2476 && n
->expr
->ts
.deferred
)
2478 gomp_map_kind map_kind
;
2479 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_DELETE
)
2480 map_kind
= OMP_CLAUSE_MAP_KIND (node
);
2481 else if (op
== EXEC_OMP_TARGET_EXIT_DATA
2482 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_RELEASE
)
2483 map_kind
= GOMP_MAP_RELEASE
;
2485 map_kind
= GOMP_MAP_TO
;
2486 gcc_assert (se
.string_length
);
2487 node4
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2488 OMP_CLAUSE_SET_MAP_KIND (node4
, map_kind
);
2489 OMP_CLAUSE_DECL (node4
) = se
.string_length
;
2490 OMP_CLAUSE_SIZE (node4
) = TYPE_SIZE_UNIT (gfc_charlen_type_node
);
2492 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2495 tree type
= TREE_TYPE (decl
);
2496 ptr2
= gfc_conv_descriptor_data_get (decl
);
2497 desc_node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2498 OMP_CLAUSE_DECL (desc_node
) = decl
;
2499 OMP_CLAUSE_SIZE (desc_node
) = TYPE_SIZE_UNIT (type
);
2500 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_DELETE
)
2502 OMP_CLAUSE_SET_MAP_KIND (desc_node
, GOMP_MAP_DELETE
);
2505 else if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_RELEASE
2506 || op
== EXEC_OMP_TARGET_EXIT_DATA
)
2508 OMP_CLAUSE_SET_MAP_KIND (desc_node
, GOMP_MAP_RELEASE
);
2511 else if (ptr_kind
== GOMP_MAP_ALWAYS_POINTER
)
2513 OMP_CLAUSE_SET_MAP_KIND (desc_node
, GOMP_MAP_TO
);
2515 node
= desc_node
; /* Needs to come first. */
2519 OMP_CLAUSE_SET_MAP_KIND (desc_node
, GOMP_MAP_TO_PSET
);
2522 if (op
== EXEC_OMP_TARGET_EXIT_DATA
)
2524 node3
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2525 OMP_CLAUSE_SET_MAP_KIND (node3
, ptr_kind
);
2526 OMP_CLAUSE_DECL (node3
) = gfc_conv_descriptor_data_get (decl
);
2527 /* This purposely does not include GOMP_MAP_ALWAYS_POINTER. The extra
2528 cast prevents gimplify.cc from recognising it as being part of the
2529 struct - and adding an 'alloc: for the 'desc.data' pointer, which
2530 would break as the 'desc' (the descriptor) is also mapped
2531 (see node4 above). */
2532 if (ptr_kind
== GOMP_MAP_ATTACH_DETACH
)
2533 STRIP_NOPS (OMP_CLAUSE_DECL (node3
));
2537 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2540 ptr2
= build_fold_addr_expr (decl
);
2541 offset
= fold_build2 (MINUS_EXPR
, ptrdiff_type_node
, ptr
,
2542 fold_convert (ptrdiff_type_node
, ptr2
));
2543 offset
= build2 (TRUNC_DIV_EXPR
, ptrdiff_type_node
,
2544 offset
, fold_convert (ptrdiff_type_node
, elemsz
));
2545 offset
= build4_loc (input_location
, ARRAY_REF
,
2546 TREE_TYPE (TREE_TYPE (decl
)),
2547 decl
, offset
, NULL_TREE
, NULL_TREE
);
2548 OMP_CLAUSE_DECL (node
) = offset
;
2550 if (ptr_kind
== GOMP_MAP_ALWAYS_POINTER
)
2555 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2558 node3
= build_omp_clause (input_location
,
2560 OMP_CLAUSE_SET_MAP_KIND (node3
, ptr_kind
);
2561 OMP_CLAUSE_DECL (node3
) = decl
;
2563 ptr2
= fold_convert (ptrdiff_type_node
, ptr2
);
2564 OMP_CLAUSE_SIZE (node3
) = fold_build2 (MINUS_EXPR
, ptrdiff_type_node
,
2569 handle_iterator (gfc_namespace
*ns
, stmtblock_t
*iter_block
, tree block
)
2571 tree list
= NULL_TREE
;
2572 for (gfc_symbol
*sym
= ns
->omp_affinity_iterators
; sym
; sym
= sym
->tlink
)
2577 tree last
= make_tree_vec (6);
2578 tree iter_var
= gfc_get_symbol_decl (sym
);
2579 tree type
= TREE_TYPE (iter_var
);
2580 TREE_VEC_ELT (last
, 0) = iter_var
;
2581 DECL_CHAIN (iter_var
) = BLOCK_VARS (block
);
2582 BLOCK_VARS (block
) = iter_var
;
2585 c
= gfc_constructor_first (sym
->value
->value
.constructor
);
2586 gfc_init_se (&se
, NULL
);
2587 gfc_conv_expr (&se
, c
->expr
);
2588 gfc_add_block_to_block (iter_block
, &se
.pre
);
2589 gfc_add_block_to_block (iter_block
, &se
.post
);
2590 TREE_VEC_ELT (last
, 1) = fold_convert (type
,
2591 gfc_evaluate_now (se
.expr
,
2594 c
= gfc_constructor_next (c
);
2595 gfc_init_se (&se
, NULL
);
2596 gfc_conv_expr (&se
, c
->expr
);
2597 gfc_add_block_to_block (iter_block
, &se
.pre
);
2598 gfc_add_block_to_block (iter_block
, &se
.post
);
2599 TREE_VEC_ELT (last
, 2) = fold_convert (type
,
2600 gfc_evaluate_now (se
.expr
,
2603 c
= gfc_constructor_next (c
);
2607 gfc_init_se (&se
, NULL
);
2608 gfc_conv_expr (&se
, c
->expr
);
2609 gfc_add_block_to_block (iter_block
, &se
.pre
);
2610 gfc_add_block_to_block (iter_block
, &se
.post
);
2611 gfc_conv_expr (&se
, c
->expr
);
2612 step
= fold_convert (type
,
2613 gfc_evaluate_now (se
.expr
,
2617 step
= build_int_cst (type
, 1);
2618 TREE_VEC_ELT (last
, 3) = step
;
2620 TREE_VEC_ELT (last
, 4) = save_expr (step
);
2621 TREE_CHAIN (last
) = list
;
2628 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
2629 locus where
, bool declare_simd
= false,
2630 bool openacc
= false, gfc_exec_op op
= EXEC_NOP
)
2632 tree omp_clauses
= NULL_TREE
, prev_clauses
, chunk_size
, c
;
2633 tree iterator
= NULL_TREE
;
2634 tree tree_block
= NULL_TREE
;
2635 stmtblock_t iter_block
;
2637 enum omp_clause_code clause_code
;
2638 gfc_omp_namelist
*prev
= NULL
;
2641 if (clauses
== NULL
)
2644 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
2646 gfc_omp_namelist
*n
= clauses
->lists
[list
];
2652 case OMP_LIST_REDUCTION
:
2653 case OMP_LIST_REDUCTION_INSCAN
:
2654 case OMP_LIST_REDUCTION_TASK
:
2655 case OMP_LIST_IN_REDUCTION
:
2656 case OMP_LIST_TASK_REDUCTION
:
2657 /* An OpenACC async clause indicates the need to set reduction
2658 arguments addressable, to allow asynchronous copy-out. */
2659 omp_clauses
= gfc_trans_omp_reduction_list (list
, n
, omp_clauses
,
2660 where
, clauses
->async
);
2662 case OMP_LIST_PRIVATE
:
2663 clause_code
= OMP_CLAUSE_PRIVATE
;
2665 case OMP_LIST_SHARED
:
2666 clause_code
= OMP_CLAUSE_SHARED
;
2668 case OMP_LIST_FIRSTPRIVATE
:
2669 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
2671 case OMP_LIST_LASTPRIVATE
:
2672 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
2674 case OMP_LIST_COPYIN
:
2675 clause_code
= OMP_CLAUSE_COPYIN
;
2677 case OMP_LIST_COPYPRIVATE
:
2678 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
2680 case OMP_LIST_UNIFORM
:
2681 clause_code
= OMP_CLAUSE_UNIFORM
;
2683 case OMP_LIST_USE_DEVICE
:
2684 case OMP_LIST_USE_DEVICE_PTR
:
2685 clause_code
= OMP_CLAUSE_USE_DEVICE_PTR
;
2687 case OMP_LIST_USE_DEVICE_ADDR
:
2688 clause_code
= OMP_CLAUSE_USE_DEVICE_ADDR
;
2690 case OMP_LIST_IS_DEVICE_PTR
:
2691 clause_code
= OMP_CLAUSE_IS_DEVICE_PTR
;
2693 case OMP_LIST_HAS_DEVICE_ADDR
:
2694 clause_code
= OMP_CLAUSE_HAS_DEVICE_ADDR
;
2696 case OMP_LIST_NONTEMPORAL
:
2697 clause_code
= OMP_CLAUSE_NONTEMPORAL
;
2699 case OMP_LIST_SCAN_IN
:
2700 clause_code
= OMP_CLAUSE_INCLUSIVE
;
2702 case OMP_LIST_SCAN_EX
:
2703 clause_code
= OMP_CLAUSE_EXCLUSIVE
;
2708 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
2711 case OMP_LIST_ALIGNED
:
2712 for (; n
!= NULL
; n
= n
->next
)
2713 if (n
->sym
->attr
.referenced
|| declare_simd
)
2715 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
2716 if (t
!= error_mark_node
)
2718 tree node
= build_omp_clause (input_location
,
2719 OMP_CLAUSE_ALIGNED
);
2720 OMP_CLAUSE_DECL (node
) = t
;
2726 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
2729 gfc_init_se (&se
, NULL
);
2730 gfc_conv_expr (&se
, n
->expr
);
2731 gfc_add_block_to_block (block
, &se
.pre
);
2732 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
2733 gfc_add_block_to_block (block
, &se
.post
);
2735 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
2737 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2741 case OMP_LIST_ALLOCATE
:
2742 for (; n
!= NULL
; n
= n
->next
)
2743 if (n
->sym
->attr
.referenced
)
2745 tree t
= gfc_trans_omp_variable (n
->sym
, false);
2746 if (t
!= error_mark_node
)
2748 tree node
= build_omp_clause (input_location
,
2749 OMP_CLAUSE_ALLOCATE
);
2750 OMP_CLAUSE_DECL (node
) = t
;
2751 if (n
->u2
.allocator
)
2754 gfc_init_se (&se
, NULL
);
2755 gfc_conv_expr (&se
, n
->u2
.allocator
);
2756 allocator_
= gfc_evaluate_now (se
.expr
, block
);
2757 OMP_CLAUSE_ALLOCATE_ALLOCATOR (node
) = allocator_
;
2762 gfc_init_se (&se
, NULL
);
2763 gfc_conv_expr (&se
, n
->u
.align
);
2764 align_
= gfc_evaluate_now (se
.expr
, block
);
2765 OMP_CLAUSE_ALLOCATE_ALIGN (node
) = align_
;
2767 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2771 case OMP_LIST_LINEAR
:
2773 gfc_expr
*last_step_expr
= NULL
;
2774 tree last_step
= NULL_TREE
;
2775 bool last_step_parm
= false;
2777 for (; n
!= NULL
; n
= n
->next
)
2781 last_step_expr
= n
->expr
;
2782 last_step
= NULL_TREE
;
2783 last_step_parm
= false;
2785 if (n
->sym
->attr
.referenced
|| declare_simd
)
2787 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
2788 if (t
!= error_mark_node
)
2790 tree node
= build_omp_clause (input_location
,
2792 OMP_CLAUSE_DECL (node
) = t
;
2793 omp_clause_linear_kind kind
;
2794 switch (n
->u
.linear
.op
)
2796 case OMP_LINEAR_DEFAULT
:
2797 kind
= OMP_CLAUSE_LINEAR_DEFAULT
;
2799 case OMP_LINEAR_REF
:
2800 kind
= OMP_CLAUSE_LINEAR_REF
;
2802 case OMP_LINEAR_VAL
:
2803 kind
= OMP_CLAUSE_LINEAR_VAL
;
2805 case OMP_LINEAR_UVAL
:
2806 kind
= OMP_CLAUSE_LINEAR_UVAL
;
2811 OMP_CLAUSE_LINEAR_KIND (node
) = kind
;
2812 OMP_CLAUSE_LINEAR_OLD_LINEAR_MODIFIER (node
)
2813 = n
->u
.linear
.old_modifier
;
2814 if (last_step_expr
&& last_step
== NULL_TREE
)
2818 gfc_init_se (&se
, NULL
);
2819 gfc_conv_expr (&se
, last_step_expr
);
2820 gfc_add_block_to_block (block
, &se
.pre
);
2821 last_step
= gfc_evaluate_now (se
.expr
, block
);
2822 gfc_add_block_to_block (block
, &se
.post
);
2824 else if (last_step_expr
->expr_type
== EXPR_VARIABLE
)
2826 gfc_symbol
*s
= last_step_expr
->symtree
->n
.sym
;
2827 last_step
= gfc_trans_omp_variable (s
, true);
2828 last_step_parm
= true;
2832 = gfc_conv_constant_to_tree (last_step_expr
);
2836 OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (node
) = 1;
2837 OMP_CLAUSE_LINEAR_STEP (node
) = last_step
;
2841 if (kind
== OMP_CLAUSE_LINEAR_REF
)
2844 if (n
->sym
->attr
.flavor
== FL_PROCEDURE
)
2846 type
= gfc_get_function_type (n
->sym
);
2847 type
= build_pointer_type (type
);
2850 type
= gfc_sym_type (n
->sym
);
2851 if (POINTER_TYPE_P (type
))
2852 type
= TREE_TYPE (type
);
2853 /* Otherwise to be determined what exactly
2855 tree t
= fold_convert (sizetype
, last_step
);
2856 t
= size_binop (MULT_EXPR
, t
,
2857 TYPE_SIZE_UNIT (type
));
2858 OMP_CLAUSE_LINEAR_STEP (node
) = t
;
2863 = gfc_typenode_for_spec (&n
->sym
->ts
);
2864 OMP_CLAUSE_LINEAR_STEP (node
)
2865 = fold_convert (type
, last_step
);
2868 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
2869 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
2870 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2876 case OMP_LIST_AFFINITY
:
2877 case OMP_LIST_DEPEND
:
2878 iterator
= NULL_TREE
;
2880 prev_clauses
= omp_clauses
;
2881 for (; n
!= NULL
; n
= n
->next
)
2883 if (iterator
&& prev
->u2
.ns
!= n
->u2
.ns
)
2885 BLOCK_SUBBLOCKS (tree_block
) = gfc_finish_block (&iter_block
);
2886 TREE_VEC_ELT (iterator
, 5) = tree_block
;
2887 for (tree c
= omp_clauses
; c
!= prev_clauses
;
2888 c
= OMP_CLAUSE_CHAIN (c
))
2889 OMP_CLAUSE_DECL (c
) = build_tree_list (iterator
,
2890 OMP_CLAUSE_DECL (c
));
2891 prev_clauses
= omp_clauses
;
2892 iterator
= NULL_TREE
;
2894 if (n
->u2
.ns
&& (!prev
|| prev
->u2
.ns
!= n
->u2
.ns
))
2896 gfc_init_block (&iter_block
);
2897 tree_block
= make_node (BLOCK
);
2898 TREE_USED (tree_block
) = 1;
2899 BLOCK_VARS (tree_block
) = NULL_TREE
;
2900 iterator
= handle_iterator (n
->u2
.ns
, block
,
2904 gfc_init_block (&iter_block
);
2906 if (list
== OMP_LIST_DEPEND
2907 && (n
->u
.depend_doacross_op
== OMP_DOACROSS_SINK_FIRST
2908 || n
->u
.depend_doacross_op
== OMP_DEPEND_SINK_FIRST
))
2910 tree vec
= NULL_TREE
;
2913 = n
->u
.depend_doacross_op
== OMP_DEPEND_SINK_FIRST
;
2916 tree addend
= integer_zero_node
, t
;
2918 if (n
->sym
&& n
->expr
)
2920 addend
= gfc_conv_constant_to_tree (n
->expr
);
2921 if (TREE_CODE (addend
) == INTEGER_CST
2922 && tree_int_cst_sgn (addend
) == -1)
2925 addend
= const_unop (NEGATE_EXPR
,
2926 TREE_TYPE (addend
), addend
);
2931 t
= null_pointer_node
; /* "omp_cur_iteration - 1". */
2933 t
= gfc_trans_omp_variable (n
->sym
, false);
2934 if (t
!= error_mark_node
)
2936 if (i
< vec_safe_length (doacross_steps
)
2937 && !integer_zerop (addend
)
2938 && (*doacross_steps
)[i
])
2940 tree step
= (*doacross_steps
)[i
];
2941 addend
= fold_convert (TREE_TYPE (step
), addend
);
2942 addend
= build2 (TRUNC_DIV_EXPR
,
2943 TREE_TYPE (step
), addend
, step
);
2945 vec
= tree_cons (addend
, t
, vec
);
2947 OMP_CLAUSE_DOACROSS_SINK_NEGATIVE (vec
) = 1;
2950 || n
->next
->u
.depend_doacross_op
!= OMP_DOACROSS_SINK
)
2954 if (vec
== NULL_TREE
)
2957 tree node
= build_omp_clause (input_location
,
2958 OMP_CLAUSE_DOACROSS
);
2959 OMP_CLAUSE_DOACROSS_KIND (node
) = OMP_CLAUSE_DOACROSS_SINK
;
2960 OMP_CLAUSE_DOACROSS_DEPEND (node
) = is_depend
;
2961 OMP_CLAUSE_DECL (node
) = nreverse (vec
);
2962 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2966 if (n
->sym
&& !n
->sym
->attr
.referenced
)
2969 tree node
= build_omp_clause (input_location
,
2970 list
== OMP_LIST_DEPEND
2972 : OMP_CLAUSE_AFFINITY
);
2973 if (n
->sym
== NULL
) /* omp_all_memory */
2974 OMP_CLAUSE_DECL (node
) = null_pointer_node
;
2975 else if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2977 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
2978 if (gfc_omp_privatize_by_reference (decl
))
2979 decl
= build_fold_indirect_ref (decl
);
2980 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2982 decl
= gfc_conv_descriptor_data_get (decl
);
2983 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2984 decl
= build_fold_indirect_ref (decl
);
2986 else if (n
->sym
->attr
.allocatable
|| n
->sym
->attr
.pointer
)
2987 decl
= build_fold_indirect_ref (decl
);
2988 else if (DECL_P (decl
))
2989 TREE_ADDRESSABLE (decl
) = 1;
2990 OMP_CLAUSE_DECL (node
) = decl
;
2995 gfc_init_se (&se
, NULL
);
2996 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2998 gfc_conv_expr_reference (&se
, n
->expr
);
3003 gfc_conv_expr_descriptor (&se
, n
->expr
);
3004 ptr
= gfc_conv_array_data (se
.expr
);
3006 gfc_add_block_to_block (&iter_block
, &se
.pre
);
3007 gfc_add_block_to_block (&iter_block
, &se
.post
);
3008 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr
)));
3009 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
3011 if (list
== OMP_LIST_DEPEND
)
3012 switch (n
->u
.depend_doacross_op
)
3015 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
3017 case OMP_DEPEND_OUT
:
3018 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
3020 case OMP_DEPEND_INOUT
:
3021 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
3023 case OMP_DEPEND_INOUTSET
:
3024 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUTSET
;
3026 case OMP_DEPEND_MUTEXINOUTSET
:
3027 OMP_CLAUSE_DEPEND_KIND (node
)
3028 = OMP_CLAUSE_DEPEND_MUTEXINOUTSET
;
3030 case OMP_DEPEND_DEPOBJ
:
3031 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_DEPOBJ
;
3037 gfc_add_block_to_block (block
, &iter_block
);
3038 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
3042 BLOCK_SUBBLOCKS (tree_block
) = gfc_finish_block (&iter_block
);
3043 TREE_VEC_ELT (iterator
, 5) = tree_block
;
3044 for (tree c
= omp_clauses
; c
!= prev_clauses
;
3045 c
= OMP_CLAUSE_CHAIN (c
))
3046 OMP_CLAUSE_DECL (c
) = build_tree_list (iterator
,
3047 OMP_CLAUSE_DECL (c
));
3051 for (; n
!= NULL
; n
= n
->next
)
3053 if (!n
->sym
->attr
.referenced
)
3056 bool always_modifier
= false;
3057 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
3058 tree node2
= NULL_TREE
;
3059 tree node3
= NULL_TREE
;
3060 tree node4
= NULL_TREE
;
3061 tree node5
= NULL_TREE
;
3063 /* OpenMP: automatically map pointer targets with the pointer;
3064 hence, always update the descriptor/pointer itself. */
3066 && ((n
->expr
== NULL
&& n
->sym
->attr
.pointer
)
3067 || (n
->expr
&& gfc_expr_attr (n
->expr
).pointer
)))
3068 always_modifier
= true;
3070 switch (n
->u
.map_op
)
3073 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALLOC
);
3075 case OMP_MAP_IF_PRESENT
:
3076 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_IF_PRESENT
);
3078 case OMP_MAP_ATTACH
:
3079 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ATTACH
);
3082 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TO
);
3085 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FROM
);
3087 case OMP_MAP_TOFROM
:
3088 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TOFROM
);
3090 case OMP_MAP_ALWAYS_TO
:
3091 always_modifier
= true;
3092 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TO
);
3094 case OMP_MAP_ALWAYS_FROM
:
3095 always_modifier
= true;
3096 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_FROM
);
3098 case OMP_MAP_ALWAYS_TOFROM
:
3099 always_modifier
= true;
3100 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TOFROM
);
3102 case OMP_MAP_PRESENT_ALLOC
:
3103 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_PRESENT_ALLOC
);
3105 case OMP_MAP_PRESENT_TO
:
3106 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_PRESENT_TO
);
3108 case OMP_MAP_PRESENT_FROM
:
3109 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_PRESENT_FROM
);
3111 case OMP_MAP_PRESENT_TOFROM
:
3112 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_PRESENT_TOFROM
);
3114 case OMP_MAP_ALWAYS_PRESENT_TO
:
3115 always_modifier
= true;
3116 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_PRESENT_TO
);
3118 case OMP_MAP_ALWAYS_PRESENT_FROM
:
3119 always_modifier
= true;
3120 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_PRESENT_FROM
);
3122 case OMP_MAP_ALWAYS_PRESENT_TOFROM
:
3123 always_modifier
= true;
3124 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_PRESENT_TOFROM
);
3126 case OMP_MAP_RELEASE
:
3127 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_RELEASE
);
3129 case OMP_MAP_DELETE
:
3130 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DELETE
);
3132 case OMP_MAP_DETACH
:
3133 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DETACH
);
3135 case OMP_MAP_FORCE_ALLOC
:
3136 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_ALLOC
);
3138 case OMP_MAP_FORCE_TO
:
3139 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TO
);
3141 case OMP_MAP_FORCE_FROM
:
3142 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_FROM
);
3144 case OMP_MAP_FORCE_TOFROM
:
3145 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TOFROM
);
3147 case OMP_MAP_FORCE_PRESENT
:
3148 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_PRESENT
);
3150 case OMP_MAP_FORCE_DEVICEPTR
:
3151 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEVICEPTR
);
3157 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
3159 TREE_ADDRESSABLE (decl
) = 1;
3161 gfc_ref
*lastref
= NULL
;
3164 for (gfc_ref
*ref
= n
->expr
->ref
; ref
; ref
= ref
->next
)
3165 if (ref
->type
== REF_COMPONENT
|| ref
->type
== REF_ARRAY
)
3168 bool allocatable
= false, pointer
= false;
3170 if (lastref
&& lastref
->type
== REF_COMPONENT
)
3172 gfc_component
*c
= lastref
->u
.c
.component
;
3174 if (c
->ts
.type
== BT_CLASS
)
3176 pointer
= CLASS_DATA (c
)->attr
.class_pointer
;
3177 allocatable
= CLASS_DATA (c
)->attr
.allocatable
;
3181 pointer
= c
->attr
.pointer
;
3182 allocatable
= c
->attr
.allocatable
;
3187 || (n
->expr
->ref
->type
== REF_ARRAY
3188 && n
->expr
->ref
->u
.ar
.type
== AR_FULL
))
3190 gomp_map_kind map_kind
;
3191 tree type
= TREE_TYPE (decl
);
3192 if (n
->sym
->ts
.type
== BT_CHARACTER
3193 && n
->sym
->ts
.deferred
3194 && n
->sym
->attr
.omp_declare_target
3195 && (always_modifier
|| n
->sym
->attr
.pointer
)
3196 && op
!= EXEC_OMP_TARGET_EXIT_DATA
3197 && n
->u
.map_op
!= OMP_MAP_DELETE
3198 && n
->u
.map_op
!= OMP_MAP_RELEASE
)
3200 gcc_assert (n
->sym
->ts
.u
.cl
->backend_decl
);
3201 node5
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
3202 OMP_CLAUSE_SET_MAP_KIND (node5
, GOMP_MAP_ALWAYS_TO
);
3203 OMP_CLAUSE_DECL (node5
) = n
->sym
->ts
.u
.cl
->backend_decl
;
3204 OMP_CLAUSE_SIZE (node5
)
3205 = TYPE_SIZE_UNIT (gfc_charlen_type_node
);
3208 tree present
= gfc_omp_check_optional_argument (decl
, true);
3209 if (openacc
&& n
->sym
->ts
.type
== BT_CLASS
)
3211 if (n
->sym
->attr
.optional
)
3212 sorry ("optional class parameter");
3213 tree ptr
= gfc_class_data_get (decl
);
3214 ptr
= build_fold_indirect_ref (ptr
);
3215 OMP_CLAUSE_DECL (node
) = ptr
;
3216 OMP_CLAUSE_SIZE (node
) = gfc_class_vtab_size_get (decl
);
3217 node2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
3218 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_ATTACH_DETACH
);
3219 OMP_CLAUSE_DECL (node2
) = gfc_class_data_get (decl
);
3220 OMP_CLAUSE_SIZE (node2
) = size_int (0);
3221 goto finalize_map_clause
;
3223 else if (POINTER_TYPE_P (type
)
3224 && (gfc_omp_privatize_by_reference (decl
)
3225 || GFC_DECL_GET_SCALAR_POINTER (decl
)
3226 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
3227 || GFC_DECL_CRAY_POINTEE (decl
)
3228 || GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
3229 || (n
->sym
->ts
.type
== BT_DERIVED
3230 && (n
->sym
->ts
.u
.derived
->ts
.f90_type
3233 tree orig_decl
= decl
;
3235 /* For nonallocatable, nonpointer arrays, a temporary
3236 variable is generated, but this one is only defined if
3237 the variable is present; hence, we now set it to NULL
3238 to avoid accessing undefined variables. We cannot use
3239 a temporary variable here as otherwise the replacement
3240 of the variables in omp-low.cc will not work. */
3241 if (present
&& GFC_ARRAY_TYPE_P (type
))
3243 tree tmp
= fold_build2_loc (input_location
,
3245 void_type_node
, decl
,
3247 tree cond
= fold_build1_loc (input_location
,
3251 gfc_add_expr_to_block (block
,
3252 build3_loc (input_location
,
3258 /* For descriptor types, the unmapping happens below. */
3259 if (op
!= EXEC_OMP_TARGET_EXIT_DATA
3260 || !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
3262 enum gomp_map_kind gmk
= GOMP_MAP_POINTER
;
3263 if (op
== EXEC_OMP_TARGET_EXIT_DATA
3264 && n
->u
.map_op
== OMP_MAP_DELETE
)
3265 gmk
= GOMP_MAP_DELETE
;
3266 else if (op
== EXEC_OMP_TARGET_EXIT_DATA
)
3267 gmk
= GOMP_MAP_RELEASE
;
3269 if (gmk
== GOMP_MAP_RELEASE
|| gmk
== GOMP_MAP_DELETE
)
3270 size
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
3272 size
= size_int (0);
3273 node4
= build_omp_clause (input_location
,
3275 OMP_CLAUSE_SET_MAP_KIND (node4
, gmk
);
3276 OMP_CLAUSE_DECL (node4
) = decl
;
3277 OMP_CLAUSE_SIZE (node4
) = size
;
3279 decl
= build_fold_indirect_ref (decl
);
3280 if ((TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
3281 || gfc_omp_is_optional_argument (orig_decl
))
3282 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
3283 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
3285 enum gomp_map_kind gmk
;
3286 if (op
== EXEC_OMP_TARGET_EXIT_DATA
3287 && n
->u
.map_op
== OMP_MAP_DELETE
)
3288 gmk
= GOMP_MAP_DELETE
;
3289 else if (op
== EXEC_OMP_TARGET_EXIT_DATA
)
3290 gmk
= GOMP_MAP_RELEASE
;
3292 gmk
= GOMP_MAP_POINTER
;
3294 if (gmk
== GOMP_MAP_RELEASE
|| gmk
== GOMP_MAP_DELETE
)
3295 size
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
3297 size
= size_int (0);
3298 node3
= build_omp_clause (input_location
,
3300 OMP_CLAUSE_SET_MAP_KIND (node3
, gmk
);
3301 OMP_CLAUSE_DECL (node3
) = decl
;
3302 OMP_CLAUSE_SIZE (node3
) = size
;
3303 decl
= build_fold_indirect_ref (decl
);
3306 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
3308 tree type
= TREE_TYPE (decl
);
3309 tree ptr
= gfc_conv_descriptor_data_get (decl
);
3311 ptr
= gfc_build_cond_assign_expr (block
, present
, ptr
,
3313 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr
)));
3314 ptr
= build_fold_indirect_ref (ptr
);
3315 OMP_CLAUSE_DECL (node
) = ptr
;
3316 node2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
3317 OMP_CLAUSE_DECL (node2
) = decl
;
3318 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
3319 if (n
->u
.map_op
== OMP_MAP_DELETE
)
3320 map_kind
= GOMP_MAP_DELETE
;
3321 else if (op
== EXEC_OMP_TARGET_EXIT_DATA
3322 || n
->u
.map_op
== OMP_MAP_RELEASE
)
3323 map_kind
= GOMP_MAP_RELEASE
;
3325 map_kind
= GOMP_MAP_TO_PSET
;
3326 OMP_CLAUSE_SET_MAP_KIND (node2
, map_kind
);
3328 if (op
!= EXEC_OMP_TARGET_EXIT_DATA
3329 && n
->u
.map_op
!= OMP_MAP_DELETE
3330 && n
->u
.map_op
!= OMP_MAP_RELEASE
)
3332 node3
= build_omp_clause (input_location
,
3336 ptr
= gfc_conv_descriptor_data_get (decl
);
3337 ptr
= gfc_build_addr_expr (NULL
, ptr
);
3338 ptr
= gfc_build_cond_assign_expr (
3339 block
, present
, ptr
, null_pointer_node
);
3340 ptr
= build_fold_indirect_ref (ptr
);
3341 OMP_CLAUSE_DECL (node3
) = ptr
;
3344 OMP_CLAUSE_DECL (node3
)
3345 = gfc_conv_descriptor_data_get (decl
);
3346 OMP_CLAUSE_SIZE (node3
) = size_int (0);
3348 if (n
->u
.map_op
== OMP_MAP_ATTACH
)
3350 /* Standalone attach clauses used with arrays with
3351 descriptors must copy the descriptor to the
3352 target, else they won't have anything to
3353 perform the attachment onto (see OpenACC 2.6,
3354 "2.6.3. Data Structures with Pointers"). */
3355 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_ATTACH
);
3356 /* We don't want to map PTR at all in this case,
3357 so delete its node and shuffle the others
3362 goto finalize_map_clause
;
3364 else if (n
->u
.map_op
== OMP_MAP_DETACH
)
3366 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_DETACH
);
3367 /* Similarly to above, we don't want to unmap PTR
3372 goto finalize_map_clause
;
3375 OMP_CLAUSE_SET_MAP_KIND (node3
,
3377 ? GOMP_MAP_ALWAYS_POINTER
3378 : GOMP_MAP_POINTER
);
3381 /* We have to check for n->sym->attr.dimension because
3382 of scalar coarrays. */
3383 if ((n
->sym
->attr
.pointer
|| n
->sym
->attr
.allocatable
)
3384 && n
->sym
->attr
.dimension
)
3386 stmtblock_t cond_block
;
3388 = gfc_create_var (gfc_array_index_type
, NULL
);
3389 tree tem
, then_b
, else_b
, zero
, cond
;
3391 gfc_init_block (&cond_block
);
3393 = gfc_full_array_size (&cond_block
, decl
,
3394 GFC_TYPE_ARRAY_RANK (type
));
3396 if (n
->sym
->ts
.type
== BT_CHARACTER
3397 && n
->sym
->ts
.deferred
)
3399 tree len
= n
->sym
->ts
.u
.cl
->backend_decl
;
3400 len
= fold_convert (size_type_node
, len
);
3401 elemsz
= gfc_get_char_type (n
->sym
->ts
.kind
);
3402 elemsz
= TYPE_SIZE_UNIT (elemsz
);
3403 elemsz
= fold_build2 (MULT_EXPR
, size_type_node
,
3408 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3409 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3410 tem
= fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3412 gfc_add_modify (&cond_block
, size
, tem
);
3413 then_b
= gfc_finish_block (&cond_block
);
3414 gfc_init_block (&cond_block
);
3415 zero
= build_int_cst (gfc_array_index_type
, 0);
3416 gfc_add_modify (&cond_block
, size
, zero
);
3417 else_b
= gfc_finish_block (&cond_block
);
3418 tem
= gfc_conv_descriptor_data_get (decl
);
3419 tem
= fold_convert (pvoid_type_node
, tem
);
3420 cond
= fold_build2_loc (input_location
, NE_EXPR
,
3422 tem
, null_pointer_node
);
3424 cond
= fold_build2_loc (input_location
,
3428 gfc_add_expr_to_block (block
,
3429 build3_loc (input_location
,
3434 OMP_CLAUSE_SIZE (node
) = size
;
3436 else if (n
->sym
->attr
.dimension
)
3438 stmtblock_t cond_block
;
3439 gfc_init_block (&cond_block
);
3440 tree size
= gfc_full_array_size (&cond_block
, decl
,
3441 GFC_TYPE_ARRAY_RANK (type
));
3443 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3444 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3445 size
= fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3447 size
= gfc_evaluate_now (size
, &cond_block
);
3450 tree var
= gfc_create_var (gfc_array_index_type
,
3452 gfc_add_modify (&cond_block
, var
, size
);
3453 tree cond_body
= gfc_finish_block (&cond_block
);
3454 tree cond
= build3_loc (input_location
, COND_EXPR
,
3455 void_type_node
, present
,
3456 cond_body
, NULL_TREE
);
3457 gfc_add_expr_to_block (block
, cond
);
3458 OMP_CLAUSE_SIZE (node
) = var
;
3462 gfc_add_block_to_block (block
, &cond_block
);
3463 OMP_CLAUSE_SIZE (node
) = size
;
3468 && INDIRECT_REF_P (decl
)
3469 && INDIRECT_REF_P (TREE_OPERAND (decl
, 0)))
3471 /* A single indirectref is handled by the middle end. */
3472 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
3473 decl
= TREE_OPERAND (decl
, 0);
3474 decl
= gfc_build_cond_assign_expr (block
, present
, decl
,
3476 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (decl
);
3479 OMP_CLAUSE_DECL (node
) = decl
;
3481 if (!n
->sym
->attr
.dimension
3482 && n
->sym
->ts
.type
== BT_CHARACTER
3483 && n
->sym
->ts
.deferred
)
3487 gcc_assert (TREE_CODE (decl
) == INDIRECT_REF
);
3488 decl
= TREE_OPERAND (decl
, 0);
3490 tree cond
= fold_build2_loc (input_location
, NE_EXPR
,
3492 decl
, null_pointer_node
);
3494 cond
= fold_build2_loc (input_location
,
3498 tree len
= n
->sym
->ts
.u
.cl
->backend_decl
;
3499 len
= fold_convert (size_type_node
, len
);
3500 tree size
= gfc_get_char_type (n
->sym
->ts
.kind
);
3501 size
= TYPE_SIZE_UNIT (size
);
3502 size
= fold_build2 (MULT_EXPR
, size_type_node
, len
, size
);
3503 size
= build3_loc (input_location
,
3508 size
= gfc_evaluate_now (size
, block
);
3509 OMP_CLAUSE_SIZE (node
) = size
;
3513 && n
->expr
->expr_type
== EXPR_VARIABLE
3514 && n
->expr
->ref
->type
== REF_ARRAY
3515 && !n
->expr
->ref
->next
)
3517 /* An array element or array section which is not part of a
3518 derived type, etc. */
3519 bool element
= n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
;
3520 tree type
= TREE_TYPE (decl
);
3521 gomp_map_kind k
= GOMP_MAP_POINTER
;
3523 && !GFC_DESCRIPTOR_TYPE_P (type
)
3524 && !(POINTER_TYPE_P (type
)
3525 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))))
3526 k
= GOMP_MAP_FIRSTPRIVATE_POINTER
;
3527 gfc_trans_omp_array_section (block
, op
, n
, decl
, element
, k
,
3528 node
, node2
, node3
, node4
);
3531 && n
->expr
->expr_type
== EXPR_VARIABLE
3532 && (n
->expr
->ref
->type
== REF_COMPONENT
3533 || n
->expr
->ref
->type
== REF_ARRAY
)
3535 && lastref
->type
== REF_COMPONENT
3536 && lastref
->u
.c
.component
->ts
.type
!= BT_CLASS
3537 && lastref
->u
.c
.component
->ts
.type
!= BT_DERIVED
3538 && !lastref
->u
.c
.component
->attr
.dimension
)
3540 /* Derived type access with last component being a scalar. */
3541 gfc_init_se (&se
, NULL
);
3543 gfc_conv_expr (&se
, n
->expr
);
3544 gfc_add_block_to_block (block
, &se
.pre
);
3545 /* For BT_CHARACTER a pointer is returned. */
3546 OMP_CLAUSE_DECL (node
)
3547 = POINTER_TYPE_P (TREE_TYPE (se
.expr
))
3548 ? build_fold_indirect_ref (se
.expr
) : se
.expr
;
3549 gfc_add_block_to_block (block
, &se
.post
);
3550 if (pointer
|| allocatable
)
3552 /* If it's a bare attach/detach clause, we just want
3553 to perform a single attach/detach operation, of the
3554 pointer itself, not of the pointed-to object. */
3556 && (n
->u
.map_op
== OMP_MAP_ATTACH
3557 || n
->u
.map_op
== OMP_MAP_DETACH
))
3559 OMP_CLAUSE_DECL (node
)
3560 = build_fold_addr_expr (OMP_CLAUSE_DECL (node
));
3561 OMP_CLAUSE_SIZE (node
) = size_zero_node
;
3562 goto finalize_map_clause
;
3565 node2
= build_omp_clause (input_location
,
3568 = (openacc
? GOMP_MAP_ATTACH_DETACH
3569 : GOMP_MAP_ALWAYS_POINTER
);
3570 OMP_CLAUSE_SET_MAP_KIND (node2
, kind
);
3571 OMP_CLAUSE_DECL (node2
)
3572 = POINTER_TYPE_P (TREE_TYPE (se
.expr
))
3574 : gfc_build_addr_expr (NULL
, se
.expr
);
3575 OMP_CLAUSE_SIZE (node2
) = size_int (0);
3577 && n
->expr
->ts
.type
== BT_CHARACTER
3578 && n
->expr
->ts
.deferred
)
3580 gcc_assert (se
.string_length
);
3582 = gfc_get_char_type (n
->expr
->ts
.kind
);
3583 OMP_CLAUSE_SIZE (node
)
3584 = fold_build2 (MULT_EXPR
, size_type_node
,
3585 fold_convert (size_type_node
,
3587 TYPE_SIZE_UNIT (tmp
));
3588 if (n
->u
.map_op
== OMP_MAP_DELETE
)
3589 kind
= GOMP_MAP_DELETE
;
3590 else if (op
== EXEC_OMP_TARGET_EXIT_DATA
)
3591 kind
= GOMP_MAP_RELEASE
;
3594 node3
= build_omp_clause (input_location
,
3596 OMP_CLAUSE_SET_MAP_KIND (node3
, kind
);
3597 OMP_CLAUSE_DECL (node3
) = se
.string_length
;
3598 OMP_CLAUSE_SIZE (node3
)
3599 = TYPE_SIZE_UNIT (gfc_charlen_type_node
);
3604 && n
->expr
->expr_type
== EXPR_VARIABLE
3605 && (n
->expr
->ref
->type
== REF_COMPONENT
3606 || n
->expr
->ref
->type
== REF_ARRAY
))
3608 gfc_init_se (&se
, NULL
);
3609 se
.expr
= gfc_maybe_dereference_var (n
->sym
, decl
);
3611 for (gfc_ref
*ref
= n
->expr
->ref
; ref
; ref
= ref
->next
)
3613 if (ref
->type
== REF_COMPONENT
)
3615 if (ref
->u
.c
.sym
->attr
.extension
)
3616 conv_parent_component_references (&se
, ref
);
3618 gfc_conv_component_ref (&se
, ref
);
3620 else if (ref
->type
== REF_ARRAY
)
3622 if (ref
->u
.ar
.type
== AR_ELEMENT
&& ref
->next
)
3623 gfc_conv_array_ref (&se
, &ref
->u
.ar
, n
->expr
,
3626 gcc_assert (!ref
->next
);
3629 sorry ("unhandled expression type");
3632 tree inner
= se
.expr
;
3634 /* Last component is a derived type or class pointer. */
3635 if (lastref
->type
== REF_COMPONENT
3636 && (lastref
->u
.c
.component
->ts
.type
== BT_DERIVED
3637 || lastref
->u
.c
.component
->ts
.type
== BT_CLASS
))
3639 if (pointer
|| (openacc
&& allocatable
))
3641 /* If it's a bare attach/detach clause, we just want
3642 to perform a single attach/detach operation, of the
3643 pointer itself, not of the pointed-to object. */
3645 && (n
->u
.map_op
== OMP_MAP_ATTACH
3646 || n
->u
.map_op
== OMP_MAP_DETACH
))
3648 OMP_CLAUSE_DECL (node
)
3649 = build_fold_addr_expr (inner
);
3650 OMP_CLAUSE_SIZE (node
) = size_zero_node
;
3651 goto finalize_map_clause
;
3656 if (lastref
->u
.c
.component
->ts
.type
== BT_CLASS
)
3658 data
= gfc_class_data_get (inner
);
3659 gcc_assert (POINTER_TYPE_P (TREE_TYPE (data
)));
3660 data
= build_fold_indirect_ref (data
);
3661 size
= gfc_class_vtab_size_get (inner
);
3663 else /* BT_DERIVED. */
3666 size
= TYPE_SIZE_UNIT (TREE_TYPE (inner
));
3669 OMP_CLAUSE_DECL (node
) = data
;
3670 OMP_CLAUSE_SIZE (node
) = size
;
3671 node2
= build_omp_clause (input_location
,
3673 OMP_CLAUSE_SET_MAP_KIND (node2
,
3675 ? GOMP_MAP_ATTACH_DETACH
3676 : GOMP_MAP_ALWAYS_POINTER
);
3677 OMP_CLAUSE_DECL (node2
) = build_fold_addr_expr (data
);
3678 OMP_CLAUSE_SIZE (node2
) = size_int (0);
3682 OMP_CLAUSE_DECL (node
) = inner
;
3683 OMP_CLAUSE_SIZE (node
)
3684 = TYPE_SIZE_UNIT (TREE_TYPE (inner
));
3687 else if (lastref
->type
== REF_ARRAY
3688 && lastref
->u
.ar
.type
== AR_FULL
)
3690 /* Bare attach and detach clauses don't want any
3691 additional nodes. */
3692 if ((n
->u
.map_op
== OMP_MAP_ATTACH
3693 || n
->u
.map_op
== OMP_MAP_DETACH
)
3694 && (POINTER_TYPE_P (TREE_TYPE (inner
))
3695 || GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (inner
))))
3697 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (inner
)))
3699 tree ptr
= gfc_conv_descriptor_data_get (inner
);
3700 OMP_CLAUSE_DECL (node
) = ptr
;
3703 OMP_CLAUSE_DECL (node
) = inner
;
3704 OMP_CLAUSE_SIZE (node
) = size_zero_node
;
3705 goto finalize_map_clause
;
3708 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (inner
)))
3710 gomp_map_kind map_kind
;
3712 tree type
= TREE_TYPE (inner
);
3713 tree ptr
= gfc_conv_descriptor_data_get (inner
);
3714 ptr
= build_fold_indirect_ref (ptr
);
3715 OMP_CLAUSE_DECL (node
) = ptr
;
3716 int rank
= GFC_TYPE_ARRAY_RANK (type
);
3717 OMP_CLAUSE_SIZE (node
)
3718 = gfc_full_array_size (block
, inner
, rank
);
3720 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3721 map_kind
= OMP_CLAUSE_MAP_KIND (node
);
3722 if (GOMP_MAP_COPY_TO_P (map_kind
)
3723 || map_kind
== GOMP_MAP_ALLOC
)
3724 map_kind
= ((GOMP_MAP_ALWAYS_P (map_kind
)
3725 || gfc_expr_attr (n
->expr
).pointer
)
3726 ? GOMP_MAP_ALWAYS_TO
: GOMP_MAP_TO
);
3727 else if (n
->u
.map_op
== OMP_MAP_RELEASE
3728 || n
->u
.map_op
== OMP_MAP_DELETE
)
3730 else if (op
== EXEC_OMP_TARGET_EXIT_DATA
)
3731 map_kind
= GOMP_MAP_RELEASE
;
3733 map_kind
= GOMP_MAP_ALLOC
;
3735 && n
->expr
->ts
.type
== BT_CHARACTER
3736 && n
->expr
->ts
.deferred
)
3738 gcc_assert (se
.string_length
);
3739 tree len
= fold_convert (size_type_node
,
3741 elemsz
= gfc_get_char_type (n
->expr
->ts
.kind
);
3742 elemsz
= TYPE_SIZE_UNIT (elemsz
);
3743 elemsz
= fold_build2 (MULT_EXPR
, size_type_node
,
3745 node4
= build_omp_clause (input_location
,
3747 OMP_CLAUSE_SET_MAP_KIND (node4
, map_kind
);
3748 OMP_CLAUSE_DECL (node4
) = se
.string_length
;
3749 OMP_CLAUSE_SIZE (node4
)
3750 = TYPE_SIZE_UNIT (gfc_charlen_type_node
);
3752 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3753 OMP_CLAUSE_SIZE (node
)
3754 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3755 OMP_CLAUSE_SIZE (node
), elemsz
);
3756 desc_node
= build_omp_clause (input_location
,
3759 OMP_CLAUSE_SET_MAP_KIND (desc_node
,
3762 OMP_CLAUSE_SET_MAP_KIND (desc_node
, map_kind
);
3763 OMP_CLAUSE_DECL (desc_node
) = inner
;
3764 OMP_CLAUSE_SIZE (desc_node
) = TYPE_SIZE_UNIT (type
);
3770 node
= desc_node
; /* Put first. */
3772 if (op
== EXEC_OMP_TARGET_EXIT_DATA
)
3773 goto finalize_map_clause
;
3774 node3
= build_omp_clause (input_location
,
3776 OMP_CLAUSE_SET_MAP_KIND (node3
,
3778 ? GOMP_MAP_ATTACH_DETACH
3779 : GOMP_MAP_ALWAYS_POINTER
);
3780 OMP_CLAUSE_DECL (node3
)
3781 = gfc_conv_descriptor_data_get (inner
);
3782 /* Similar to gfc_trans_omp_array_section (details
3783 there), we add/keep the cast for OpenMP to prevent
3784 that an 'alloc:' gets added for node3 ('desc.data')
3785 as that is part of the whole descriptor (node3).
3786 TODO: Remove once the ME handles this properly. */
3788 OMP_CLAUSE_DECL (node3
)
3789 = fold_convert (TREE_TYPE (TREE_OPERAND(ptr
, 0)),
3790 OMP_CLAUSE_DECL (node3
));
3792 STRIP_NOPS (OMP_CLAUSE_DECL (node3
));
3793 OMP_CLAUSE_SIZE (node3
) = size_int (0);
3796 OMP_CLAUSE_DECL (node
) = inner
;
3798 else if (lastref
->type
== REF_ARRAY
)
3800 /* An array element or section. */
3801 bool element
= lastref
->u
.ar
.type
== AR_ELEMENT
;
3802 gomp_map_kind kind
= (openacc
? GOMP_MAP_ATTACH_DETACH
3803 : GOMP_MAP_ALWAYS_POINTER
);
3804 gfc_trans_omp_array_section (block
, op
, n
, inner
, element
,
3805 kind
, node
, node2
, node3
,
3812 sorry ("unhandled expression");
3814 finalize_map_clause
:
3816 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
3818 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
3820 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
3822 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
3824 omp_clauses
= gfc_trans_add_clause (node5
, omp_clauses
);
3829 case OMP_LIST_CACHE
:
3830 for (; n
!= NULL
; n
= n
->next
)
3832 if (!n
->sym
->attr
.referenced
)
3838 clause_code
= OMP_CLAUSE_TO
;
3841 clause_code
= OMP_CLAUSE_FROM
;
3843 case OMP_LIST_CACHE
:
3844 clause_code
= OMP_CLAUSE__CACHE_
;
3849 tree node
= build_omp_clause (input_location
, clause_code
);
3851 || (n
->expr
->ref
->type
== REF_ARRAY
3852 && n
->expr
->ref
->u
.ar
.type
== AR_FULL
3853 && n
->expr
->ref
->next
== NULL
))
3855 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
3856 if (gfc_omp_privatize_by_reference (decl
))
3858 if (gfc_omp_is_allocatable_or_ptr (decl
))
3859 decl
= build_fold_indirect_ref (decl
);
3860 decl
= build_fold_indirect_ref (decl
);
3862 else if (DECL_P (decl
))
3863 TREE_ADDRESSABLE (decl
) = 1;
3864 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
3866 tree type
= TREE_TYPE (decl
);
3867 tree ptr
= gfc_conv_descriptor_data_get (decl
);
3868 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr
)));
3869 ptr
= build_fold_indirect_ref (ptr
);
3870 OMP_CLAUSE_DECL (node
) = ptr
;
3871 OMP_CLAUSE_SIZE (node
)
3872 = gfc_full_array_size (block
, decl
,
3873 GFC_TYPE_ARRAY_RANK (type
));
3875 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3876 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3877 OMP_CLAUSE_SIZE (node
)
3878 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3879 OMP_CLAUSE_SIZE (node
), elemsz
);
3883 OMP_CLAUSE_DECL (node
) = decl
;
3884 if (gfc_omp_is_allocatable_or_ptr (decl
))
3885 OMP_CLAUSE_SIZE (node
)
3886 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
3892 gfc_init_se (&se
, NULL
);
3893 if (n
->expr
->rank
== 0)
3895 gfc_conv_expr_reference (&se
, n
->expr
);
3897 gfc_add_block_to_block (block
, &se
.pre
);
3898 OMP_CLAUSE_SIZE (node
)
3899 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ptr
)));
3903 gfc_conv_expr_descriptor (&se
, n
->expr
);
3904 ptr
= gfc_conv_array_data (se
.expr
);
3905 tree type
= TREE_TYPE (se
.expr
);
3906 gfc_add_block_to_block (block
, &se
.pre
);
3907 OMP_CLAUSE_SIZE (node
)
3908 = gfc_full_array_size (block
, se
.expr
,
3909 GFC_TYPE_ARRAY_RANK (type
));
3911 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3912 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3913 OMP_CLAUSE_SIZE (node
)
3914 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3915 OMP_CLAUSE_SIZE (node
), elemsz
);
3917 gfc_add_block_to_block (block
, &se
.post
);
3918 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr
)));
3919 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
3921 if (n
->u
.present_modifier
)
3922 OMP_CLAUSE_MOTION_PRESENT (node
) = 1;
3923 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
3926 case OMP_LIST_USES_ALLOCATORS
:
3927 /* Ignore pre-defined allocators as no special treatment is needed. */
3928 for (; n
!= NULL
; n
= n
->next
)
3929 if (n
->sym
->attr
.flavor
== FL_VARIABLE
)
3932 sorry_at (input_location
, "%<uses_allocators%> clause with traits "
3933 "and memory spaces");
3940 if (clauses
->if_expr
)
3944 gfc_init_se (&se
, NULL
);
3945 gfc_conv_expr (&se
, clauses
->if_expr
);
3946 gfc_add_block_to_block (block
, &se
.pre
);
3947 if_var
= gfc_evaluate_now (se
.expr
, block
);
3948 gfc_add_block_to_block (block
, &se
.post
);
3950 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF
);
3951 OMP_CLAUSE_IF_MODIFIER (c
) = ERROR_MARK
;
3952 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
3953 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3955 for (ifc
= 0; ifc
< OMP_IF_LAST
; ifc
++)
3956 if (clauses
->if_exprs
[ifc
])
3960 gfc_init_se (&se
, NULL
);
3961 gfc_conv_expr (&se
, clauses
->if_exprs
[ifc
]);
3962 gfc_add_block_to_block (block
, &se
.pre
);
3963 if_var
= gfc_evaluate_now (se
.expr
, block
);
3964 gfc_add_block_to_block (block
, &se
.post
);
3966 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF
);
3970 OMP_CLAUSE_IF_MODIFIER (c
) = VOID_CST
;
3972 case OMP_IF_PARALLEL
:
3973 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_PARALLEL
;
3976 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_SIMD
;
3979 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASK
;
3981 case OMP_IF_TASKLOOP
:
3982 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASKLOOP
;
3985 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET
;
3987 case OMP_IF_TARGET_DATA
:
3988 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_DATA
;
3990 case OMP_IF_TARGET_UPDATE
:
3991 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_UPDATE
;
3993 case OMP_IF_TARGET_ENTER_DATA
:
3994 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_ENTER_DATA
;
3996 case OMP_IF_TARGET_EXIT_DATA
:
3997 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_EXIT_DATA
;
4002 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
4003 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4006 if (clauses
->final_expr
)
4010 gfc_init_se (&se
, NULL
);
4011 gfc_conv_expr (&se
, clauses
->final_expr
);
4012 gfc_add_block_to_block (block
, &se
.pre
);
4013 final_var
= gfc_evaluate_now (se
.expr
, block
);
4014 gfc_add_block_to_block (block
, &se
.post
);
4016 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FINAL
);
4017 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
4018 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4021 if (clauses
->num_threads
)
4025 gfc_init_se (&se
, NULL
);
4026 gfc_conv_expr (&se
, clauses
->num_threads
);
4027 gfc_add_block_to_block (block
, &se
.pre
);
4028 num_threads
= gfc_evaluate_now (se
.expr
, block
);
4029 gfc_add_block_to_block (block
, &se
.post
);
4031 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_THREADS
);
4032 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
4033 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4036 chunk_size
= NULL_TREE
;
4037 if (clauses
->chunk_size
)
4039 gfc_init_se (&se
, NULL
);
4040 gfc_conv_expr (&se
, clauses
->chunk_size
);
4041 gfc_add_block_to_block (block
, &se
.pre
);
4042 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
4043 gfc_add_block_to_block (block
, &se
.post
);
4046 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
4048 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SCHEDULE
);
4049 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
4050 switch (clauses
->sched_kind
)
4052 case OMP_SCHED_STATIC
:
4053 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
4055 case OMP_SCHED_DYNAMIC
:
4056 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
4058 case OMP_SCHED_GUIDED
:
4059 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
4061 case OMP_SCHED_RUNTIME
:
4062 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
4064 case OMP_SCHED_AUTO
:
4065 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
4070 if (clauses
->sched_monotonic
)
4071 OMP_CLAUSE_SCHEDULE_KIND (c
)
4072 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
4073 | OMP_CLAUSE_SCHEDULE_MONOTONIC
);
4074 else if (clauses
->sched_nonmonotonic
)
4075 OMP_CLAUSE_SCHEDULE_KIND (c
)
4076 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
4077 | OMP_CLAUSE_SCHEDULE_NONMONOTONIC
);
4078 if (clauses
->sched_simd
)
4079 OMP_CLAUSE_SCHEDULE_SIMD (c
) = 1;
4080 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4083 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
4085 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEFAULT
);
4086 switch (clauses
->default_sharing
)
4088 case OMP_DEFAULT_NONE
:
4089 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
4091 case OMP_DEFAULT_SHARED
:
4092 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
4094 case OMP_DEFAULT_PRIVATE
:
4095 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
4097 case OMP_DEFAULT_FIRSTPRIVATE
:
4098 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
4100 case OMP_DEFAULT_PRESENT
:
4101 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRESENT
;
4106 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4109 if (clauses
->nowait
)
4111 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOWAIT
);
4112 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4115 if (clauses
->ordered
)
4117 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_ORDERED
);
4118 OMP_CLAUSE_ORDERED_EXPR (c
)
4119 = clauses
->orderedc
? build_int_cst (integer_type_node
,
4120 clauses
->orderedc
) : NULL_TREE
;
4121 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4124 if (clauses
->order_concurrent
)
4126 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_ORDER
);
4127 OMP_CLAUSE_ORDER_UNCONSTRAINED (c
) = clauses
->order_unconstrained
;
4128 OMP_CLAUSE_ORDER_REPRODUCIBLE (c
) = clauses
->order_reproducible
;
4129 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4132 if (clauses
->untied
)
4134 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_UNTIED
);
4135 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4138 if (clauses
->mergeable
)
4140 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_MERGEABLE
);
4141 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4144 if (clauses
->collapse
)
4146 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_COLLAPSE
);
4147 OMP_CLAUSE_COLLAPSE_EXPR (c
)
4148 = build_int_cst (integer_type_node
, clauses
->collapse
);
4149 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4152 if (clauses
->inbranch
)
4154 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_INBRANCH
);
4155 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4158 if (clauses
->notinbranch
)
4160 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOTINBRANCH
);
4161 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4164 switch (clauses
->cancel
)
4166 case OMP_CANCEL_UNKNOWN
:
4168 case OMP_CANCEL_PARALLEL
:
4169 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PARALLEL
);
4170 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4172 case OMP_CANCEL_SECTIONS
:
4173 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SECTIONS
);
4174 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4177 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FOR
);
4178 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4180 case OMP_CANCEL_TASKGROUP
:
4181 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_TASKGROUP
);
4182 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4186 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
4188 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PROC_BIND
);
4189 switch (clauses
->proc_bind
)
4191 case OMP_PROC_BIND_PRIMARY
:
4192 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_PRIMARY
;
4194 case OMP_PROC_BIND_MASTER
:
4195 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
4197 case OMP_PROC_BIND_SPREAD
:
4198 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
4200 case OMP_PROC_BIND_CLOSE
:
4201 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
4206 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4209 if (clauses
->safelen_expr
)
4213 gfc_init_se (&se
, NULL
);
4214 gfc_conv_expr (&se
, clauses
->safelen_expr
);
4215 gfc_add_block_to_block (block
, &se
.pre
);
4216 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
4217 gfc_add_block_to_block (block
, &se
.post
);
4219 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SAFELEN
);
4220 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
4221 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4224 if (clauses
->simdlen_expr
)
4228 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMDLEN
);
4229 OMP_CLAUSE_SIMDLEN_EXPR (c
)
4230 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
4231 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4237 gfc_init_se (&se
, NULL
);
4238 gfc_conv_expr (&se
, clauses
->simdlen_expr
);
4239 gfc_add_block_to_block (block
, &se
.pre
);
4240 simdlen_var
= gfc_evaluate_now (se
.expr
, block
);
4241 gfc_add_block_to_block (block
, &se
.post
);
4243 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMDLEN
);
4244 OMP_CLAUSE_SIMDLEN_EXPR (c
) = simdlen_var
;
4245 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4249 if (clauses
->num_teams_upper
)
4251 tree num_teams_lower
= NULL_TREE
, num_teams_upper
;
4253 gfc_init_se (&se
, NULL
);
4254 gfc_conv_expr (&se
, clauses
->num_teams_upper
);
4255 gfc_add_block_to_block (block
, &se
.pre
);
4256 num_teams_upper
= gfc_evaluate_now (se
.expr
, block
);
4257 gfc_add_block_to_block (block
, &se
.post
);
4259 if (clauses
->num_teams_lower
)
4261 gfc_init_se (&se
, NULL
);
4262 gfc_conv_expr (&se
, clauses
->num_teams_lower
);
4263 gfc_add_block_to_block (block
, &se
.pre
);
4264 num_teams_lower
= gfc_evaluate_now (se
.expr
, block
);
4265 gfc_add_block_to_block (block
, &se
.post
);
4267 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_TEAMS
);
4268 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
) = num_teams_lower
;
4269 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c
) = num_teams_upper
;
4270 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4273 if (clauses
->device
)
4277 gfc_init_se (&se
, NULL
);
4278 gfc_conv_expr (&se
, clauses
->device
);
4279 gfc_add_block_to_block (block
, &se
.pre
);
4280 device
= gfc_evaluate_now (se
.expr
, block
);
4281 gfc_add_block_to_block (block
, &se
.post
);
4283 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEVICE
);
4284 OMP_CLAUSE_DEVICE_ID (c
) = device
;
4286 if (clauses
->ancestor
)
4287 OMP_CLAUSE_DEVICE_ANCESTOR (c
) = 1;
4289 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4292 if (clauses
->thread_limit
)
4296 gfc_init_se (&se
, NULL
);
4297 gfc_conv_expr (&se
, clauses
->thread_limit
);
4298 gfc_add_block_to_block (block
, &se
.pre
);
4299 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
4300 gfc_add_block_to_block (block
, &se
.post
);
4302 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_THREAD_LIMIT
);
4303 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
4304 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4307 chunk_size
= NULL_TREE
;
4308 if (clauses
->dist_chunk_size
)
4310 gfc_init_se (&se
, NULL
);
4311 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
4312 gfc_add_block_to_block (block
, &se
.pre
);
4313 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
4314 gfc_add_block_to_block (block
, &se
.post
);
4317 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
4319 c
= build_omp_clause (gfc_get_location (&where
),
4320 OMP_CLAUSE_DIST_SCHEDULE
);
4321 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
4322 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4325 if (clauses
->grainsize
)
4329 gfc_init_se (&se
, NULL
);
4330 gfc_conv_expr (&se
, clauses
->grainsize
);
4331 gfc_add_block_to_block (block
, &se
.pre
);
4332 grainsize
= gfc_evaluate_now (se
.expr
, block
);
4333 gfc_add_block_to_block (block
, &se
.post
);
4335 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_GRAINSIZE
);
4336 OMP_CLAUSE_GRAINSIZE_EXPR (c
) = grainsize
;
4337 if (clauses
->grainsize_strict
)
4338 OMP_CLAUSE_GRAINSIZE_STRICT (c
) = 1;
4339 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4342 if (clauses
->num_tasks
)
4346 gfc_init_se (&se
, NULL
);
4347 gfc_conv_expr (&se
, clauses
->num_tasks
);
4348 gfc_add_block_to_block (block
, &se
.pre
);
4349 num_tasks
= gfc_evaluate_now (se
.expr
, block
);
4350 gfc_add_block_to_block (block
, &se
.post
);
4352 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_TASKS
);
4353 OMP_CLAUSE_NUM_TASKS_EXPR (c
) = num_tasks
;
4354 if (clauses
->num_tasks_strict
)
4355 OMP_CLAUSE_NUM_TASKS_STRICT (c
) = 1;
4356 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4359 if (clauses
->priority
)
4363 gfc_init_se (&se
, NULL
);
4364 gfc_conv_expr (&se
, clauses
->priority
);
4365 gfc_add_block_to_block (block
, &se
.pre
);
4366 priority
= gfc_evaluate_now (se
.expr
, block
);
4367 gfc_add_block_to_block (block
, &se
.post
);
4369 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PRIORITY
);
4370 OMP_CLAUSE_PRIORITY_EXPR (c
) = priority
;
4371 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4374 if (clauses
->detach
)
4378 gfc_init_se (&se
, NULL
);
4379 gfc_conv_expr (&se
, clauses
->detach
);
4380 gfc_add_block_to_block (block
, &se
.pre
);
4382 gfc_add_block_to_block (block
, &se
.post
);
4384 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DETACH
);
4385 TREE_ADDRESSABLE (detach
) = 1;
4386 OMP_CLAUSE_DECL (c
) = detach
;
4387 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4390 if (clauses
->filter
)
4394 gfc_init_se (&se
, NULL
);
4395 gfc_conv_expr (&se
, clauses
->filter
);
4396 gfc_add_block_to_block (block
, &se
.pre
);
4397 filter
= gfc_evaluate_now (se
.expr
, block
);
4398 gfc_add_block_to_block (block
, &se
.post
);
4400 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FILTER
);
4401 OMP_CLAUSE_FILTER_EXPR (c
) = filter
;
4402 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4409 gfc_init_se (&se
, NULL
);
4410 gfc_conv_expr (&se
, clauses
->hint
);
4411 gfc_add_block_to_block (block
, &se
.pre
);
4412 hint
= gfc_evaluate_now (se
.expr
, block
);
4413 gfc_add_block_to_block (block
, &se
.post
);
4415 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_HINT
);
4416 OMP_CLAUSE_HINT_EXPR (c
) = hint
;
4417 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4422 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMD
);
4423 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4425 if (clauses
->threads
)
4427 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_THREADS
);
4428 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4430 if (clauses
->nogroup
)
4432 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOGROUP
);
4433 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4436 for (int i
= 0; i
< OMP_DEFAULTMAP_CAT_NUM
; i
++)
4438 if (clauses
->defaultmap
[i
] == OMP_DEFAULTMAP_UNSET
)
4440 enum omp_clause_defaultmap_kind behavior
, category
;
4441 switch ((gfc_omp_defaultmap_category
) i
)
4443 case OMP_DEFAULTMAP_CAT_UNCATEGORIZED
:
4444 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
;
4446 case OMP_DEFAULTMAP_CAT_ALL
:
4447 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALL
;
4449 case OMP_DEFAULTMAP_CAT_SCALAR
:
4450 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
;
4452 case OMP_DEFAULTMAP_CAT_AGGREGATE
:
4453 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
;
4455 case OMP_DEFAULTMAP_CAT_ALLOCATABLE
:
4456 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
;
4458 case OMP_DEFAULTMAP_CAT_POINTER
:
4459 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
;
4461 default: gcc_unreachable ();
4463 switch (clauses
->defaultmap
[i
])
4465 case OMP_DEFAULTMAP_ALLOC
:
4466 behavior
= OMP_CLAUSE_DEFAULTMAP_ALLOC
;
4468 case OMP_DEFAULTMAP_TO
: behavior
= OMP_CLAUSE_DEFAULTMAP_TO
; break;
4469 case OMP_DEFAULTMAP_FROM
: behavior
= OMP_CLAUSE_DEFAULTMAP_FROM
; break;
4470 case OMP_DEFAULTMAP_TOFROM
:
4471 behavior
= OMP_CLAUSE_DEFAULTMAP_TOFROM
;
4473 case OMP_DEFAULTMAP_FIRSTPRIVATE
:
4474 behavior
= OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
;
4476 case OMP_DEFAULTMAP_PRESENT
:
4477 behavior
= OMP_CLAUSE_DEFAULTMAP_PRESENT
;
4479 case OMP_DEFAULTMAP_NONE
: behavior
= OMP_CLAUSE_DEFAULTMAP_NONE
; break;
4480 case OMP_DEFAULTMAP_DEFAULT
:
4481 behavior
= OMP_CLAUSE_DEFAULTMAP_DEFAULT
;
4483 default: gcc_unreachable ();
4485 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEFAULTMAP
);
4486 OMP_CLAUSE_DEFAULTMAP_SET_KIND (c
, behavior
, category
);
4487 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4490 if (clauses
->doacross_source
)
4492 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DOACROSS
);
4493 OMP_CLAUSE_DOACROSS_KIND (c
) = OMP_CLAUSE_DOACROSS_SOURCE
;
4494 OMP_CLAUSE_DOACROSS_DEPEND (c
) = clauses
->depend_source
;
4495 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4500 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_ASYNC
);
4501 if (clauses
->async_expr
)
4502 OMP_CLAUSE_ASYNC_EXPR (c
)
4503 = gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
4505 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
4506 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4510 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SEQ
);
4511 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4513 if (clauses
->par_auto
)
4515 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_AUTO
);
4516 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4518 if (clauses
->if_present
)
4520 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF_PRESENT
);
4521 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4523 if (clauses
->finalize
)
4525 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FINALIZE
);
4526 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4528 if (clauses
->independent
)
4530 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_INDEPENDENT
);
4531 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4533 if (clauses
->wait_list
)
4537 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
4539 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_WAIT
);
4540 OMP_CLAUSE_DECL (c
) = gfc_convert_expr_to_tree (block
, el
->expr
);
4541 OMP_CLAUSE_CHAIN (c
) = omp_clauses
;
4545 if (clauses
->num_gangs_expr
)
4548 = gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
4549 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_GANGS
);
4550 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
4551 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4553 if (clauses
->num_workers_expr
)
4555 tree num_workers_var
4556 = gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
4557 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_WORKERS
);
4558 OMP_CLAUSE_NUM_WORKERS_EXPR (c
) = num_workers_var
;
4559 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4561 if (clauses
->vector_length_expr
)
4563 tree vector_length_var
4564 = gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
4565 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_VECTOR_LENGTH
);
4566 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
) = vector_length_var
;
4567 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4569 if (clauses
->tile_list
)
4571 vec
<tree
, va_gc
> *tvec
;
4574 vec_alloc (tvec
, 4);
4576 for (el
= clauses
->tile_list
; el
; el
= el
->next
)
4577 vec_safe_push (tvec
, gfc_convert_expr_to_tree (block
, el
->expr
));
4579 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_TILE
);
4580 OMP_CLAUSE_TILE_LIST (c
) = build_tree_list_vec (tvec
);
4581 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4584 if (clauses
->vector
)
4586 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_VECTOR
);
4587 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4589 if (clauses
->vector_expr
)
4592 = gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
4593 OMP_CLAUSE_VECTOR_EXPR (c
) = vector_var
;
4595 /* TODO: We're not capturing location information for individual
4596 clauses. However, if we have an expression attached to the
4597 clause, that one provides better location information. */
4598 OMP_CLAUSE_LOCATION (c
)
4599 = gfc_get_location (&clauses
->vector_expr
->where
);
4602 if (clauses
->worker
)
4604 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_WORKER
);
4605 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4607 if (clauses
->worker_expr
)
4610 = gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
4611 OMP_CLAUSE_WORKER_EXPR (c
) = worker_var
;
4613 /* TODO: We're not capturing location information for individual
4614 clauses. However, if we have an expression attached to the
4615 clause, that one provides better location information. */
4616 OMP_CLAUSE_LOCATION (c
)
4617 = gfc_get_location (&clauses
->worker_expr
->where
);
4623 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_GANG
);
4624 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4626 if (clauses
->gang_num_expr
)
4628 arg
= gfc_convert_expr_to_tree (block
, clauses
->gang_num_expr
);
4629 OMP_CLAUSE_GANG_EXPR (c
) = arg
;
4631 /* TODO: We're not capturing location information for individual
4632 clauses. However, if we have an expression attached to the
4633 clause, that one provides better location information. */
4634 OMP_CLAUSE_LOCATION (c
)
4635 = gfc_get_location (&clauses
->gang_num_expr
->where
);
4638 if (clauses
->gang_static
)
4640 arg
= clauses
->gang_static_expr
4641 ? gfc_convert_expr_to_tree (block
, clauses
->gang_static_expr
)
4642 : integer_minus_one_node
;
4643 OMP_CLAUSE_GANG_STATIC_EXPR (c
) = arg
;
4646 if (clauses
->bind
!= OMP_BIND_UNSET
)
4648 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_BIND
);
4649 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4650 switch (clauses
->bind
)
4652 case OMP_BIND_TEAMS
:
4653 OMP_CLAUSE_BIND_KIND (c
) = OMP_CLAUSE_BIND_TEAMS
;
4655 case OMP_BIND_PARALLEL
:
4656 OMP_CLAUSE_BIND_KIND (c
) = OMP_CLAUSE_BIND_PARALLEL
;
4658 case OMP_BIND_THREAD
:
4659 OMP_CLAUSE_BIND_KIND (c
) = OMP_CLAUSE_BIND_THREAD
;
4665 /* OpenACC 'nohost' clauses cannot appear here. */
4666 gcc_checking_assert (!clauses
->nohost
);
4668 return nreverse (omp_clauses
);
4671 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
4674 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
4679 stmt
= gfc_trans_code (code
);
4680 if (TREE_CODE (stmt
) != BIND_EXPR
)
4682 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
4684 tree block
= poplevel (1, 0);
4685 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
4695 /* Translate OpenACC 'parallel', 'kernels', 'serial', 'data', 'host_data'
4699 gfc_trans_oacc_construct (gfc_code
*code
)
4702 tree stmt
, oacc_clauses
;
4703 enum tree_code construct_code
;
4707 case EXEC_OACC_PARALLEL
:
4708 construct_code
= OACC_PARALLEL
;
4710 case EXEC_OACC_KERNELS
:
4711 construct_code
= OACC_KERNELS
;
4713 case EXEC_OACC_SERIAL
:
4714 construct_code
= OACC_SERIAL
;
4716 case EXEC_OACC_DATA
:
4717 construct_code
= OACC_DATA
;
4719 case EXEC_OACC_HOST_DATA
:
4720 construct_code
= OACC_HOST_DATA
;
4726 gfc_start_block (&block
);
4727 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4728 code
->loc
, false, true);
4730 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4731 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4732 stmt
= build2_loc (gfc_get_location (&code
->loc
), construct_code
,
4733 void_type_node
, stmt
, oacc_clauses
);
4734 gfc_add_expr_to_block (&block
, stmt
);
4735 return gfc_finish_block (&block
);
4738 /* update, enter_data, exit_data, cache. */
4740 gfc_trans_oacc_executable_directive (gfc_code
*code
)
4743 tree stmt
, oacc_clauses
;
4744 enum tree_code construct_code
;
4748 case EXEC_OACC_UPDATE
:
4749 construct_code
= OACC_UPDATE
;
4751 case EXEC_OACC_ENTER_DATA
:
4752 construct_code
= OACC_ENTER_DATA
;
4754 case EXEC_OACC_EXIT_DATA
:
4755 construct_code
= OACC_EXIT_DATA
;
4757 case EXEC_OACC_CACHE
:
4758 construct_code
= OACC_CACHE
;
4764 gfc_start_block (&block
);
4765 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4766 code
->loc
, false, true);
4767 stmt
= build1_loc (input_location
, construct_code
, void_type_node
,
4769 gfc_add_expr_to_block (&block
, stmt
);
4770 return gfc_finish_block (&block
);
4774 gfc_trans_oacc_wait_directive (gfc_code
*code
)
4778 vec
<tree
, va_gc
> *args
;
4781 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
4782 location_t loc
= input_location
;
4784 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
4787 vec_alloc (args
, nparms
+ 2);
4788 stmt
= builtin_decl_explicit (BUILT_IN_GOACC_WAIT
);
4790 gfc_start_block (&block
);
4792 if (clauses
->async_expr
)
4793 t
= gfc_convert_expr_to_tree (&block
, clauses
->async_expr
);
4795 t
= build_int_cst (integer_type_node
, -2);
4797 args
->quick_push (t
);
4798 args
->quick_push (build_int_cst (integer_type_node
, nparms
));
4800 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
4801 args
->quick_push (gfc_convert_expr_to_tree (&block
, el
->expr
));
4803 stmt
= build_call_expr_loc_vec (loc
, stmt
, args
);
4804 gfc_add_expr_to_block (&block
, stmt
);
4808 return gfc_finish_block (&block
);
4811 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
4812 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
4815 gfc_trans_omp_assume (gfc_code
*code
)
4818 gfc_init_block (&block
);
4819 gfc_omp_assumptions
*assume
= code
->ext
.omp_clauses
->assume
;
4821 for (gfc_expr_list
*el
= assume
->holds
; el
; el
= el
->next
)
4823 location_t loc
= gfc_get_location (&el
->expr
->where
);
4825 gfc_init_se (&se
, NULL
);
4826 gfc_conv_expr (&se
, el
->expr
);
4828 if (se
.pre
.head
== NULL_TREE
&& se
.post
.head
== NULL_TREE
)
4832 tree var
= create_tmp_var_raw (boolean_type_node
);
4833 DECL_CONTEXT (var
) = current_function_decl
;
4835 gfc_init_block (&block2
);
4836 gfc_add_block_to_block (&block2
, &se
.pre
);
4837 gfc_add_modify_loc (loc
, &block2
, var
,
4838 fold_convert_loc (loc
, boolean_type_node
,
4840 gfc_add_block_to_block (&block2
, &se
.post
);
4841 t
= gfc_finish_block (&block2
);
4842 t
= build4 (TARGET_EXPR
, boolean_type_node
, var
, t
, NULL
, NULL
);
4844 t
= build_call_expr_internal_loc (loc
, IFN_ASSUME
,
4845 void_type_node
, 1, t
);
4846 gfc_add_expr_to_block (&block
, t
);
4848 gfc_add_expr_to_block (&block
, gfc_trans_omp_code (code
->block
->next
, true));
4849 return gfc_finish_block (&block
);
4853 gfc_trans_omp_atomic (gfc_code
*code
)
4855 gfc_code
*atomic_code
= code
->block
;
4859 gfc_expr
*expr1
, *expr2
, *e
, *capture_expr1
= NULL
, *capture_expr2
= NULL
;
4862 tree lhsaddr
, type
, rhs
, x
, compare
= NULL_TREE
, comp_tgt
= NULL_TREE
;
4863 enum tree_code op
= ERROR_MARK
;
4864 enum tree_code aop
= OMP_ATOMIC
;
4865 bool var_on_left
= false, else_branch
= false;
4866 enum omp_memory_order mo
, fail_mo
;
4867 switch (atomic_code
->ext
.omp_clauses
->memorder
)
4869 case OMP_MEMORDER_UNSET
: mo
= OMP_MEMORY_ORDER_UNSPECIFIED
; break;
4870 case OMP_MEMORDER_ACQ_REL
: mo
= OMP_MEMORY_ORDER_ACQ_REL
; break;
4871 case OMP_MEMORDER_ACQUIRE
: mo
= OMP_MEMORY_ORDER_ACQUIRE
; break;
4872 case OMP_MEMORDER_RELAXED
: mo
= OMP_MEMORY_ORDER_RELAXED
; break;
4873 case OMP_MEMORDER_RELEASE
: mo
= OMP_MEMORY_ORDER_RELEASE
; break;
4874 case OMP_MEMORDER_SEQ_CST
: mo
= OMP_MEMORY_ORDER_SEQ_CST
; break;
4875 default: gcc_unreachable ();
4877 switch (atomic_code
->ext
.omp_clauses
->fail
)
4879 case OMP_MEMORDER_UNSET
: fail_mo
= OMP_FAIL_MEMORY_ORDER_UNSPECIFIED
; break;
4880 case OMP_MEMORDER_ACQUIRE
: fail_mo
= OMP_FAIL_MEMORY_ORDER_ACQUIRE
; break;
4881 case OMP_MEMORDER_RELAXED
: fail_mo
= OMP_FAIL_MEMORY_ORDER_RELAXED
; break;
4882 case OMP_MEMORDER_SEQ_CST
: fail_mo
= OMP_FAIL_MEMORY_ORDER_SEQ_CST
; break;
4883 default: gcc_unreachable ();
4885 mo
= (omp_memory_order
) (mo
| fail_mo
);
4887 code
= code
->block
->next
;
4888 if (atomic_code
->ext
.omp_clauses
->compare
)
4890 gfc_expr
*comp_expr
;
4891 if (code
->op
== EXEC_IF
)
4893 comp_expr
= code
->block
->expr1
;
4894 gcc_assert (code
->block
->next
->op
== EXEC_ASSIGN
);
4895 expr1
= code
->block
->next
->expr1
;
4896 expr2
= code
->block
->next
->expr2
;
4897 if (code
->block
->block
)
4899 gcc_assert (atomic_code
->ext
.omp_clauses
->capture
4900 && code
->block
->block
->next
->op
== EXEC_ASSIGN
);
4902 aop
= OMP_ATOMIC_CAPTURE_OLD
;
4903 capture_expr1
= code
->block
->block
->next
->expr1
;
4904 capture_expr2
= code
->block
->block
->next
->expr2
;
4906 else if (atomic_code
->ext
.omp_clauses
->capture
)
4908 gcc_assert (code
->next
->op
== EXEC_ASSIGN
);
4909 aop
= OMP_ATOMIC_CAPTURE_NEW
;
4910 capture_expr1
= code
->next
->expr1
;
4911 capture_expr2
= code
->next
->expr2
;
4916 gcc_assert (atomic_code
->ext
.omp_clauses
->capture
4917 && code
->op
== EXEC_ASSIGN
4918 && code
->next
->op
== EXEC_IF
);
4919 aop
= OMP_ATOMIC_CAPTURE_OLD
;
4920 capture_expr1
= code
->expr1
;
4921 capture_expr2
= code
->expr2
;
4922 expr1
= code
->next
->block
->next
->expr1
;
4923 expr2
= code
->next
->block
->next
->expr2
;
4924 comp_expr
= code
->next
->block
->expr1
;
4926 gfc_init_se (&lse
, NULL
);
4927 gfc_conv_expr (&lse
, comp_expr
->value
.op
.op2
);
4928 gfc_add_block_to_block (&block
, &lse
.pre
);
4930 var
= expr1
->symtree
->n
.sym
;
4934 gcc_assert (code
->op
== EXEC_ASSIGN
);
4935 expr1
= code
->expr1
;
4936 expr2
= code
->expr2
;
4937 if (atomic_code
->ext
.omp_clauses
->capture
4938 && (expr2
->expr_type
== EXPR_VARIABLE
4939 || (expr2
->expr_type
== EXPR_FUNCTION
4940 && expr2
->value
.function
.isym
4941 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
4942 && (expr2
->value
.function
.actual
->expr
->expr_type
4943 == EXPR_VARIABLE
))))
4945 capture_expr1
= expr1
;
4946 capture_expr2
= expr2
;
4947 expr1
= code
->next
->expr1
;
4948 expr2
= code
->next
->expr2
;
4949 aop
= OMP_ATOMIC_CAPTURE_OLD
;
4951 else if (atomic_code
->ext
.omp_clauses
->capture
)
4953 aop
= OMP_ATOMIC_CAPTURE_NEW
;
4954 capture_expr1
= code
->next
->expr1
;
4955 capture_expr2
= code
->next
->expr2
;
4957 var
= expr1
->symtree
->n
.sym
;
4960 gfc_init_se (&lse
, NULL
);
4961 gfc_init_se (&rse
, NULL
);
4962 gfc_init_se (&vse
, NULL
);
4963 gfc_start_block (&block
);
4965 if (((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
4966 != GFC_OMP_ATOMIC_WRITE
)
4967 && expr2
->expr_type
== EXPR_FUNCTION
4968 && expr2
->value
.function
.isym
4969 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
4970 expr2
= expr2
->value
.function
.actual
->expr
;
4972 if ((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
4973 == GFC_OMP_ATOMIC_READ
)
4975 gfc_conv_expr (&vse
, expr1
);
4976 gfc_add_block_to_block (&block
, &vse
.pre
);
4978 gfc_conv_expr (&lse
, expr2
);
4979 gfc_add_block_to_block (&block
, &lse
.pre
);
4980 type
= TREE_TYPE (lse
.expr
);
4981 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
4983 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
4984 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
4985 x
= convert (TREE_TYPE (vse
.expr
), x
);
4986 gfc_add_modify (&block
, vse
.expr
, x
);
4988 gfc_add_block_to_block (&block
, &lse
.pre
);
4989 gfc_add_block_to_block (&block
, &rse
.pre
);
4991 return gfc_finish_block (&block
);
4995 && capture_expr2
->expr_type
== EXPR_FUNCTION
4996 && capture_expr2
->value
.function
.isym
4997 && capture_expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
4998 capture_expr2
= capture_expr2
->value
.function
.actual
->expr
;
4999 gcc_assert (!capture_expr2
|| capture_expr2
->expr_type
== EXPR_VARIABLE
);
5001 if (aop
== OMP_ATOMIC_CAPTURE_OLD
)
5003 gfc_conv_expr (&vse
, capture_expr1
);
5004 gfc_add_block_to_block (&block
, &vse
.pre
);
5005 gfc_conv_expr (&lse
, capture_expr2
);
5006 gfc_add_block_to_block (&block
, &lse
.pre
);
5007 gfc_init_se (&lse
, NULL
);
5010 gfc_conv_expr (&lse
, expr1
);
5011 gfc_add_block_to_block (&block
, &lse
.pre
);
5012 type
= TREE_TYPE (lse
.expr
);
5013 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
5015 if (((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
5016 == GFC_OMP_ATOMIC_WRITE
)
5017 || (atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_SWAP
)
5020 gfc_conv_expr (&rse
, expr2
);
5021 gfc_add_block_to_block (&block
, &rse
.pre
);
5023 else if (expr2
->expr_type
== EXPR_OP
)
5026 switch (expr2
->value
.op
.op
)
5028 case INTRINSIC_PLUS
:
5031 case INTRINSIC_TIMES
:
5034 case INTRINSIC_MINUS
:
5037 case INTRINSIC_DIVIDE
:
5038 if (expr2
->ts
.type
== BT_INTEGER
)
5039 op
= TRUNC_DIV_EXPR
;
5044 op
= TRUTH_ANDIF_EXPR
;
5047 op
= TRUTH_ORIF_EXPR
;
5052 case INTRINSIC_NEQV
:
5058 e
= expr2
->value
.op
.op1
;
5059 if (e
->expr_type
== EXPR_FUNCTION
5060 && e
->value
.function
.isym
5061 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
5062 e
= e
->value
.function
.actual
->expr
;
5063 if (e
->expr_type
== EXPR_VARIABLE
5064 && e
->symtree
!= NULL
5065 && e
->symtree
->n
.sym
== var
)
5067 expr2
= expr2
->value
.op
.op2
;
5072 e
= expr2
->value
.op
.op2
;
5073 if (e
->expr_type
== EXPR_FUNCTION
5074 && e
->value
.function
.isym
5075 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
5076 e
= e
->value
.function
.actual
->expr
;
5077 gcc_assert (e
->expr_type
== EXPR_VARIABLE
5078 && e
->symtree
!= NULL
5079 && e
->symtree
->n
.sym
== var
);
5080 expr2
= expr2
->value
.op
.op1
;
5081 var_on_left
= false;
5083 gfc_conv_expr (&rse
, expr2
);
5084 gfc_add_block_to_block (&block
, &rse
.pre
);
5088 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
5089 switch (expr2
->value
.function
.isym
->id
)
5109 e
= expr2
->value
.function
.actual
->expr
;
5110 if (e
->expr_type
== EXPR_FUNCTION
5111 && e
->value
.function
.isym
5112 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
5113 e
= e
->value
.function
.actual
->expr
;
5114 gcc_assert (e
->expr_type
== EXPR_VARIABLE
5115 && e
->symtree
!= NULL
5116 && e
->symtree
->n
.sym
== var
);
5118 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
5119 gfc_add_block_to_block (&block
, &rse
.pre
);
5120 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
5122 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
5123 gfc_actual_arglist
*arg
;
5125 gfc_add_modify (&block
, accum
, rse
.expr
);
5126 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
5129 gfc_init_block (&rse
.pre
);
5130 gfc_conv_expr (&rse
, arg
->expr
);
5131 gfc_add_block_to_block (&block
, &rse
.pre
);
5132 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
5134 gfc_add_modify (&block
, accum
, x
);
5140 expr2
= expr2
->value
.function
.actual
->next
->expr
;
5143 lhsaddr
= save_expr (lhsaddr
);
5144 if (TREE_CODE (lhsaddr
) != SAVE_EXPR
5145 && (TREE_CODE (lhsaddr
) != ADDR_EXPR
5146 || !VAR_P (TREE_OPERAND (lhsaddr
, 0))))
5148 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
5149 it even after unsharing function body. */
5150 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
5151 DECL_CONTEXT (var
) = current_function_decl
;
5152 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
,
5153 NULL_TREE
, NULL_TREE
);
5158 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
5159 DECL_CONTEXT (var
) = current_function_decl
;
5160 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
, NULL
,
5162 lse
.expr
= build_fold_indirect_ref_loc (input_location
, lhsaddr
);
5163 compare
= convert (TREE_TYPE (lse
.expr
), compare
);
5164 compare
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
,
5168 if (expr2
->expr_type
== EXPR_VARIABLE
|| compare
)
5171 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
5173 if (((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
5174 == GFC_OMP_ATOMIC_WRITE
)
5175 || (atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_SWAP
)
5180 x
= convert (TREE_TYPE (rhs
),
5181 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
5183 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
5185 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
5188 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
5189 && TREE_CODE (type
) != COMPLEX_TYPE
)
5190 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
5191 TREE_TYPE (TREE_TYPE (rhs
)), x
);
5193 gfc_add_block_to_block (&block
, &lse
.pre
);
5194 gfc_add_block_to_block (&block
, &rse
.pre
);
5196 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
5198 gfc_conv_expr (&vse
, capture_expr1
);
5199 gfc_add_block_to_block (&block
, &vse
.pre
);
5200 gfc_add_block_to_block (&block
, &lse
.pre
);
5203 if (compare
&& else_branch
)
5205 tree var2
= create_tmp_var_raw (boolean_type_node
);
5206 DECL_CONTEXT (var2
) = current_function_decl
;
5207 comp_tgt
= build4 (TARGET_EXPR
, boolean_type_node
, var2
,
5208 boolean_false_node
, NULL
, NULL
);
5209 compare
= fold_build2_loc (input_location
, MODIFY_EXPR
, TREE_TYPE (var2
),
5211 TREE_OPERAND (compare
, 0) = comp_tgt
;
5212 compare
= omit_one_operand_loc (input_location
, boolean_type_node
,
5217 x
= build3_loc (input_location
, COND_EXPR
, type
, compare
,
5218 convert (type
, x
), lse
.expr
);
5220 if (aop
== OMP_ATOMIC
)
5222 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
5223 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
5224 OMP_ATOMIC_WEAK (x
) = atomic_code
->ext
.omp_clauses
->weak
;
5225 gfc_add_expr_to_block (&block
, x
);
5229 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
5230 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
5231 OMP_ATOMIC_WEAK (x
) = atomic_code
->ext
.omp_clauses
->weak
;
5232 if (compare
&& else_branch
)
5234 tree vtmp
= create_tmp_var_raw (TREE_TYPE (x
));
5235 DECL_CONTEXT (vtmp
) = current_function_decl
;
5236 x
= fold_build2_loc (input_location
, MODIFY_EXPR
,
5237 TREE_TYPE (vtmp
), vtmp
, x
);
5238 vtmp
= build4 (TARGET_EXPR
, TREE_TYPE (vtmp
), vtmp
,
5239 build_zero_cst (TREE_TYPE (vtmp
)), NULL
, NULL
);
5240 TREE_OPERAND (x
, 0) = vtmp
;
5241 tree x2
= convert (TREE_TYPE (vse
.expr
), vtmp
);
5242 x2
= fold_build2_loc (input_location
, MODIFY_EXPR
,
5243 TREE_TYPE (vse
.expr
), vse
.expr
, x2
);
5244 x2
= build3_loc (input_location
, COND_EXPR
, void_type_node
, comp_tgt
,
5246 x
= omit_one_operand_loc (input_location
, TREE_TYPE (x2
), x2
, x
);
5247 gfc_add_expr_to_block (&block
, x
);
5251 x
= convert (TREE_TYPE (vse
.expr
), x
);
5252 gfc_add_modify (&block
, vse
.expr
, x
);
5256 return gfc_finish_block (&block
);
5260 gfc_trans_omp_barrier (void)
5262 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
5263 return build_call_expr_loc (input_location
, decl
, 0);
5267 gfc_trans_omp_cancel (gfc_code
*code
)
5270 tree ifc
= boolean_true_node
;
5272 switch (code
->ext
.omp_clauses
->cancel
)
5274 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
5275 case OMP_CANCEL_DO
: mask
= 2; break;
5276 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
5277 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
5278 default: gcc_unreachable ();
5280 gfc_start_block (&block
);
5281 if (code
->ext
.omp_clauses
->if_expr
5282 || code
->ext
.omp_clauses
->if_exprs
[OMP_IF_CANCEL
])
5287 gcc_assert ((code
->ext
.omp_clauses
->if_expr
== NULL
)
5288 ^ (code
->ext
.omp_clauses
->if_exprs
[OMP_IF_CANCEL
] == NULL
));
5289 gfc_init_se (&se
, NULL
);
5290 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->if_expr
!= NULL
5291 ? code
->ext
.omp_clauses
->if_expr
5292 : code
->ext
.omp_clauses
->if_exprs
[OMP_IF_CANCEL
]);
5293 gfc_add_block_to_block (&block
, &se
.pre
);
5294 if_var
= gfc_evaluate_now (se
.expr
, &block
);
5295 gfc_add_block_to_block (&block
, &se
.post
);
5296 tree type
= TREE_TYPE (if_var
);
5297 ifc
= fold_build2_loc (input_location
, NE_EXPR
,
5298 boolean_type_node
, if_var
,
5299 build_zero_cst (type
));
5301 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
5302 tree c_bool_type
= TREE_TYPE (TREE_TYPE (decl
));
5303 ifc
= fold_convert (c_bool_type
, ifc
);
5304 gfc_add_expr_to_block (&block
,
5305 build_call_expr_loc (input_location
, decl
, 2,
5306 build_int_cst (integer_type_node
,
5308 return gfc_finish_block (&block
);
5312 gfc_trans_omp_cancellation_point (gfc_code
*code
)
5315 switch (code
->ext
.omp_clauses
->cancel
)
5317 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
5318 case OMP_CANCEL_DO
: mask
= 2; break;
5319 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
5320 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
5321 default: gcc_unreachable ();
5323 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT
);
5324 return build_call_expr_loc (input_location
, decl
, 1,
5325 build_int_cst (integer_type_node
, mask
));
5329 gfc_trans_omp_critical (gfc_code
*code
)
5332 tree stmt
, name
= NULL_TREE
;
5333 if (code
->ext
.omp_clauses
->critical_name
!= NULL
)
5334 name
= get_identifier (code
->ext
.omp_clauses
->critical_name
);
5335 gfc_start_block (&block
);
5336 stmt
= make_node (OMP_CRITICAL
);
5337 SET_EXPR_LOCATION (stmt
, gfc_get_location (&code
->loc
));
5338 TREE_TYPE (stmt
) = void_type_node
;
5339 OMP_CRITICAL_BODY (stmt
) = gfc_trans_code (code
->block
->next
);
5340 OMP_CRITICAL_NAME (stmt
) = name
;
5341 OMP_CRITICAL_CLAUSES (stmt
) = gfc_trans_omp_clauses (&block
,
5342 code
->ext
.omp_clauses
,
5344 gfc_add_expr_to_block (&block
, stmt
);
5345 return gfc_finish_block (&block
);
5348 typedef struct dovar_init_d
{
5356 gfc_nonrect_loop_expr (stmtblock_t
*pblock
, gfc_se
*sep
, int loop_n
,
5357 gfc_code
*code
, gfc_expr
*expr
, vec
<dovar_init
> *inits
,
5358 int simple
, gfc_expr
*curr_loop_var
)
5361 for (i
= 0; i
< loop_n
; i
++)
5363 gcc_assert (code
->ext
.iterator
->var
->expr_type
== EXPR_VARIABLE
);
5364 if (gfc_find_sym_in_expr (code
->ext
.iterator
->var
->symtree
->n
.sym
, expr
))
5366 code
= code
->block
->next
;
5371 /* Canonical format: TREE_VEC with [var, multiplier, offset]. */
5372 gfc_symbol
*var
= code
->ext
.iterator
->var
->symtree
->n
.sym
;
5374 tree tree_var
= NULL_TREE
;
5375 tree a1
= integer_one_node
;
5376 tree a2
= integer_zero_node
;
5380 /* FIXME: Handle non-const iter steps, cf. PR fortran/110735. */
5381 sorry_at (gfc_get_location (&curr_loop_var
->where
),
5382 "non-rectangular loop nest with non-constant step for %qs",
5383 curr_loop_var
->symtree
->n
.sym
->name
);
5389 FOR_EACH_VEC_ELT (*inits
, ix
, di
)
5392 if (!di
->non_unit_iter
)
5394 tree_var
= di
->init
;
5395 gcc_assert (DECL_P (tree_var
));
5400 /* FIXME: Handle non-const iter steps, cf. PR fortran/110735. */
5401 sorry_at (gfc_get_location (&code
->loc
),
5402 "non-rectangular loop nest with non-constant step "
5403 "for %qs", var
->name
);
5404 inform (gfc_get_location (&expr
->where
), "Used here");
5408 if (tree_var
== NULL_TREE
)
5409 tree_var
= var
->backend_decl
;
5411 if (expr
->expr_type
== EXPR_VARIABLE
)
5412 gcc_assert (expr
->symtree
->n
.sym
== var
);
5413 else if (expr
->expr_type
!= EXPR_OP
5414 || (expr
->value
.op
.op
!= INTRINSIC_TIMES
5415 && expr
->value
.op
.op
!= INTRINSIC_PLUS
5416 && expr
->value
.op
.op
!= INTRINSIC_MINUS
))
5421 gfc_expr
*et
= NULL
, *eo
= NULL
, *e
= expr
;
5422 if (expr
->value
.op
.op
!= INTRINSIC_TIMES
)
5424 if (gfc_find_sym_in_expr (var
, expr
->value
.op
.op1
))
5426 e
= expr
->value
.op
.op1
;
5427 eo
= expr
->value
.op
.op2
;
5431 eo
= expr
->value
.op
.op1
;
5432 e
= expr
->value
.op
.op2
;
5435 if (e
->value
.op
.op
== INTRINSIC_TIMES
)
5437 if (e
->value
.op
.op1
->expr_type
== EXPR_VARIABLE
5438 && e
->value
.op
.op1
->symtree
->n
.sym
== var
)
5439 et
= e
->value
.op
.op2
;
5442 et
= e
->value
.op
.op1
;
5443 gcc_assert (e
->value
.op
.op2
->expr_type
== EXPR_VARIABLE
5444 && e
->value
.op
.op2
->symtree
->n
.sym
== var
);
5448 gcc_assert (e
->expr_type
== EXPR_VARIABLE
&& e
->symtree
->n
.sym
== var
);
5451 gfc_init_se (&se
, NULL
);
5452 gfc_conv_expr_val (&se
, et
);
5453 gfc_add_block_to_block (pblock
, &se
.pre
);
5458 gfc_init_se (&se
, NULL
);
5459 gfc_conv_expr_val (&se
, eo
);
5460 gfc_add_block_to_block (pblock
, &se
.pre
);
5462 if (expr
->value
.op
.op
== INTRINSIC_MINUS
&& expr
->value
.op
.op2
== eo
)
5463 /* outer-var - a2. */
5464 a2
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (a2
), a2
);
5465 else if (expr
->value
.op
.op
== INTRINSIC_MINUS
)
5466 /* a2 - outer-var. */
5467 a1
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (a1
), a1
);
5469 a1
= DECL_P (a1
) ? a1
: gfc_evaluate_now (a1
, pblock
);
5470 a2
= DECL_P (a2
) ? a2
: gfc_evaluate_now (a2
, pblock
);
5473 gfc_init_se (sep
, NULL
);
5474 sep
->expr
= make_tree_vec (3);
5475 TREE_VEC_ELT (sep
->expr
, 0) = tree_var
;
5476 TREE_VEC_ELT (sep
->expr
, 1) = fold_convert (TREE_TYPE (tree_var
), a1
);
5477 TREE_VEC_ELT (sep
->expr
, 2) = fold_convert (TREE_TYPE (tree_var
), a2
);
5483 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
5484 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
5487 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
, orig_decls
;
5488 tree local_dovar
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
5491 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
5492 int i
, collapse
= clauses
->collapse
;
5493 vec
<dovar_init
> inits
= vNULL
;
5496 vec
<tree
, va_heap
, vl_embed
> *saved_doacross_steps
= doacross_steps
;
5497 gfc_expr_list
*tile
= do_clauses
? do_clauses
->tile_list
: clauses
->tile_list
;
5498 gfc_code
*orig_code
= code
;
5500 /* Both collapsed and tiled loops are lowered the same way. In
5501 OpenACC, those clauses are not compatible, so prioritize the tile
5502 clause, if present. */
5506 for (gfc_expr_list
*el
= tile
; el
; el
= el
->next
)
5510 doacross_steps
= NULL
;
5511 if (clauses
->orderedc
)
5512 collapse
= clauses
->orderedc
;
5516 code
= code
->block
->next
;
5517 gcc_assert (code
->op
== EXEC_DO
);
5519 init
= make_tree_vec (collapse
);
5520 cond
= make_tree_vec (collapse
);
5521 incr
= make_tree_vec (collapse
);
5522 orig_decls
= clauses
->ordered
? make_tree_vec (collapse
) : NULL_TREE
;
5526 gfc_start_block (&block
);
5530 /* simd schedule modifier is only useful for composite do simd and other
5531 constructs including that, where gfc_trans_omp_do is only called
5532 on the simd construct and DO's clauses are translated elsewhere. */
5533 do_clauses
->sched_simd
= false;
5535 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
5537 for (i
= 0; i
< collapse
; i
++)
5540 int dovar_found
= 0;
5545 gfc_omp_namelist
*n
= NULL
;
5546 if (op
== EXEC_OMP_SIMD
&& collapse
== 1)
5547 for (n
= clauses
->lists
[OMP_LIST_LINEAR
];
5548 n
!= NULL
; n
= n
->next
)
5549 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
5554 if (n
== NULL
&& op
!= EXEC_OMP_DISTRIBUTE
)
5555 for (n
= clauses
->lists
[OMP_LIST_LASTPRIVATE
];
5556 n
!= NULL
; n
= n
->next
)
5557 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
5563 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
5564 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
5571 /* Evaluate all the expressions in the iterator. */
5572 gfc_init_se (&se
, NULL
);
5573 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
5574 gfc_add_block_to_block (pblock
, &se
.pre
);
5575 local_dovar
= dovar_decl
= dovar
= se
.expr
;
5576 type
= TREE_TYPE (dovar
);
5577 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
5579 gfc_init_se (&se
, NULL
);
5580 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
5581 gfc_add_block_to_block (pblock
, &se
.pre
);
5582 step
= gfc_evaluate_now (se
.expr
, pblock
);
5584 if (TREE_CODE (step
) == INTEGER_CST
)
5585 simple
= tree_int_cst_sgn (step
);
5587 gfc_init_se (&se
, NULL
);
5588 if (!clauses
->non_rectangular
5589 || !gfc_nonrect_loop_expr (pblock
, &se
, i
, orig_code
->block
->next
,
5590 code
->ext
.iterator
->start
, &inits
, simple
,
5591 code
->ext
.iterator
->var
))
5593 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
5594 gfc_add_block_to_block (pblock
, &se
.pre
);
5595 if (!DECL_P (se
.expr
))
5596 se
.expr
= gfc_evaluate_now (se
.expr
, pblock
);
5600 gfc_init_se (&se
, NULL
);
5601 if (!clauses
->non_rectangular
5602 || !gfc_nonrect_loop_expr (pblock
, &se
, i
, orig_code
->block
->next
,
5603 code
->ext
.iterator
->end
, &inits
, simple
,
5604 code
->ext
.iterator
->var
))
5606 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
5607 gfc_add_block_to_block (pblock
, &se
.pre
);
5608 if (!DECL_P (se
.expr
))
5609 se
.expr
= gfc_evaluate_now (se
.expr
, pblock
);
5613 if (!DECL_P (dovar
))
5615 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
5617 if (simple
&& !DECL_P (dovar
))
5619 const char *name
= code
->ext
.iterator
->var
->symtree
->n
.sym
->name
;
5620 local_dovar
= gfc_create_var (type
, name
);
5621 dovar_init e
= {code
->ext
.iterator
->var
->symtree
->n
.sym
,
5622 dovar
, local_dovar
, false};
5623 inits
.safe_push (e
);
5628 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, local_dovar
, from
);
5629 /* The condition should not be folded. */
5630 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
5631 ? LE_EXPR
: GE_EXPR
,
5632 logical_type_node
, local_dovar
,
5634 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
5635 type
, local_dovar
, step
);
5636 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
5639 TREE_VEC_ELT (incr
, i
));
5640 if (orig_decls
&& !clauses
->orderedc
)
5642 else if (orig_decls
)
5643 TREE_VEC_ELT (orig_decls
, i
) = dovar_decl
;
5647 /* STEP is not 1 or -1. Use:
5648 for (count = 0; count < (to + step - from) / step; count++)
5650 dovar = from + count * step;
5654 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
5655 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
5656 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
5658 tmp
= gfc_evaluate_now (tmp
, pblock
);
5659 local_dovar
= gfc_create_var (type
, "count");
5660 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, local_dovar
,
5661 build_int_cst (type
, 0));
5662 /* The condition should not be folded. */
5663 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
5666 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
5668 build_int_cst (type
, 1));
5669 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
5672 TREE_VEC_ELT (incr
, i
));
5674 /* Initialize DOVAR. */
5675 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, local_dovar
,
5677 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
5678 dovar_init e
= {code
->ext
.iterator
->var
->symtree
->n
.sym
,
5680 inits
.safe_push (e
);
5681 if (clauses
->orderedc
)
5683 if (doacross_steps
== NULL
)
5684 vec_safe_grow_cleared (doacross_steps
, clauses
->orderedc
, true);
5685 (*doacross_steps
)[i
] = step
;
5688 TREE_VEC_ELT (orig_decls
, i
) = dovar_decl
;
5691 if (dovar_found
== 3
5692 && op
== EXEC_OMP_SIMD
5694 && local_dovar
!= dovar
)
5696 for (tmp
= omp_clauses
; tmp
; tmp
= OMP_CLAUSE_CHAIN (tmp
))
5697 if (OMP_CLAUSE_CODE (tmp
) == OMP_CLAUSE_LINEAR
5698 && OMP_CLAUSE_DECL (tmp
) == dovar
)
5700 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
5704 if (!dovar_found
&& op
== EXEC_OMP_SIMD
)
5708 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
5709 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
5710 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
5711 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
5712 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
5713 if (local_dovar
!= dovar
)
5717 else if (!dovar_found
&& local_dovar
!= dovar
)
5719 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
5720 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
5721 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
5723 if (dovar_found
> 1)
5728 if (local_dovar
!= dovar
)
5730 /* If dovar is lastprivate, but different counter is used,
5731 dovar += step needs to be added to
5732 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
5733 will have the value on entry of the last loop, rather
5734 than value after iterator increment. */
5735 if (clauses
->orderedc
)
5737 if (clauses
->collapse
<= 1 || i
>= clauses
->collapse
)
5740 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
5742 build_one_cst (type
));
5743 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
,
5745 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
5749 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
5751 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
5753 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5754 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5755 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
5757 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
5760 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5761 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
5763 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
5767 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
5769 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5770 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5771 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
5773 tree l
= build_omp_clause (input_location
,
5774 OMP_CLAUSE_LASTPRIVATE
);
5775 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5776 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (l
) = 1;
5777 OMP_CLAUSE_DECL (l
) = dovar_decl
;
5778 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
5779 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
5781 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
5785 gcc_assert (local_dovar
== dovar
|| c
!= NULL
);
5787 if (local_dovar
!= dovar
)
5789 if (op
!= EXEC_OMP_SIMD
|| dovar_found
== 1)
5790 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
5791 else if (collapse
== 1)
5793 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
5794 OMP_CLAUSE_LINEAR_STEP (tmp
) = build_int_cst (type
, 1);
5795 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
5796 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
5799 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
5800 OMP_CLAUSE_DECL (tmp
) = local_dovar
;
5801 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
5804 if (i
+ 1 < collapse
)
5805 code
= code
->block
->next
;
5808 if (pblock
!= &block
)
5811 gfc_start_block (&block
);
5814 gfc_start_block (&body
);
5816 FOR_EACH_VEC_ELT (inits
, ix
, di
)
5817 gfc_add_modify (&body
, di
->var
, di
->init
);
5820 /* Cycle statement is implemented with a goto. Exit statement must not be
5821 present for this loop. */
5822 cycle_label
= gfc_build_label_decl (NULL_TREE
);
5824 /* Put these labels where they can be found later. */
5826 code
->cycle_label
= cycle_label
;
5827 code
->exit_label
= NULL_TREE
;
5829 /* Main loop body. */
5830 if (clauses
->lists
[OMP_LIST_REDUCTION_INSCAN
])
5832 gfc_code
*code1
, *scan
, *code2
, *tmpcode
;
5833 code1
= tmpcode
= code
->block
->next
;
5834 if (tmpcode
&& tmpcode
->op
!= EXEC_OMP_SCAN
)
5835 while (tmpcode
&& tmpcode
->next
&& tmpcode
->next
->op
!= EXEC_OMP_SCAN
)
5836 tmpcode
= tmpcode
->next
;
5837 scan
= tmpcode
->op
== EXEC_OMP_SCAN
? tmpcode
: tmpcode
->next
;
5839 tmpcode
->next
= NULL
;
5841 gcc_assert (scan
->op
== EXEC_OMP_SCAN
);
5842 location_t loc
= gfc_get_location (&scan
->loc
);
5844 tmp
= code1
!= scan
? gfc_trans_code (code1
) : build_empty_stmt (loc
);
5845 tmp
= build2 (OMP_SCAN
, void_type_node
, tmp
, NULL_TREE
);
5846 SET_EXPR_LOCATION (tmp
, loc
);
5847 gfc_add_expr_to_block (&body
, tmp
);
5848 input_location
= loc
;
5849 tree c
= gfc_trans_omp_clauses (&body
, scan
->ext
.omp_clauses
, scan
->loc
);
5850 tmp
= code2
? gfc_trans_code (code2
) : build_empty_stmt (loc
);
5851 tmp
= build2 (OMP_SCAN
, void_type_node
, tmp
, c
);
5852 SET_EXPR_LOCATION (tmp
, loc
);
5854 tmpcode
->next
= scan
;
5857 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
5858 gfc_add_expr_to_block (&body
, tmp
);
5860 /* Label for cycle statements (if needed). */
5861 if (TREE_USED (cycle_label
))
5863 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
5864 gfc_add_expr_to_block (&body
, tmp
);
5867 /* End of loop body. */
5870 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
5871 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
5872 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
5873 case EXEC_OMP_LOOP
: stmt
= make_node (OMP_LOOP
); break;
5874 case EXEC_OMP_TASKLOOP
: stmt
= make_node (OMP_TASKLOOP
); break;
5875 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
5876 default: gcc_unreachable ();
5879 SET_EXPR_LOCATION (stmt
, gfc_get_location (&orig_code
->loc
));
5880 TREE_TYPE (stmt
) = void_type_node
;
5881 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
5882 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
5883 OMP_FOR_INIT (stmt
) = init
;
5884 OMP_FOR_COND (stmt
) = cond
;
5885 OMP_FOR_INCR (stmt
) = incr
;
5887 OMP_FOR_ORIG_DECLS (stmt
) = orig_decls
;
5888 OMP_FOR_NON_RECTANGULAR (stmt
) = clauses
->non_rectangular
;
5889 gfc_add_expr_to_block (&block
, stmt
);
5891 vec_free (doacross_steps
);
5892 doacross_steps
= saved_doacross_steps
;
5894 return gfc_finish_block (&block
);
5897 /* Translate combined OpenACC 'parallel loop', 'kernels loop', 'serial loop'
5901 gfc_trans_oacc_combined_directive (gfc_code
*code
)
5903 stmtblock_t block
, *pblock
= NULL
;
5904 gfc_omp_clauses construct_clauses
, loop_clauses
;
5905 tree stmt
, oacc_clauses
= NULL_TREE
;
5906 enum tree_code construct_code
;
5907 location_t loc
= input_location
;
5911 case EXEC_OACC_PARALLEL_LOOP
:
5912 construct_code
= OACC_PARALLEL
;
5914 case EXEC_OACC_KERNELS_LOOP
:
5915 construct_code
= OACC_KERNELS
;
5917 case EXEC_OACC_SERIAL_LOOP
:
5918 construct_code
= OACC_SERIAL
;
5924 gfc_start_block (&block
);
5926 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
5927 if (code
->ext
.omp_clauses
!= NULL
)
5929 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
5930 sizeof (construct_clauses
));
5931 loop_clauses
.collapse
= construct_clauses
.collapse
;
5932 loop_clauses
.gang
= construct_clauses
.gang
;
5933 loop_clauses
.gang_static
= construct_clauses
.gang_static
;
5934 loop_clauses
.gang_num_expr
= construct_clauses
.gang_num_expr
;
5935 loop_clauses
.gang_static_expr
= construct_clauses
.gang_static_expr
;
5936 loop_clauses
.vector
= construct_clauses
.vector
;
5937 loop_clauses
.vector_expr
= construct_clauses
.vector_expr
;
5938 loop_clauses
.worker
= construct_clauses
.worker
;
5939 loop_clauses
.worker_expr
= construct_clauses
.worker_expr
;
5940 loop_clauses
.seq
= construct_clauses
.seq
;
5941 loop_clauses
.par_auto
= construct_clauses
.par_auto
;
5942 loop_clauses
.independent
= construct_clauses
.independent
;
5943 loop_clauses
.tile_list
= construct_clauses
.tile_list
;
5944 loop_clauses
.lists
[OMP_LIST_PRIVATE
]
5945 = construct_clauses
.lists
[OMP_LIST_PRIVATE
];
5946 loop_clauses
.lists
[OMP_LIST_REDUCTION
]
5947 = construct_clauses
.lists
[OMP_LIST_REDUCTION
];
5948 construct_clauses
.gang
= false;
5949 construct_clauses
.gang_static
= false;
5950 construct_clauses
.gang_num_expr
= NULL
;
5951 construct_clauses
.gang_static_expr
= NULL
;
5952 construct_clauses
.vector
= false;
5953 construct_clauses
.vector_expr
= NULL
;
5954 construct_clauses
.worker
= false;
5955 construct_clauses
.worker_expr
= NULL
;
5956 construct_clauses
.seq
= false;
5957 construct_clauses
.par_auto
= false;
5958 construct_clauses
.independent
= false;
5959 construct_clauses
.independent
= false;
5960 construct_clauses
.tile_list
= NULL
;
5961 construct_clauses
.lists
[OMP_LIST_PRIVATE
] = NULL
;
5962 if (construct_code
== OACC_KERNELS
)
5963 construct_clauses
.lists
[OMP_LIST_REDUCTION
] = NULL
;
5964 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
5965 code
->loc
, false, true);
5967 if (!loop_clauses
.seq
)
5971 stmt
= gfc_trans_omp_do (code
, EXEC_OACC_LOOP
, pblock
, &loop_clauses
, NULL
);
5972 protected_set_expr_location (stmt
, loc
);
5973 if (TREE_CODE (stmt
) != BIND_EXPR
)
5974 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5977 stmt
= build2_loc (loc
, construct_code
, void_type_node
, stmt
, oacc_clauses
);
5978 gfc_add_expr_to_block (&block
, stmt
);
5979 return gfc_finish_block (&block
);
5983 gfc_trans_omp_depobj (gfc_code
*code
)
5987 gfc_init_se (&se
, NULL
);
5988 gfc_init_block (&block
);
5989 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->depobj
);
5990 gcc_assert (se
.pre
.head
== NULL
&& se
.post
.head
== NULL
);
5991 tree depobj
= se
.expr
;
5992 location_t loc
= EXPR_LOCATION (depobj
);
5993 if (!POINTER_TYPE_P (TREE_TYPE (depobj
)))
5994 depobj
= gfc_build_addr_expr (NULL
, depobj
);
5995 depobj
= fold_convert (build_pointer_type_for_mode (ptr_type_node
,
5996 TYPE_MODE (ptr_type_node
),
5998 gfc_omp_namelist
*n
= code
->ext
.omp_clauses
->lists
[OMP_LIST_DEPEND
];
6002 if (!n
->sym
) /* omp_all_memory. */
6003 var
= null_pointer_node
;
6004 else if (n
->expr
&& n
->expr
->ref
->u
.ar
.type
!= AR_FULL
)
6006 gfc_init_se (&se
, NULL
);
6007 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
6009 gfc_conv_expr_reference (&se
, n
->expr
);
6014 gfc_conv_expr_descriptor (&se
, n
->expr
);
6015 var
= gfc_conv_array_data (se
.expr
);
6017 gfc_add_block_to_block (&block
, &se
.pre
);
6018 gfc_add_block_to_block (&block
, &se
.post
);
6019 gcc_assert (POINTER_TYPE_P (TREE_TYPE (var
)));
6023 var
= gfc_get_symbol_decl (n
->sym
);
6024 if (POINTER_TYPE_P (TREE_TYPE (var
))
6025 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (var
))))
6026 var
= build_fold_indirect_ref (var
);
6027 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (var
)))
6029 var
= gfc_conv_descriptor_data_get (var
);
6030 gcc_assert (POINTER_TYPE_P (TREE_TYPE (var
)));
6032 else if ((n
->sym
->attr
.allocatable
|| n
->sym
->attr
.pointer
)
6033 && n
->sym
->attr
.dummy
)
6034 var
= build_fold_indirect_ref (var
);
6035 else if (!POINTER_TYPE_P (TREE_TYPE (var
))
6036 || (n
->sym
->ts
.f90_type
== BT_VOID
6037 && !POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (var
)))
6038 && !GFC_ARRAY_TYPE_P (TREE_TYPE (TREE_TYPE (var
)))))
6040 TREE_ADDRESSABLE (var
) = 1;
6041 var
= gfc_build_addr_expr (NULL
, var
);
6044 depobj
= save_expr (depobj
);
6045 tree r
= build_fold_indirect_ref_loc (loc
, depobj
);
6046 gfc_add_expr_to_block (&block
,
6047 build2 (MODIFY_EXPR
, void_type_node
, r
, var
));
6050 /* Only one may be set. */
6051 gcc_assert (((int)(n
!= NULL
) + (int)(code
->ext
.omp_clauses
->destroy
)
6052 + (int)(code
->ext
.omp_clauses
->depobj_update
!= OMP_DEPEND_UNSET
))
6054 int k
= -1; /* omp_clauses->destroy */
6055 if (!code
->ext
.omp_clauses
->destroy
)
6056 switch (code
->ext
.omp_clauses
->depobj_update
!= OMP_DEPEND_UNSET
6057 ? code
->ext
.omp_clauses
->depobj_update
: n
->u
.depend_doacross_op
)
6059 case OMP_DEPEND_IN
: k
= GOMP_DEPEND_IN
; break;
6060 case OMP_DEPEND_OUT
: k
= GOMP_DEPEND_OUT
; break;
6061 case OMP_DEPEND_INOUT
: k
= GOMP_DEPEND_INOUT
; break;
6062 case OMP_DEPEND_INOUTSET
: k
= GOMP_DEPEND_INOUTSET
; break;
6063 case OMP_DEPEND_MUTEXINOUTSET
: k
= GOMP_DEPEND_MUTEXINOUTSET
; break;
6064 default: gcc_unreachable ();
6066 tree t
= build_int_cst (ptr_type_node
, k
);
6067 depobj
= build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (depobj
), depobj
,
6068 TYPE_SIZE_UNIT (ptr_type_node
));
6069 depobj
= build_fold_indirect_ref_loc (loc
, depobj
);
6070 gfc_add_expr_to_block (&block
, build2 (MODIFY_EXPR
, void_type_node
, depobj
, t
));
6072 return gfc_finish_block (&block
);
6076 gfc_trans_omp_error (gfc_code
*code
)
6081 bool fatal
= code
->ext
.omp_clauses
->severity
== OMP_SEVERITY_FATAL
;
6082 tree fndecl
= builtin_decl_explicit (fatal
? BUILT_IN_GOMP_ERROR
6083 : BUILT_IN_GOMP_WARNING
);
6084 gfc_start_block (&block
);
6085 gfc_init_se (&se
, NULL
);
6086 if (!code
->ext
.omp_clauses
->message
)
6088 message
= null_pointer_node
;
6089 len
= build_int_cst (size_type_node
, 0);
6093 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->message
);
6095 if (!POINTER_TYPE_P (TREE_TYPE (message
)))
6096 /* To ensure an ARRAY_TYPE is not passed as such. */
6097 message
= gfc_build_addr_expr (NULL
, message
);
6098 len
= se
.string_length
;
6100 gfc_add_block_to_block (&block
, &se
.pre
);
6101 gfc_add_expr_to_block (&block
, build_call_expr_loc (input_location
, fndecl
,
6103 gfc_add_block_to_block (&block
, &se
.post
);
6104 return gfc_finish_block (&block
);
6108 gfc_trans_omp_flush (gfc_code
*code
)
6111 if (!code
->ext
.omp_clauses
6112 || code
->ext
.omp_clauses
->memorder
== OMP_MEMORDER_UNSET
6113 || code
->ext
.omp_clauses
->memorder
== OMP_MEMORDER_SEQ_CST
)
6115 call
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
6116 call
= build_call_expr_loc (input_location
, call
, 0);
6120 enum memmodel mo
= MEMMODEL_LAST
;
6121 switch (code
->ext
.omp_clauses
->memorder
)
6123 case OMP_MEMORDER_ACQ_REL
: mo
= MEMMODEL_ACQ_REL
; break;
6124 case OMP_MEMORDER_RELEASE
: mo
= MEMMODEL_RELEASE
; break;
6125 case OMP_MEMORDER_ACQUIRE
: mo
= MEMMODEL_ACQUIRE
; break;
6126 default: gcc_unreachable (); break;
6128 call
= builtin_decl_explicit (BUILT_IN_ATOMIC_THREAD_FENCE
);
6129 call
= build_call_expr_loc (input_location
, call
, 1,
6130 build_int_cst (integer_type_node
, mo
));
6136 gfc_trans_omp_master (gfc_code
*code
)
6138 tree stmt
= gfc_trans_code (code
->block
->next
);
6139 if (IS_EMPTY_STMT (stmt
))
6141 return build1_v (OMP_MASTER
, stmt
);
6145 gfc_trans_omp_masked (gfc_code
*code
, gfc_omp_clauses
*clauses
)
6148 tree body
= gfc_trans_code (code
->block
->next
);
6149 if (IS_EMPTY_STMT (body
))
6152 clauses
= code
->ext
.omp_clauses
;
6153 gfc_start_block (&block
);
6154 tree omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
6155 tree stmt
= make_node (OMP_MASKED
);
6156 SET_EXPR_LOCATION (stmt
, gfc_get_location (&code
->loc
));
6157 TREE_TYPE (stmt
) = void_type_node
;
6158 OMP_MASKED_BODY (stmt
) = body
;
6159 OMP_MASKED_CLAUSES (stmt
) = omp_clauses
;
6160 gfc_add_expr_to_block (&block
, stmt
);
6161 return gfc_finish_block (&block
);
6166 gfc_trans_omp_ordered (gfc_code
*code
)
6170 if (!code
->ext
.omp_clauses
->simd
)
6171 return gfc_trans_code (code
->block
? code
->block
->next
: NULL
);
6172 code
->ext
.omp_clauses
->threads
= 0;
6174 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, code
->ext
.omp_clauses
,
6176 return build2_loc (input_location
, OMP_ORDERED
, void_type_node
,
6177 code
->block
? gfc_trans_code (code
->block
->next
)
6178 : NULL_TREE
, omp_clauses
);
6182 gfc_trans_omp_parallel (gfc_code
*code
)
6185 tree stmt
, omp_clauses
;
6187 gfc_start_block (&block
);
6188 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6191 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
6192 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6193 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
6195 gfc_add_expr_to_block (&block
, stmt
);
6196 return gfc_finish_block (&block
);
/* Indices of the constituent constructs a combined OpenMP directive is
   split into, and the corresponding one-bit masks used to describe which
   constituents a given directive contains.  */

enum
{
  GFC_OMP_SPLIT_SIMD,
  GFC_OMP_SPLIT_DO,
  GFC_OMP_SPLIT_PARALLEL,
  GFC_OMP_SPLIT_DISTRIBUTE,
  GFC_OMP_SPLIT_TEAMS,
  GFC_OMP_SPLIT_TARGET,
  GFC_OMP_SPLIT_TASKLOOP,
  GFC_OMP_SPLIT_MASKED,
  GFC_OMP_SPLIT_NUM
};

enum
{
  GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
  GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
  GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
  GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
  GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
  GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET),
  GFC_OMP_MASK_TASKLOOP = (1 << GFC_OMP_SPLIT_TASKLOOP),
  GFC_OMP_MASK_MASKED = (1 << GFC_OMP_SPLIT_MASKED)
};
6224 /* If a var is in lastprivate/firstprivate/reduction but not in a
6225 data mapping/sharing clause, add it to 'map(tofrom:' if is_target
6226 and to 'shared' otherwise. */
6228 gfc_add_clause_implicitly (gfc_omp_clauses
*clauses_out
,
6229 gfc_omp_clauses
*clauses_in
,
6230 bool is_target
, bool is_parallel_do
)
6232 int clauselist_to_add
= is_target
? OMP_LIST_MAP
: OMP_LIST_SHARED
;
6233 gfc_omp_namelist
*tail
= NULL
;
6234 for (int i
= 0; i
< 5; ++i
)
6236 gfc_omp_namelist
*n
;
6239 case 0: n
= clauses_in
->lists
[OMP_LIST_FIRSTPRIVATE
]; break;
6240 case 1: n
= clauses_in
->lists
[OMP_LIST_LASTPRIVATE
]; break;
6241 case 2: n
= clauses_in
->lists
[OMP_LIST_REDUCTION
]; break;
6242 case 3: n
= clauses_in
->lists
[OMP_LIST_REDUCTION_INSCAN
]; break;
6243 case 4: n
= clauses_in
->lists
[OMP_LIST_REDUCTION_TASK
]; break;
6244 default: gcc_unreachable ();
6246 for (; n
!= NULL
; n
= n
->next
)
6248 gfc_omp_namelist
*n2
, **n_firstp
= NULL
, **n_lastp
= NULL
;
6249 for (int j
= 0; j
< 6; ++j
)
6251 gfc_omp_namelist
**n2ref
= NULL
, *prev2
= NULL
;
6255 n2ref
= &clauses_out
->lists
[clauselist_to_add
];
6258 n2ref
= &clauses_out
->lists
[OMP_LIST_FIRSTPRIVATE
];
6262 n2ref
= &clauses_in
->lists
[OMP_LIST_LASTPRIVATE
];
6264 n2ref
= &clauses_out
->lists
[OMP_LIST_LASTPRIVATE
];
6266 case 3: n2ref
= &clauses_out
->lists
[OMP_LIST_REDUCTION
]; break;
6268 n2ref
= &clauses_out
->lists
[OMP_LIST_REDUCTION_INSCAN
];
6271 n2ref
= &clauses_out
->lists
[OMP_LIST_REDUCTION_TASK
];
6273 default: gcc_unreachable ();
6275 for (n2
= *n2ref
; n2
!= NULL
; prev2
= n2
, n2
= n2
->next
)
6276 if (n2
->sym
== n
->sym
)
6280 if (j
== 0 /* clauselist_to_add */)
6281 break; /* Already present. */
6282 if (j
== 1 /* OMP_LIST_FIRSTPRIVATE */)
6284 n_firstp
= prev2
? &prev2
->next
: n2ref
;
6287 if (j
== 2 /* OMP_LIST_LASTPRIVATE */)
6289 n_lastp
= prev2
? &prev2
->next
: n2ref
;
6295 if (n_firstp
&& n_lastp
)
6297 /* For parallel do, GCC puts firstprivate/lastprivate
6301 *n_firstp
= (*n_firstp
)->next
;
6303 *n_lastp
= (*n_lastp
)->next
;
6305 else if (is_target
&& n_lastp
)
6307 else if (n2
|| n_firstp
|| n_lastp
)
6309 if (clauses_out
->lists
[clauselist_to_add
]
6310 && (clauses_out
->lists
[clauselist_to_add
]
6311 == clauses_in
->lists
[clauselist_to_add
]))
6313 gfc_omp_namelist
*p
= NULL
;
6314 for (n2
= clauses_in
->lists
[clauselist_to_add
]; n2
; n2
= n2
->next
)
6318 p
->next
= gfc_get_omp_namelist ();
6323 p
= gfc_get_omp_namelist ();
6324 clauses_out
->lists
[clauselist_to_add
] = p
;
6331 tail
= clauses_out
->lists
[clauselist_to_add
];
6332 for (; tail
&& tail
->next
; tail
= tail
->next
)
6335 n2
= gfc_get_omp_namelist ();
6336 n2
->where
= n
->where
;
6339 n2
->u
.map_op
= OMP_MAP_TOFROM
;
6346 clauses_out
->lists
[clauselist_to_add
] = n2
;
6351 /* Kind of opposite to above, add firstprivate to CLAUSES_OUT if it is mapped
6352 in CLAUSES_IN's FIRSTPRIVATE list but not its MAP list. */
6355 gfc_add_firstprivate_if_unmapped (gfc_omp_clauses
*clauses_out
,
6356 gfc_omp_clauses
*clauses_in
)
6358 gfc_omp_namelist
*n
= clauses_in
->lists
[OMP_LIST_FIRSTPRIVATE
];
6359 gfc_omp_namelist
**tail
= NULL
;
6361 for (; n
!= NULL
; n
= n
->next
)
6363 gfc_omp_namelist
*n2
= clauses_out
->lists
[OMP_LIST_MAP
];
6364 for (; n2
!= NULL
; n2
= n2
->next
)
6365 if (n
->sym
== n2
->sym
)
6369 gfc_omp_namelist
*dup
= gfc_get_omp_namelist ();
6374 tail
= &clauses_out
->lists
[OMP_LIST_FIRSTPRIVATE
];
6375 while (*tail
&& (*tail
)->next
)
6376 tail
= &(*tail
)->next
;
6379 tail
= &(*tail
)->next
;
6385 gfc_free_split_omp_clauses (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
6387 for (int i
= 0; i
< GFC_OMP_SPLIT_NUM
; ++i
)
6388 for (int j
= 0; j
< OMP_LIST_NUM
; ++j
)
6389 if (clausesa
[i
].lists
[j
] && clausesa
[i
].lists
[j
] != code
->ext
.omp_clauses
->lists
[j
])
6390 for (gfc_omp_namelist
*n
= clausesa
[i
].lists
[j
]; n
;)
6392 gfc_omp_namelist
*p
= n
;
6399 gfc_split_omp_clauses (gfc_code
*code
,
6400 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
6402 int mask
= 0, innermost
= 0;
6403 bool is_loop
= false;
6404 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
6407 case EXEC_OMP_DISTRIBUTE
:
6408 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
6410 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
6411 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
6412 innermost
= GFC_OMP_SPLIT_DO
;
6414 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
6415 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
6416 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
6417 innermost
= GFC_OMP_SPLIT_SIMD
;
6419 case EXEC_OMP_DISTRIBUTE_SIMD
:
6420 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
6421 innermost
= GFC_OMP_SPLIT_SIMD
;
6425 innermost
= GFC_OMP_SPLIT_DO
;
6427 case EXEC_OMP_DO_SIMD
:
6428 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
6429 innermost
= GFC_OMP_SPLIT_SIMD
;
6431 case EXEC_OMP_PARALLEL
:
6432 innermost
= GFC_OMP_SPLIT_PARALLEL
;
6434 case EXEC_OMP_PARALLEL_DO
:
6435 case EXEC_OMP_PARALLEL_LOOP
:
6436 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
6437 innermost
= GFC_OMP_SPLIT_DO
;
6439 case EXEC_OMP_PARALLEL_DO_SIMD
:
6440 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
6441 innermost
= GFC_OMP_SPLIT_SIMD
;
6443 case EXEC_OMP_PARALLEL_MASKED
:
6444 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_MASKED
;
6445 innermost
= GFC_OMP_SPLIT_MASKED
;
6447 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP
:
6448 mask
= (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_MASKED
6449 | GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
);
6450 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
6452 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP
:
6453 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
6454 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
6456 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP_SIMD
:
6457 mask
= (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_MASKED
6458 | GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
);
6459 innermost
= GFC_OMP_SPLIT_SIMD
;
6461 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP_SIMD
:
6462 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
6463 innermost
= GFC_OMP_SPLIT_SIMD
;
6466 innermost
= GFC_OMP_SPLIT_SIMD
;
6468 case EXEC_OMP_TARGET
:
6469 innermost
= GFC_OMP_SPLIT_TARGET
;
6471 case EXEC_OMP_TARGET_PARALLEL
:
6472 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
;
6473 innermost
= GFC_OMP_SPLIT_PARALLEL
;
6475 case EXEC_OMP_TARGET_PARALLEL_DO
:
6476 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
6477 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
6478 innermost
= GFC_OMP_SPLIT_DO
;
6480 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
6481 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
6482 | GFC_OMP_MASK_SIMD
;
6483 innermost
= GFC_OMP_SPLIT_SIMD
;
6485 case EXEC_OMP_TARGET_SIMD
:
6486 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_SIMD
;
6487 innermost
= GFC_OMP_SPLIT_SIMD
;
6489 case EXEC_OMP_TARGET_TEAMS
:
6490 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
6491 innermost
= GFC_OMP_SPLIT_TEAMS
;
6493 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
6494 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
6495 | GFC_OMP_MASK_DISTRIBUTE
;
6496 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
6498 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
6499 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
6500 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
6501 innermost
= GFC_OMP_SPLIT_DO
;
6503 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
6504 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
6505 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
6506 innermost
= GFC_OMP_SPLIT_SIMD
;
6508 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
6509 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
6510 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
6511 innermost
= GFC_OMP_SPLIT_SIMD
;
6513 case EXEC_OMP_TARGET_TEAMS_LOOP
:
6514 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DO
;
6515 innermost
= GFC_OMP_SPLIT_DO
;
6517 case EXEC_OMP_MASKED_TASKLOOP
:
6518 mask
= GFC_OMP_MASK_MASKED
| GFC_OMP_MASK_TASKLOOP
;
6519 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
6521 case EXEC_OMP_MASTER_TASKLOOP
:
6522 case EXEC_OMP_TASKLOOP
:
6523 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
6525 case EXEC_OMP_MASKED_TASKLOOP_SIMD
:
6526 mask
= GFC_OMP_MASK_MASKED
| GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
6527 innermost
= GFC_OMP_SPLIT_SIMD
;
6529 case EXEC_OMP_MASTER_TASKLOOP_SIMD
:
6530 case EXEC_OMP_TASKLOOP_SIMD
:
6531 mask
= GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
6532 innermost
= GFC_OMP_SPLIT_SIMD
;
6534 case EXEC_OMP_TEAMS
:
6535 innermost
= GFC_OMP_SPLIT_TEAMS
;
6537 case EXEC_OMP_TEAMS_DISTRIBUTE
:
6538 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
6539 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
6541 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
6542 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
6543 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
6544 innermost
= GFC_OMP_SPLIT_DO
;
6546 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
6547 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
6548 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
6549 innermost
= GFC_OMP_SPLIT_SIMD
;
6551 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
6552 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
6553 innermost
= GFC_OMP_SPLIT_SIMD
;
6555 case EXEC_OMP_TEAMS_LOOP
:
6556 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DO
;
6557 innermost
= GFC_OMP_SPLIT_DO
;
6564 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
6567 /* Loops are similar to DO but still a bit different. */
6571 case EXEC_OMP_PARALLEL_LOOP
:
6572 case EXEC_OMP_TEAMS_LOOP
:
6573 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
6574 case EXEC_OMP_TARGET_TEAMS_LOOP
:
6579 if (code
->ext
.omp_clauses
!= NULL
)
6581 if (mask
& GFC_OMP_MASK_TARGET
)
6583 /* First the clauses that are unique to some constructs. */
6584 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
6585 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
6586 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_IS_DEVICE_PTR
]
6587 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IS_DEVICE_PTR
];
6588 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_HAS_DEVICE_ADDR
]
6589 = code
->ext
.omp_clauses
->lists
[OMP_LIST_HAS_DEVICE_ADDR
];
6590 clausesa
[GFC_OMP_SPLIT_TARGET
].device
6591 = code
->ext
.omp_clauses
->device
;
6592 clausesa
[GFC_OMP_SPLIT_TARGET
].thread_limit
6593 = code
->ext
.omp_clauses
->thread_limit
;
6594 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_USES_ALLOCATORS
]
6595 = code
->ext
.omp_clauses
->lists
[OMP_LIST_USES_ALLOCATORS
];
6596 for (int i
= 0; i
< OMP_DEFAULTMAP_CAT_NUM
; i
++)
6597 clausesa
[GFC_OMP_SPLIT_TARGET
].defaultmap
[i
]
6598 = code
->ext
.omp_clauses
->defaultmap
[i
];
6599 clausesa
[GFC_OMP_SPLIT_TARGET
].if_exprs
[OMP_IF_TARGET
]
6600 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TARGET
];
6601 /* And this is copied to all. */
6602 clausesa
[GFC_OMP_SPLIT_TARGET
].if_expr
6603 = code
->ext
.omp_clauses
->if_expr
;
6604 clausesa
[GFC_OMP_SPLIT_TARGET
].nowait
6605 = code
->ext
.omp_clauses
->nowait
;
6607 if (mask
& GFC_OMP_MASK_TEAMS
)
6609 /* First the clauses that are unique to some constructs. */
6610 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams_lower
6611 = code
->ext
.omp_clauses
->num_teams_lower
;
6612 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams_upper
6613 = code
->ext
.omp_clauses
->num_teams_upper
;
6614 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
6615 = code
->ext
.omp_clauses
->thread_limit
;
6616 /* Shared and default clauses are allowed on parallel, teams
6618 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
6619 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
6620 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
6621 = code
->ext
.omp_clauses
->default_sharing
;
6623 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
6625 /* First the clauses that are unique to some constructs. */
6626 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
6627 = code
->ext
.omp_clauses
->dist_sched_kind
;
6628 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
6629 = code
->ext
.omp_clauses
->dist_chunk_size
;
6630 /* Duplicate collapse. */
6631 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
6632 = code
->ext
.omp_clauses
->collapse
;
6633 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].order_concurrent
6634 = code
->ext
.omp_clauses
->order_concurrent
;
6635 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].order_unconstrained
6636 = code
->ext
.omp_clauses
->order_unconstrained
;
6637 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].order_reproducible
6638 = code
->ext
.omp_clauses
->order_reproducible
;
6640 if (mask
& GFC_OMP_MASK_PARALLEL
)
6642 /* First the clauses that are unique to some constructs. */
6643 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
6644 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
6645 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
6646 = code
->ext
.omp_clauses
->num_threads
;
6647 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
6648 = code
->ext
.omp_clauses
->proc_bind
;
6649 /* Shared and default clauses are allowed on parallel, teams
6651 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
6652 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
6653 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
6654 = code
->ext
.omp_clauses
->default_sharing
;
6655 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_exprs
[OMP_IF_PARALLEL
]
6656 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_PARALLEL
];
6657 /* And this is copied to all. */
6658 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
6659 = code
->ext
.omp_clauses
->if_expr
;
6661 if (mask
& GFC_OMP_MASK_MASKED
)
6662 clausesa
[GFC_OMP_SPLIT_MASKED
].filter
= code
->ext
.omp_clauses
->filter
;
6663 if ((mask
& GFC_OMP_MASK_DO
) && !is_loop
)
6665 /* First the clauses that are unique to some constructs. */
6666 clausesa
[GFC_OMP_SPLIT_DO
].ordered
6667 = code
->ext
.omp_clauses
->ordered
;
6668 clausesa
[GFC_OMP_SPLIT_DO
].orderedc
6669 = code
->ext
.omp_clauses
->orderedc
;
6670 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
6671 = code
->ext
.omp_clauses
->sched_kind
;
6672 if (innermost
== GFC_OMP_SPLIT_SIMD
)
6673 clausesa
[GFC_OMP_SPLIT_DO
].sched_simd
6674 = code
->ext
.omp_clauses
->sched_simd
;
6675 clausesa
[GFC_OMP_SPLIT_DO
].sched_monotonic
6676 = code
->ext
.omp_clauses
->sched_monotonic
;
6677 clausesa
[GFC_OMP_SPLIT_DO
].sched_nonmonotonic
6678 = code
->ext
.omp_clauses
->sched_nonmonotonic
;
6679 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
6680 = code
->ext
.omp_clauses
->chunk_size
;
6681 clausesa
[GFC_OMP_SPLIT_DO
].nowait
6682 = code
->ext
.omp_clauses
->nowait
;
6684 if (mask
& GFC_OMP_MASK_DO
)
6686 clausesa
[GFC_OMP_SPLIT_DO
].bind
6687 = code
->ext
.omp_clauses
->bind
;
6688 /* Duplicate collapse. */
6689 clausesa
[GFC_OMP_SPLIT_DO
].collapse
6690 = code
->ext
.omp_clauses
->collapse
;
6691 clausesa
[GFC_OMP_SPLIT_DO
].order_concurrent
6692 = code
->ext
.omp_clauses
->order_concurrent
;
6693 clausesa
[GFC_OMP_SPLIT_DO
].order_unconstrained
6694 = code
->ext
.omp_clauses
->order_unconstrained
;
6695 clausesa
[GFC_OMP_SPLIT_DO
].order_reproducible
6696 = code
->ext
.omp_clauses
->order_reproducible
;
6698 if (mask
& GFC_OMP_MASK_SIMD
)
6700 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
6701 = code
->ext
.omp_clauses
->safelen_expr
;
6702 clausesa
[GFC_OMP_SPLIT_SIMD
].simdlen_expr
6703 = code
->ext
.omp_clauses
->simdlen_expr
;
6704 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
6705 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
6706 /* Duplicate collapse. */
6707 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
6708 = code
->ext
.omp_clauses
->collapse
;
6709 clausesa
[GFC_OMP_SPLIT_SIMD
].if_exprs
[OMP_IF_SIMD
]
6710 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_SIMD
];
6711 clausesa
[GFC_OMP_SPLIT_SIMD
].order_concurrent
6712 = code
->ext
.omp_clauses
->order_concurrent
;
6713 clausesa
[GFC_OMP_SPLIT_SIMD
].order_unconstrained
6714 = code
->ext
.omp_clauses
->order_unconstrained
;
6715 clausesa
[GFC_OMP_SPLIT_SIMD
].order_reproducible
6716 = code
->ext
.omp_clauses
->order_reproducible
;
6717 /* And this is copied to all. */
6718 clausesa
[GFC_OMP_SPLIT_SIMD
].if_expr
6719 = code
->ext
.omp_clauses
->if_expr
;
6721 if (mask
& GFC_OMP_MASK_TASKLOOP
)
6723 /* First the clauses that are unique to some constructs. */
6724 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].nogroup
6725 = code
->ext
.omp_clauses
->nogroup
;
6726 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].grainsize
6727 = code
->ext
.omp_clauses
->grainsize
;
6728 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].grainsize_strict
6729 = code
->ext
.omp_clauses
->grainsize_strict
;
6730 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].num_tasks
6731 = code
->ext
.omp_clauses
->num_tasks
;
6732 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].num_tasks_strict
6733 = code
->ext
.omp_clauses
->num_tasks_strict
;
6734 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].priority
6735 = code
->ext
.omp_clauses
->priority
;
6736 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].final_expr
6737 = code
->ext
.omp_clauses
->final_expr
;
6738 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].untied
6739 = code
->ext
.omp_clauses
->untied
;
6740 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].mergeable
6741 = code
->ext
.omp_clauses
->mergeable
;
6742 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_exprs
[OMP_IF_TASKLOOP
]
6743 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TASKLOOP
];
6744 /* And this is copied to all. */
6745 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_expr
6746 = code
->ext
.omp_clauses
->if_expr
;
6747 /* Shared and default clauses are allowed on parallel, teams
6749 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_SHARED
]
6750 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
6751 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].default_sharing
6752 = code
->ext
.omp_clauses
->default_sharing
;
6753 /* Duplicate collapse. */
6754 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].collapse
6755 = code
->ext
.omp_clauses
->collapse
;
6757 /* Private clause is supported on all constructs but master/masked,
6758 it is enough to put it on the innermost one except for master/masked. For
6759 !$ omp parallel do put it on parallel though,
6760 as that's what we did for OpenMP 3.1. */
6761 clausesa
[((innermost
== GFC_OMP_SPLIT_DO
&& !is_loop
)
6762 || code
->op
== EXEC_OMP_PARALLEL_MASTER
6763 || code
->op
== EXEC_OMP_PARALLEL_MASKED
)
6764 ? (int) GFC_OMP_SPLIT_PARALLEL
6765 : innermost
].lists
[OMP_LIST_PRIVATE
]
6766 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
6767 /* Firstprivate clause is supported on all constructs but
6768 simd and masked/master. Put it on the outermost of those and duplicate
6769 on parallel and teams. */
6770 if (mask
& GFC_OMP_MASK_TARGET
)
6771 gfc_add_firstprivate_if_unmapped (&clausesa
[GFC_OMP_SPLIT_TARGET
],
6772 code
->ext
.omp_clauses
);
6773 if (mask
& GFC_OMP_MASK_TEAMS
)
6774 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
6775 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6776 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
6777 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
6778 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6779 if (mask
& GFC_OMP_MASK_TASKLOOP
)
6780 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_FIRSTPRIVATE
]
6781 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6782 if ((mask
& GFC_OMP_MASK_PARALLEL
)
6783 && !(mask
& GFC_OMP_MASK_TASKLOOP
))
6784 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
6785 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6786 else if ((mask
& GFC_OMP_MASK_DO
) && !is_loop
)
6787 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
6788 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6789 /* Lastprivate is allowed on distribute, do, simd, taskloop and loop.
6790 In parallel do{, simd} we actually want to put it on
6791 parallel rather than do. */
6792 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
6793 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_LASTPRIVATE
]
6794 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6795 if (mask
& GFC_OMP_MASK_TASKLOOP
)
6796 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_LASTPRIVATE
]
6797 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6798 if ((mask
& GFC_OMP_MASK_PARALLEL
) && !is_loop
6799 && !(mask
& GFC_OMP_MASK_TASKLOOP
))
6800 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
6801 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6802 else if (mask
& GFC_OMP_MASK_DO
)
6803 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
6804 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6805 if (mask
& GFC_OMP_MASK_SIMD
)
6806 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
6807 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6808 /* Reduction is allowed on simd, do, parallel, teams, taskloop, and loop.
6809 Duplicate it on all of them, but
6810 - omit on do if parallel is present;
6811 - omit on task and parallel if loop is present;
6812 additionally, inscan applies to do/simd only. */
6813 for (int i
= OMP_LIST_REDUCTION
; i
<= OMP_LIST_REDUCTION_TASK
; i
++)
6815 if (mask
& GFC_OMP_MASK_TASKLOOP
6816 && i
!= OMP_LIST_REDUCTION_INSCAN
)
6817 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[i
]
6818 = code
->ext
.omp_clauses
->lists
[i
];
6819 if (mask
& GFC_OMP_MASK_TEAMS
6820 && i
!= OMP_LIST_REDUCTION_INSCAN
6822 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[i
]
6823 = code
->ext
.omp_clauses
->lists
[i
];
6824 if (mask
& GFC_OMP_MASK_PARALLEL
6825 && i
!= OMP_LIST_REDUCTION_INSCAN
6826 && !(mask
& GFC_OMP_MASK_TASKLOOP
)
6828 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[i
]
6829 = code
->ext
.omp_clauses
->lists
[i
];
6830 else if (mask
& GFC_OMP_MASK_DO
)
6831 clausesa
[GFC_OMP_SPLIT_DO
].lists
[i
]
6832 = code
->ext
.omp_clauses
->lists
[i
];
6833 if (mask
& GFC_OMP_MASK_SIMD
)
6834 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[i
]
6835 = code
->ext
.omp_clauses
->lists
[i
];
6837 if (mask
& GFC_OMP_MASK_TARGET
)
6838 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_IN_REDUCTION
]
6839 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IN_REDUCTION
];
6840 if (mask
& GFC_OMP_MASK_TASKLOOP
)
6841 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_IN_REDUCTION
]
6842 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IN_REDUCTION
];
6843 /* Linear clause is supported on do and simd,
6844 put it on the innermost one. */
6845 clausesa
[innermost
].lists
[OMP_LIST_LINEAR
]
6846 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
6848 /* Propagate firstprivate/lastprivate/reduction vars to
6849 shared (parallel, teams) and map-tofrom (target). */
6850 if (mask
& GFC_OMP_MASK_TARGET
)
6851 gfc_add_clause_implicitly (&clausesa
[GFC_OMP_SPLIT_TARGET
],
6852 code
->ext
.omp_clauses
, true, false);
6853 if ((mask
& GFC_OMP_MASK_PARALLEL
) && innermost
!= GFC_OMP_MASK_PARALLEL
)
6854 gfc_add_clause_implicitly (&clausesa
[GFC_OMP_SPLIT_PARALLEL
],
6855 code
->ext
.omp_clauses
, false,
6856 mask
& GFC_OMP_MASK_DO
);
6857 if (mask
& GFC_OMP_MASK_TEAMS
&& innermost
!= GFC_OMP_MASK_TEAMS
)
6858 gfc_add_clause_implicitly (&clausesa
[GFC_OMP_SPLIT_TEAMS
],
6859 code
->ext
.omp_clauses
, false, false);
6860 if (((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
6861 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
6863 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
6865 /* Distribute allocate clause to do, parallel, distribute, teams, target
6866 and taskloop. The code below iterates over variables in the
6867 allocate list and checks if that available is also in any
6868 privatization clause on those construct. If yes, then we add it
6869 to the list of 'allocate'ed variables for that construct. If a
6870 variable is found in none of them then we issue an error. */
6872 if (code
->ext
.omp_clauses
->lists
[OMP_LIST_ALLOCATE
])
6874 gfc_omp_namelist
*alloc_nl
, *priv_nl
;
6875 gfc_omp_namelist
*tails
[GFC_OMP_SPLIT_NUM
];
6876 for (alloc_nl
= code
->ext
.omp_clauses
->lists
[OMP_LIST_ALLOCATE
];
6877 alloc_nl
; alloc_nl
= alloc_nl
->next
)
6880 for (int i
= GFC_OMP_SPLIT_DO
; i
<= GFC_OMP_SPLIT_TASKLOOP
; i
++)
6882 gfc_omp_namelist
*p
;
6884 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
6888 case OMP_LIST_PRIVATE
:
6889 case OMP_LIST_FIRSTPRIVATE
:
6890 case OMP_LIST_LASTPRIVATE
:
6891 case OMP_LIST_REDUCTION
:
6892 case OMP_LIST_REDUCTION_INSCAN
:
6893 case OMP_LIST_REDUCTION_TASK
:
6894 case OMP_LIST_IN_REDUCTION
:
6895 case OMP_LIST_TASK_REDUCTION
:
6896 case OMP_LIST_LINEAR
:
6897 for (priv_nl
= clausesa
[i
].lists
[list
]; priv_nl
;
6898 priv_nl
= priv_nl
->next
)
6899 if (alloc_nl
->sym
== priv_nl
->sym
)
6902 p
= gfc_get_omp_namelist ();
6903 p
->sym
= alloc_nl
->sym
;
6904 p
->expr
= alloc_nl
->expr
;
6905 p
->u
.align
= alloc_nl
->u
.align
;
6906 p
->u2
.allocator
= alloc_nl
->u2
.allocator
;
6907 p
->where
= alloc_nl
->where
;
6908 if (clausesa
[i
].lists
[OMP_LIST_ALLOCATE
] == NULL
)
6910 clausesa
[i
].lists
[OMP_LIST_ALLOCATE
] = p
;
6916 tails
[i
] = tails
[i
]->next
;
6926 gfc_error ("%qs specified in 'allocate' clause at %L but not "
6927 "in an explicit privatization clause",
6928 alloc_nl
->sym
->name
, &alloc_nl
->where
);
/* Translate a combined "!$omp do simd" worksharing construct into a GENERIC
   OMP_FOR node.  Splits the combined clauses (via gfc_split_omp_clauses) when
   the caller did not pre-split them, lowers the inner SIMD loop through
   gfc_trans_omp_do, and wraps the result in an OMP_FOR carrying the DO-part
   clauses.
   NOTE(review): this chunk is a lossy extraction — some original lines
   (return type, braces, conditionals) are missing here; code text is kept
   byte-identical.  */
6934 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
6935 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
/* Local buffer used only when the caller passes clausesa == NULL.  */
6938 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
6939 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
6940 bool free_clausesa
= false;
6943 gfc_start_block (&block
);
6945 gfc_init_block (&block
);
/* Split the combined-construct clauses ourselves if not pre-split;
   remember to free them at the end.  */
6947 if (clausesa
== NULL
)
6949 clausesa
= clausesa_buf
;
6950 gfc_split_omp_clauses (code
, clausesa
);
6951 free_clausesa
= true;
6955 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
/* Lower the inner SIMD loop; variable setup goes into pblock when the
   caller supplied one, otherwise into our own block.  */
6956 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
6957 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
6960 if (TREE_CODE (body
) != BIND_EXPR
)
6961 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
6965 else if (TREE_CODE (body
) != BIND_EXPR
)
6966 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
/* Build the outer OMP_FOR node holding the DO-split clauses.  */
6969 stmt
= make_node (OMP_FOR
);
6970 SET_EXPR_LOCATION (stmt
, gfc_get_location (&code
->loc
));
6971 TREE_TYPE (stmt
) = void_type_node
;
6972 OMP_FOR_BODY (stmt
) = body
;
6973 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
6977 gfc_add_expr_to_block (&block
, stmt
);
6979 gfc_free_split_omp_clauses (code
, clausesa
);
6980 return gfc_finish_block (&block
);
/* Translate "!$omp parallel do" (or "parallel loop" when IS_LOOP) into an
   OMP_PARALLEL node marked combined, containing the lowered DO/LOOP.
   Splits the combined clauses when the caller did not.
   NOTE(review): lossy extraction — some structural lines missing; code text
   kept byte-identical.  */
6984 gfc_trans_omp_parallel_do (gfc_code
*code
, bool is_loop
, stmtblock_t
*pblock
,
6985 gfc_omp_clauses
*clausesa
)
6987 stmtblock_t block
, *new_pblock
= pblock
;
6988 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
6989 tree stmt
, omp_clauses
= NULL_TREE
;
6990 bool free_clausesa
= false;
6993 gfc_start_block (&block
);
6995 gfc_init_block (&block
);
/* Split combined clauses locally if not pre-split by the caller.  */
6997 if (clausesa
== NULL
)
6999 clausesa
= clausesa_buf
;
7000 gfc_split_omp_clauses (code
, clausesa
);
7001 free_clausesa
= true;
7004 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
/* Without ordered and with non-static schedule, loop setup may go into
   our own block rather than the caller's.  */
7008 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
7009 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
7010 new_pblock
= &block
;
7014 stmt
= gfc_trans_omp_do (code
, is_loop
? EXEC_OMP_LOOP
: EXEC_OMP_DO
,
7015 new_pblock
, &clausesa
[GFC_OMP_SPLIT_DO
],
7019 if (TREE_CODE (stmt
) != BIND_EXPR
)
7020 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7024 else if (TREE_CODE (stmt
) != BIND_EXPR
)
7025 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
/* Wrap in OMP_PARALLEL and flag it as part of a combined construct.  */
7026 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
7027 void_type_node
, stmt
, omp_clauses
);
7028 OMP_PARALLEL_COMBINED (stmt
) = 1;
7029 gfc_add_expr_to_block (&block
, stmt
);
7031 gfc_free_split_omp_clauses (code
, clausesa
);
7032 return gfc_finish_block (&block
);
/* Translate "!$omp parallel do simd": lower the inner "do simd" via
   gfc_trans_omp_do_simd and wrap it in a combined OMP_PARALLEL.
   NOTE(review): lossy extraction — some structural lines missing; code text
   kept byte-identical.  */
7036 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
7037 gfc_omp_clauses
*clausesa
)
7040 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
7041 tree stmt
, omp_clauses
= NULL_TREE
;
7042 bool free_clausesa
= false;
7045 gfc_start_block (&block
);
7047 gfc_init_block (&block
);
/* Split combined clauses locally if the caller did not pre-split.  */
7049 if (clausesa
== NULL
)
7051 clausesa
= clausesa_buf
;
7052 gfc_split_omp_clauses (code
, clausesa
);
7053 free_clausesa
= true;
7057 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
7061 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
7064 if (TREE_CODE (stmt
) != BIND_EXPR
)
7065 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7069 else if (TREE_CODE (stmt
) != BIND_EXPR
)
7070 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
/* Wrap in OMP_PARALLEL; mark as combined construct.  */
7073 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
7074 void_type_node
, stmt
, omp_clauses
);
7075 OMP_PARALLEL_COMBINED (stmt
) = 1;
7077 gfc_add_expr_to_block (&block
, stmt
);
7079 gfc_free_split_omp_clauses (code
, clausesa
);
7080 return gfc_finish_block (&block
);
/* Translate "!$omp parallel sections": lower the sections with a synthetic
   nowait clause set (the barrier is provided by the enclosing parallel) and
   wrap in a combined OMP_PARALLEL.
   NOTE(review): lossy extraction — "&section_clauses" appears mis-encoded as
   "§ion_clauses" below; bytes kept identical as required.  */
7084 gfc_trans_omp_parallel_sections (gfc_code
*code
)
7087 gfc_omp_clauses section_clauses
;
7088 tree stmt
, omp_clauses
;
/* Zero-init the inner clause set; only nowait is turned on.  */
7090 memset (§ion_clauses
, 0, sizeof (section_clauses
));
7091 section_clauses
.nowait
= true;
7093 gfc_start_block (&block
);
7094 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7097 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
7098 if (TREE_CODE (stmt
) != BIND_EXPR
)
7099 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7102 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
7103 void_type_node
, stmt
, omp_clauses
);
7104 OMP_PARALLEL_COMBINED (stmt
) = 1;
7105 gfc_add_expr_to_block (&block
, stmt
);
7106 return gfc_finish_block (&block
);
/* Translate "!$omp parallel workshare": lower the workshare body with a
   nowait-only inner clause set and wrap in a combined OMP_PARALLEL.
   NOTE(review): lossy extraction — some structural lines missing; code text
   kept byte-identical.  */
7110 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
7113 gfc_omp_clauses workshare_clauses
;
7114 tree stmt
, omp_clauses
;
/* Inner workshare gets only nowait; the parallel supplies the barrier.  */
7116 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
7117 workshare_clauses
.nowait
= true;
7119 gfc_start_block (&block
);
7120 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7123 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
7124 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7125 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
7126 void_type_node
, stmt
, omp_clauses
);
7127 OMP_PARALLEL_COMBINED (stmt
) = 1;
7128 gfc_add_expr_to_block (&block
, stmt
);
7129 return gfc_finish_block (&block
);
/* Translate "!$omp scope" into an OMP_SCOPE node wrapping the translated
   body; short-circuits when the body is empty.
   NOTE(review): lossy extraction — some structural lines missing; code text
   kept byte-identical.  */
7133 gfc_trans_omp_scope (gfc_code
*code
)
7136 tree body
= gfc_trans_code (code
->block
->next
);
/* Empty body: nothing to emit (early-out path; continuation lost in
   extraction).  */
7137 if (IS_EMPTY_STMT (body
))
7139 gfc_start_block (&block
);
7140 tree omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7142 tree stmt
= make_node (OMP_SCOPE
);
7143 SET_EXPR_LOCATION (stmt
, gfc_get_location (&code
->loc
));
7144 TREE_TYPE (stmt
) = void_type_node
;
7145 OMP_SCOPE_BODY (stmt
) = body
;
7146 OMP_SCOPE_CLAUSES (stmt
) = omp_clauses
;
7147 gfc_add_expr_to_block (&block
, stmt
);
7148 return gfc_finish_block (&block
);
/* Translate "!$omp sections": each chained gfc_code block becomes an
   OMP_SECTION; the last section is always chained in when lastprivate is
   present.  Result is an OMP_SECTIONS node.
   NOTE(review): lossy extraction — some structural lines missing; code text
   kept byte-identical.  */
7152 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
7154 stmtblock_t block
, body
;
7155 tree omp_clauses
, stmt
;
7156 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
7157 location_t loc
= gfc_get_location (&code
->loc
);
7159 gfc_start_block (&block
);
7161 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
7163 gfc_init_block (&body
);
/* Walk the chain of section blocks.  */
7164 for (code
= code
->block
; code
; code
= code
->block
)
7166 /* Last section is special because of lastprivate, so even if it
7167 is empty, chain it in. */
7168 stmt
= gfc_trans_omp_code (code
->next
,
7169 has_lastprivate
&& code
->block
== NULL
)
;
7170 if (! IS_EMPTY_STMT (stmt
))
7172 stmt
= build1_v (OMP_SECTION
, stmt
);
7173 gfc_add_expr_to_block (&body
, stmt
);
7176 stmt
= gfc_finish_block (&body
);
7178 stmt
= build2_loc (loc
, OMP_SECTIONS
, void_type_node
, stmt
, omp_clauses
);
7179 gfc_add_expr_to_block (&block
, stmt
);
7181 return gfc_finish_block (&block
);
/* Translate "!$omp single" into an OMP_SINGLE node around the translated
   body.
   NOTE(review): lossy extraction — the build2_loc call's trailing arguments
   and the function tail are missing from this view.  */
7185 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
7187 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, clauses
, code
->loc
);
7188 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7189 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_SINGLE
, void_type_node
,
/* Translate "!$omp task" into an OMP_TASK node wrapping the translated body
   inside a BIND_EXPR.
   NOTE(review): lossy extraction — some structural lines missing; code text
   kept byte-identical.  */
7195 gfc_trans_omp_task (gfc_code
*code
)
7198 tree stmt
, omp_clauses
;
7200 gfc_start_block (&block
);
7201 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7204 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7205 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7206 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TASK
, void_type_node
,
7208 gfc_add_expr_to_block (&block
, stmt
);
7209 return gfc_finish_block (&block
);
/* Translate "!$omp taskgroup" into an OMP_TASKGROUP node with its clauses
   (e.g. task_reduction) lowered into the enclosing block.
   NOTE(review): lossy extraction — some structural lines missing; code text
   kept byte-identical.  */
7213 gfc_trans_omp_taskgroup (gfc_code
*code
)
7216 gfc_start_block (&block
);
7217 tree body
= gfc_trans_code (code
->block
->next
);
7218 tree stmt
= make_node (OMP_TASKGROUP
);
7219 SET_EXPR_LOCATION (stmt
, gfc_get_location (&code
->loc
));
7220 TREE_TYPE (stmt
) = void_type_node
;
7221 OMP_TASKGROUP_BODY (stmt
) = body
;
7222 OMP_TASKGROUP_CLAUSES (stmt
) = gfc_trans_omp_clauses (&block
,
7223 code
->ext
.omp_clauses
,
7225 gfc_add_expr_to_block (&block
, stmt
);
7226 return gfc_finish_block (&block
);
/* Translate "!$omp taskwait".  Without clauses this is a direct call to the
   GOMP_taskwait builtin; with clauses (taskwait with depend) it is lowered
   as an empty-body OMP_TASK carrying those clauses.
   NOTE(review): lossy extraction — some structural lines missing; code text
   kept byte-identical.  */
7230 gfc_trans_omp_taskwait (gfc_code
*code
)
/* Fast path: plain taskwait maps onto the library builtin.  */
7232 if (!code
->ext
.omp_clauses
)
7234 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
7235 return build_call_expr_loc (input_location
, decl
, 0);
/* Clause-bearing form: emit an OMP_TASK with NULL body.  */
7238 gfc_start_block (&block
);
7239 tree stmt
= make_node (OMP_TASK
);
7240 SET_EXPR_LOCATION (stmt
, gfc_get_location (&code
->loc
));
7241 TREE_TYPE (stmt
) = void_type_node
;
7242 OMP_TASK_BODY (stmt
) = NULL_TREE
;
7243 OMP_TASK_CLAUSES (stmt
) = gfc_trans_omp_clauses (&block
,
7244 code
->ext
.omp_clauses
,
7246 gfc_add_expr_to_block (&block
, stmt
);
7247 return gfc_finish_block (&block
);
/* Translate "!$omp taskyield" as a plain call to the GOMP_taskyield
   builtin.  */
7251 gfc_trans_omp_taskyield (void)
7253 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
7254 return build_call_expr_loc (input_location
, decl
, 0);
/* Translate the distribute part of combined distribute constructs.
   Dispatches on code->op: plain distribute is handled inside
   gfc_trans_omp_do; "...parallel do[ simd]" and "...simd" variants lower
   the inner construct first, then everything is wrapped in an
   OMP_DISTRIBUTE node carrying the distribute-split clauses.
   NOTE(review): lossy extraction — the switch header, braces and some
   lines are missing; code text kept byte-identical.  */
7258 gfc_trans_omp_distribute (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
7261 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
7262 tree stmt
, omp_clauses
= NULL_TREE
;
7263 bool free_clausesa
= false;
7265 gfc_start_block (&block
);
/* Split combined clauses locally if not pre-split by the caller.  */
7266 if (clausesa
== NULL
)
7268 clausesa
= clausesa_buf
;
7269 gfc_split_omp_clauses (code
, clausesa
);
7270 free_clausesa
= true;
7274 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
/* Dispatch on the exact combined construct (switch header lost in
   extraction).  */
7278 case EXEC_OMP_DISTRIBUTE
:
7279 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
7280 case EXEC_OMP_TEAMS_DISTRIBUTE
:
7281 /* This is handled in gfc_trans_omp_do. */
7284 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
7285 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
7286 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
7287 stmt
= gfc_trans_omp_parallel_do (code
, false, &block
, clausesa
);
7288 if (TREE_CODE (stmt
) != BIND_EXPR
)
7289 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7293 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
7294 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
7295 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
7296 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
7297 if (TREE_CODE (stmt
) != BIND_EXPR
)
7298 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7302 case EXEC_OMP_DISTRIBUTE_SIMD
:
7303 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
7304 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
7305 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
7306 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
7307 if (TREE_CODE (stmt
) != BIND_EXPR
)
7308 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
/* Build the surrounding OMP_DISTRIBUTE node.  */
7317 tree distribute
= make_node (OMP_DISTRIBUTE
);
7318 SET_EXPR_LOCATION (distribute
, gfc_get_location (&code
->loc
));
7319 TREE_TYPE (distribute
) = void_type_node
;
7320 OMP_FOR_BODY (distribute
) = stmt
;
7321 OMP_FOR_CLAUSES (distribute
) = omp_clauses
;
7324 gfc_add_expr_to_block (&block
, stmt
);
7326 gfc_free_split_omp_clauses (code
, clausesa
);
7327 return gfc_finish_block (&block
);
/* Translate the teams part of teams constructs.  Dispatches on code->op:
   plain teams wraps the translated body; teams-distribute and teams-loop
   lower the inner loop; other combined forms recurse through
   gfc_trans_omp_distribute.  The result is wrapped in an OMP_TEAMS node.
   NOTE(review): lossy extraction — switch header, braces and some lines
   (e.g. the third parameter's declaration line) are missing; code text
   kept byte-identical.  */
7331 gfc_trans_omp_teams (gfc_code
*code
, gfc_omp_clauses
*clausesa
,
7335 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
7337 bool combined
= true, free_clausesa
= false;
7339 gfc_start_block (&block
);
/* Split combined clauses locally if the caller did not pre-split.  */
7340 if (clausesa
== NULL
)
7342 clausesa
= clausesa_buf
;
7343 gfc_split_omp_clauses (code
, clausesa
);
7344 free_clausesa
= true;
/* Append the teams-split clauses to any clauses handed in by the caller
   (e.g. pre-evaluated num_teams from gfc_trans_omp_target).  */
7349 = chainon (omp_clauses
,
7350 gfc_trans_omp_clauses (&block
,
7351 &clausesa
[GFC_OMP_SPLIT_TEAMS
],
7357 case EXEC_OMP_TARGET_TEAMS
:
7358 case EXEC_OMP_TEAMS
:
7359 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7362 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
7363 case EXEC_OMP_TEAMS_DISTRIBUTE
:
7364 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DISTRIBUTE
, NULL
,
7365 &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
7368 case EXEC_OMP_TARGET_TEAMS_LOOP
:
7369 case EXEC_OMP_TEAMS_LOOP
:
7370 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_LOOP
, NULL
,
7371 &clausesa
[GFC_OMP_SPLIT_DO
],
7375 stmt
= gfc_trans_omp_distribute (code
, clausesa
);
7380 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7381 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TEAMS
,
7382 void_type_node
, stmt
, omp_clauses
);
7384 OMP_TEAMS_COMBINED (stmt
) = 1;
7386 gfc_add_expr_to_block (&block
, stmt
);
7388 gfc_free_split_omp_clauses (code
, clausesa
);
7389 return gfc_finish_block (&block
);
/* Translate "!$omp target" and its combined forms.  Splits the clauses,
   lowers the inner construct according to code->op, then wraps everything
   in an OMP_TARGET node (marked combined for the combined variants) and
   records cfun->has_omp_target.
   NOTE(review): lossy extraction — switch header, braces and several
   lines are missing; code text kept byte-identical.  */
7393 gfc_trans_omp_target (gfc_code
*code
)
7396 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
7397 tree stmt
, omp_clauses
= NULL_TREE
;
7399 gfc_start_block (&block
);
7400 gfc_split_omp_clauses (code
, clausesa
);
7403 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TARGET
],
/* Dispatch on the combined construct kind (switch header lost in
   extraction).  */
7407 case EXEC_OMP_TARGET
:
7409 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7410 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7412 case EXEC_OMP_TARGET_PARALLEL
:
/* target parallel: build the inner OMP_PARALLEL in its own block.  */
7417 gfc_start_block (&iblock
);
7419 = gfc_trans_omp_clauses (&iblock
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
7421 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7422 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
7424 gfc_add_expr_to_block (&iblock
, stmt
);
7425 stmt
= gfc_finish_block (&iblock
);
7426 if (TREE_CODE (stmt
) != BIND_EXPR
)
7427 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7432 case EXEC_OMP_TARGET_PARALLEL_DO
:
7433 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
7434 stmt
= gfc_trans_omp_parallel_do (code
,
7436 == EXEC_OMP_TARGET_PARALLEL_LOOP
)
,
7438 if (TREE_CODE (stmt
) != BIND_EXPR
)
7439 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7443 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
7444 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
7445 if (TREE_CODE (stmt
) != BIND_EXPR
)
7446 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7450 case EXEC_OMP_TARGET_SIMD
:
7451 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
7452 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
7453 if (TREE_CODE (stmt
) != BIND_EXPR
)
7454 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
/* target teams variants (condition head lost in extraction): when
   num_teams/thread_limit are present they must be evaluated before
   entering the target region, so they are moved into a temporary
   clause set lowered outside.  */
7460 && (clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams_upper
7461 || clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
))
7463 gfc_omp_clauses clausesb
;
7465 /* For combined !$omp target teams, the num_teams and
7466 thread_limit clauses are evaluated before entering the
7467 target construct. */
7468 memset (&clausesb
, '\0', sizeof (clausesb
));
7469 clausesb
.num_teams_lower
7470 = clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams_lower
;
7471 clausesb
.num_teams_upper
7472 = clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams_upper
;
7473 clausesb
.thread_limit
= clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
;
7474 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams_lower
= NULL
;
7475 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams_upper
= NULL
;
7476 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
= NULL
;
7478 = gfc_trans_omp_clauses (&block
, &clausesb
, code
->loc
);
7480 stmt
= gfc_trans_omp_teams (code
, clausesa
, teams_clauses
);
7485 stmt
= gfc_trans_omp_teams (code
, clausesa
, NULL_TREE
);
7487 if (TREE_CODE (stmt
) != BIND_EXPR
)
7488 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
/* Wrap in OMP_TARGET; combined forms are flagged as such.  */
7495 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TARGET
,
7496 void_type_node
, stmt
, omp_clauses
);
7497 if (code
->op
!= EXEC_OMP_TARGET
)
7498 OMP_TARGET_COMBINED (stmt
) = 1;
7499 cfun
->has_omp_target
= true;
7501 gfc_add_expr_to_block (&block
, stmt
);
7502 gfc_free_split_omp_clauses (code
, clausesa
);
7503 return gfc_finish_block (&block
);
/* Translate "!$omp taskloop[ simd]".  Plain taskloop is handled inside
   gfc_trans_omp_do; taskloop simd lowers the SIMD loop first and wraps it
   in an OMP_TASKLOOP node carrying the taskloop-split clauses.
   NOTE(review): lossy extraction — switch header, braces and some lines
   are missing; code text kept byte-identical.  */
7507 gfc_trans_omp_taskloop (gfc_code
*code
, gfc_exec_op op
)
7510 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
7511 tree stmt
, omp_clauses
= NULL_TREE
;
7513 gfc_start_block (&block
);
7514 gfc_split_omp_clauses (code
, clausesa
);
7517 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TASKLOOP
],
/* Dispatch on op (switch header lost in extraction).  */
7521 case EXEC_OMP_TASKLOOP
:
7522 /* This is handled in gfc_trans_omp_do. */
7525 case EXEC_OMP_TASKLOOP_SIMD
:
7526 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
7527 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
7528 if (TREE_CODE (stmt
) != BIND_EXPR
)
7529 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
/* Build the surrounding OMP_TASKLOOP node.  */
7538 tree taskloop
= make_node (OMP_TASKLOOP
);
7539 SET_EXPR_LOCATION (taskloop
, gfc_get_location (&code
->loc
));
7540 TREE_TYPE (taskloop
) = void_type_node
;
7541 OMP_FOR_BODY (taskloop
) = stmt
;
7542 OMP_FOR_CLAUSES (taskloop
) = omp_clauses
;
7545 gfc_add_expr_to_block (&block
, stmt
);
7546 gfc_free_split_omp_clauses (code
, clausesa
);
7547 return gfc_finish_block (&block
);
/* Translate "!$omp master taskloop[ simd]" and "!$omp masked
   taskloop[ simd]".  Lowers the inner taskloop (or taskloop simd), then
   wraps it in an OMP_MASKED (with filter clause, marked combined) or a
   plain OMP_MASTER node, depending on OP.  Clauses are only split when
   the masked form (or an outer combined construct) requires it.
   NOTE(review): lossy extraction — braces and some lines missing; code
   text kept byte-identical.  */
7551 gfc_trans_omp_master_masked_taskloop (gfc_code
*code
, gfc_exec_op op
)
7553 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
/* Only non-"master taskloop" forms need the clause split here.  */
7557 if (op
!= EXEC_OMP_MASTER_TASKLOOP_SIMD
7558 && code
->op
!= EXEC_OMP_MASTER_TASKLOOP
)
7559 gfc_split_omp_clauses (code
, clausesa
);
/* Lower the inner taskloop[ simd].  */
7562 if (op
== EXEC_OMP_MASKED_TASKLOOP_SIMD
7563 || op
== EXEC_OMP_MASTER_TASKLOOP_SIMD
)
7564 stmt
= gfc_trans_omp_taskloop (code
, EXEC_OMP_TASKLOOP_SIMD
);
7567 gcc_assert (op
== EXEC_OMP_MASKED_TASKLOOP
7568 || op
== EXEC_OMP_MASTER_TASKLOOP
)
;
7569 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_TASKLOOP
, NULL
,
7570 code
->op
!= EXEC_OMP_MASTER_TASKLOOP
7571 ? &clausesa
[GFC_OMP_SPLIT_TASKLOOP
]
7572 : code
->ext
.omp_clauses
, NULL
);
7574 if (TREE_CODE (stmt
) != BIND_EXPR
)
7575 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7578 gfc_start_block (&block
);
/* masked variants: wrap in OMP_MASKED with its filter clause.  */
7579 if (op
== EXEC_OMP_MASKED_TASKLOOP
|| op
== EXEC_OMP_MASKED_TASKLOOP_SIMD
)
7581 tree clauses
= gfc_trans_omp_clauses (&block
,
7582 &clausesa
[GFC_OMP_SPLIT_MASKED
],
7584 tree msk
= make_node (OMP_MASKED
);
7585 SET_EXPR_LOCATION (msk
, gfc_get_location (&code
->loc
));
7586 TREE_TYPE (msk
) = void_type_node
;
7587 OMP_MASKED_BODY (msk
) = stmt
;
7588 OMP_MASKED_CLAUSES (msk
) = clauses
;
7589 OMP_MASKED_COMBINED (msk
) = 1;
7590 gfc_add_expr_to_block (&block
, msk
);
/* master variants: simple OMP_MASTER wrapper.  */
7594 gcc_assert (op
== EXEC_OMP_MASTER_TASKLOOP
7595 || op
== EXEC_OMP_MASTER_TASKLOOP_SIMD
)
;
7596 stmt
= build1_v (OMP_MASTER
, stmt
);
7597 gfc_add_expr_to_block (&block
, stmt
);
/* Free the split clauses only when we created them above.  */
7599 if (op
!= EXEC_OMP_MASTER_TASKLOOP_SIMD
7600 && code
->op
!= EXEC_OMP_MASTER_TASKLOOP
)
7601 gfc_free_split_omp_clauses (code
, clausesa
);
7602 return gfc_finish_block (&block
);
7606 gfc_trans_omp_parallel_master_masked (gfc_code
*code
)
7609 tree stmt
, omp_clauses
;
7610 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
7611 bool parallel_combined
= false;
7613 if (code
->op
!= EXEC_OMP_PARALLEL_MASTER
)
7614 gfc_split_omp_clauses (code
, clausesa
);
7616 gfc_start_block (&block
);
7617 omp_clauses
= gfc_trans_omp_clauses (&block
,
7618 code
->op
== EXEC_OMP_PARALLEL_MASTER
7619 ? code
->ext
.omp_clauses
7620 : &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
7623 if (code
->op
== EXEC_OMP_PARALLEL_MASTER
)
7624 stmt
= gfc_trans_omp_master (code
);
7625 else if (code
->op
== EXEC_OMP_PARALLEL_MASKED
)
7626 stmt
= gfc_trans_omp_masked (code
, &clausesa
[GFC_OMP_SPLIT_MASKED
]);
7632 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP
:
7633 op
= EXEC_OMP_MASKED_TASKLOOP
;
7635 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP_SIMD
:
7636 op
= EXEC_OMP_MASKED_TASKLOOP_SIMD
;
7638 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP
:
7639 op
= EXEC_OMP_MASTER_TASKLOOP
;
7641 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP_SIMD
:
7642 op
= EXEC_OMP_MASTER_TASKLOOP_SIMD
;
7647 stmt
= gfc_trans_omp_master_masked_taskloop (code
, op
);
7648 parallel_combined
= true;
7650 if (TREE_CODE (stmt
) != BIND_EXPR
)
7651 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7654 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
7655 void_type_node
, stmt
, omp_clauses
);
7656 /* masked does have just filter clause, but during gimplification
7657 isn't represented by a gimplification omp context, so for
7658 !$omp parallel masked don't set OMP_PARALLEL_COMBINED,
7660 !$omp parallel masked
7661 !$omp taskloop simd lastprivate (x)
7663 !$omp parallel masked taskloop simd lastprivate (x) */
7664 if (parallel_combined
)
7665 OMP_PARALLEL_COMBINED (stmt
) = 1;
7666 gfc_add_expr_to_block (&block
, stmt
);
7667 if (code
->op
!= EXEC_OMP_PARALLEL_MASTER
)
7668 gfc_free_split_omp_clauses (code
, clausesa
);
7669 return gfc_finish_block (&block
);
7673 gfc_trans_omp_target_data (gfc_code
*code
)
7676 tree stmt
, omp_clauses
;
7678 gfc_start_block (&block
);
7679 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7681 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7682 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TARGET_DATA
,
7683 void_type_node
, stmt
, omp_clauses
);
7684 gfc_add_expr_to_block (&block
, stmt
);
7685 return gfc_finish_block (&block
);
7689 gfc_trans_omp_target_enter_data (gfc_code
*code
)
7692 tree stmt
, omp_clauses
;
7694 gfc_start_block (&block
);
7695 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7697 stmt
= build1_loc (input_location
, OMP_TARGET_ENTER_DATA
, void_type_node
,
7699 gfc_add_expr_to_block (&block
, stmt
);
7700 return gfc_finish_block (&block
);
7704 gfc_trans_omp_target_exit_data (gfc_code
*code
)
7707 tree stmt
, omp_clauses
;
7709 gfc_start_block (&block
);
7710 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7711 code
->loc
, false, false, code
->op
);
7712 stmt
= build1_loc (input_location
, OMP_TARGET_EXIT_DATA
, void_type_node
,
7714 gfc_add_expr_to_block (&block
, stmt
);
7715 return gfc_finish_block (&block
);
7719 gfc_trans_omp_target_update (gfc_code
*code
)
7722 tree stmt
, omp_clauses
;
7724 gfc_start_block (&block
);
7725 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7727 stmt
= build1_loc (input_location
, OMP_TARGET_UPDATE
, void_type_node
,
7729 gfc_add_expr_to_block (&block
, stmt
);
7730 return gfc_finish_block (&block
);
7734 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
7736 tree res
, tmp
, stmt
;
7737 stmtblock_t block
, *pblock
= NULL
;
7738 stmtblock_t singleblock
;
7739 int saved_ompws_flags
;
7740 bool singleblock_in_progress
= false;
7741 /* True if previous gfc_code in workshare construct is not workshared. */
7742 bool prev_singleunit
;
7743 location_t loc
= gfc_get_location (&code
->loc
);
7745 code
= code
->block
->next
;
7749 gfc_start_block (&block
);
7752 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
7753 prev_singleunit
= false;
7755 /* Translate statements one by one to trees until we reach
7756 the end of the workshare construct. Adjacent gfc_codes that
7757 are a single unit of work are clustered and encapsulated in a
7758 single OMP_SINGLE construct. */
7759 for (; code
; code
= code
->next
)
7761 if (code
->here
!= 0)
7763 res
= gfc_trans_label_here (code
);
7764 gfc_add_expr_to_block (pblock
, res
);
7767 /* No dependence analysis, use for clauses with wait.
7768 If this is the last gfc_code, use default omp_clauses. */
7769 if (code
->next
== NULL
&& clauses
->nowait
)
7770 ompws_flags
|= OMPWS_NOWAIT
;
7772 /* By default, every gfc_code is a single unit of work. */
7773 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
7774 ompws_flags
&= ~(OMPWS_SCALARIZER_WS
| OMPWS_SCALARIZER_BODY
);
7783 res
= gfc_trans_assign (code
);
7786 case EXEC_POINTER_ASSIGN
:
7787 res
= gfc_trans_pointer_assign (code
);
7790 case EXEC_INIT_ASSIGN
:
7791 res
= gfc_trans_init_assign (code
);
7795 res
= gfc_trans_forall (code
);
7799 res
= gfc_trans_where (code
);
7802 case EXEC_OMP_ATOMIC
:
7803 res
= gfc_trans_omp_directive (code
);
7806 case EXEC_OMP_PARALLEL
:
7807 case EXEC_OMP_PARALLEL_DO
:
7808 case EXEC_OMP_PARALLEL_MASTER
:
7809 case EXEC_OMP_PARALLEL_SECTIONS
:
7810 case EXEC_OMP_PARALLEL_WORKSHARE
:
7811 case EXEC_OMP_CRITICAL
:
7812 saved_ompws_flags
= ompws_flags
;
7814 res
= gfc_trans_omp_directive (code
);
7815 ompws_flags
= saved_ompws_flags
;
7819 res
= gfc_trans_block_construct (code
);
7823 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
7826 gfc_set_backend_locus (&code
->loc
);
7828 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
7830 if (prev_singleunit
)
7832 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
7833 /* Add current gfc_code to single block. */
7834 gfc_add_expr_to_block (&singleblock
, res
);
7837 /* Finish single block and add it to pblock. */
7838 tmp
= gfc_finish_block (&singleblock
);
7839 tmp
= build2_loc (loc
, OMP_SINGLE
,
7840 void_type_node
, tmp
, NULL_TREE
);
7841 gfc_add_expr_to_block (pblock
, tmp
);
7842 /* Add current gfc_code to pblock. */
7843 gfc_add_expr_to_block (pblock
, res
);
7844 singleblock_in_progress
= false;
7849 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
7851 /* Start single block. */
7852 gfc_init_block (&singleblock
);
7853 gfc_add_expr_to_block (&singleblock
, res
);
7854 singleblock_in_progress
= true;
7855 loc
= gfc_get_location (&code
->loc
);
7858 /* Add the new statement to the block. */
7859 gfc_add_expr_to_block (pblock
, res
);
7861 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
7865 /* Finish remaining SINGLE block, if we were in the middle of one. */
7866 if (singleblock_in_progress
)
7868 /* Finish single block and add it to pblock. */
7869 tmp
= gfc_finish_block (&singleblock
);
7870 tmp
= build2_loc (loc
, OMP_SINGLE
, void_type_node
, tmp
,
7872 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
7874 gfc_add_expr_to_block (pblock
, tmp
);
7877 stmt
= gfc_finish_block (pblock
);
7878 if (TREE_CODE (stmt
) != BIND_EXPR
)
7880 if (!IS_EMPTY_STMT (stmt
))
7882 tree bindblock
= poplevel (1, 0);
7883 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
7891 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
7892 stmt
= gfc_trans_omp_barrier ();
7899 gfc_trans_oacc_declare (gfc_code
*code
)
7902 tree stmt
, oacc_clauses
;
7903 enum tree_code construct_code
;
7905 construct_code
= OACC_DATA
;
7907 gfc_start_block (&block
);
7909 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.oacc_declare
->clauses
,
7910 code
->loc
, false, true);
7911 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7912 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
7914 gfc_add_expr_to_block (&block
, stmt
);
7916 return gfc_finish_block (&block
);
7920 gfc_trans_oacc_directive (gfc_code
*code
)
7924 case EXEC_OACC_PARALLEL_LOOP
:
7925 case EXEC_OACC_KERNELS_LOOP
:
7926 case EXEC_OACC_SERIAL_LOOP
:
7927 return gfc_trans_oacc_combined_directive (code
);
7928 case EXEC_OACC_PARALLEL
:
7929 case EXEC_OACC_KERNELS
:
7930 case EXEC_OACC_SERIAL
:
7931 case EXEC_OACC_DATA
:
7932 case EXEC_OACC_HOST_DATA
:
7933 return gfc_trans_oacc_construct (code
);
7934 case EXEC_OACC_LOOP
:
7935 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
7937 case EXEC_OACC_UPDATE
:
7938 case EXEC_OACC_CACHE
:
7939 case EXEC_OACC_ENTER_DATA
:
7940 case EXEC_OACC_EXIT_DATA
:
7941 return gfc_trans_oacc_executable_directive (code
);
7942 case EXEC_OACC_WAIT
:
7943 return gfc_trans_oacc_wait_directive (code
);
7944 case EXEC_OACC_ATOMIC
:
7945 return gfc_trans_omp_atomic (code
);
7946 case EXEC_OACC_DECLARE
:
7947 return gfc_trans_oacc_declare (code
);
7954 gfc_trans_omp_directive (gfc_code
*code
)
7958 case EXEC_OMP_ALLOCATE
:
7959 case EXEC_OMP_ALLOCATORS
:
7960 sorry ("%<!$OMP %s%> not yet supported",
7961 code
->op
== EXEC_OMP_ALLOCATE
? "ALLOCATE" : "ALLOCATORS");
7963 case EXEC_OMP_ASSUME
:
7964 return gfc_trans_omp_assume (code
);
7965 case EXEC_OMP_ATOMIC
:
7966 return gfc_trans_omp_atomic (code
);
7967 case EXEC_OMP_BARRIER
:
7968 return gfc_trans_omp_barrier ();
7969 case EXEC_OMP_CANCEL
:
7970 return gfc_trans_omp_cancel (code
);
7971 case EXEC_OMP_CANCELLATION_POINT
:
7972 return gfc_trans_omp_cancellation_point (code
);
7973 case EXEC_OMP_CRITICAL
:
7974 return gfc_trans_omp_critical (code
);
7975 case EXEC_OMP_DEPOBJ
:
7976 return gfc_trans_omp_depobj (code
);
7977 case EXEC_OMP_DISTRIBUTE
:
7981 case EXEC_OMP_TASKLOOP
:
7982 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
7984 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
7985 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
7986 case EXEC_OMP_DISTRIBUTE_SIMD
:
7987 return gfc_trans_omp_distribute (code
, NULL
);
7988 case EXEC_OMP_DO_SIMD
:
7989 return gfc_trans_omp_do_simd (code
, NULL
, NULL
, NULL_TREE
);
7990 case EXEC_OMP_ERROR
:
7991 return gfc_trans_omp_error (code
);
7992 case EXEC_OMP_FLUSH
:
7993 return gfc_trans_omp_flush (code
);
7994 case EXEC_OMP_MASKED
:
7995 return gfc_trans_omp_masked (code
, NULL
);
7996 case EXEC_OMP_MASTER
:
7997 return gfc_trans_omp_master (code
);
7998 case EXEC_OMP_MASKED_TASKLOOP
:
7999 case EXEC_OMP_MASKED_TASKLOOP_SIMD
:
8000 case EXEC_OMP_MASTER_TASKLOOP
:
8001 case EXEC_OMP_MASTER_TASKLOOP_SIMD
:
8002 return gfc_trans_omp_master_masked_taskloop (code
, code
->op
);
8003 case EXEC_OMP_ORDERED
:
8004 return gfc_trans_omp_ordered (code
);
8005 case EXEC_OMP_PARALLEL
:
8006 return gfc_trans_omp_parallel (code
);
8007 case EXEC_OMP_PARALLEL_DO
:
8008 return gfc_trans_omp_parallel_do (code
, false, NULL
, NULL
);
8009 case EXEC_OMP_PARALLEL_LOOP
:
8010 return gfc_trans_omp_parallel_do (code
, true, NULL
, NULL
);
8011 case EXEC_OMP_PARALLEL_DO_SIMD
:
8012 return gfc_trans_omp_parallel_do_simd (code
, NULL
, NULL
);
8013 case EXEC_OMP_PARALLEL_MASKED
:
8014 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP
:
8015 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP_SIMD
:
8016 case EXEC_OMP_PARALLEL_MASTER
:
8017 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP
:
8018 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP_SIMD
:
8019 return gfc_trans_omp_parallel_master_masked (code
);
8020 case EXEC_OMP_PARALLEL_SECTIONS
:
8021 return gfc_trans_omp_parallel_sections (code
);
8022 case EXEC_OMP_PARALLEL_WORKSHARE
:
8023 return gfc_trans_omp_parallel_workshare (code
);
8024 case EXEC_OMP_SCOPE
:
8025 return gfc_trans_omp_scope (code
);
8026 case EXEC_OMP_SECTIONS
:
8027 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
8028 case EXEC_OMP_SINGLE
:
8029 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
8030 case EXEC_OMP_TARGET
:
8031 case EXEC_OMP_TARGET_PARALLEL
:
8032 case EXEC_OMP_TARGET_PARALLEL_DO
:
8033 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
8034 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
8035 case EXEC_OMP_TARGET_SIMD
:
8036 case EXEC_OMP_TARGET_TEAMS
:
8037 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
8038 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
8039 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
8040 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
8041 case EXEC_OMP_TARGET_TEAMS_LOOP
:
8042 return gfc_trans_omp_target (code
);
8043 case EXEC_OMP_TARGET_DATA
:
8044 return gfc_trans_omp_target_data (code
);
8045 case EXEC_OMP_TARGET_ENTER_DATA
:
8046 return gfc_trans_omp_target_enter_data (code
);
8047 case EXEC_OMP_TARGET_EXIT_DATA
:
8048 return gfc_trans_omp_target_exit_data (code
);
8049 case EXEC_OMP_TARGET_UPDATE
:
8050 return gfc_trans_omp_target_update (code
);
8052 return gfc_trans_omp_task (code
);
8053 case EXEC_OMP_TASKGROUP
:
8054 return gfc_trans_omp_taskgroup (code
);
8055 case EXEC_OMP_TASKLOOP_SIMD
:
8056 return gfc_trans_omp_taskloop (code
, code
->op
);
8057 case EXEC_OMP_TASKWAIT
:
8058 return gfc_trans_omp_taskwait (code
);
8059 case EXEC_OMP_TASKYIELD
:
8060 return gfc_trans_omp_taskyield ();
8061 case EXEC_OMP_TEAMS
:
8062 case EXEC_OMP_TEAMS_DISTRIBUTE
:
8063 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
8064 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
8065 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
8066 case EXEC_OMP_TEAMS_LOOP
:
8067 return gfc_trans_omp_teams (code
, NULL
, NULL_TREE
);
8068 case EXEC_OMP_WORKSHARE
:
8069 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);
8076 gfc_trans_omp_declare_simd (gfc_namespace
*ns
)
8081 gfc_omp_declare_simd
*ods
;
8082 for (ods
= ns
->omp_declare_simd
; ods
; ods
= ods
->next
)
8084 tree c
= gfc_trans_omp_clauses (NULL
, ods
->clauses
, ods
->where
, true);
8085 tree fndecl
= ns
->proc_name
->backend_decl
;
8087 c
= tree_cons (NULL_TREE
, c
, NULL_TREE
);
8088 c
= build_tree_list (get_identifier ("omp declare simd"), c
);
8089 TREE_CHAIN (c
) = DECL_ATTRIBUTES (fndecl
);
8090 DECL_ATTRIBUTES (fndecl
) = c
;
8095 gfc_trans_omp_declare_variant (gfc_namespace
*ns
)
8097 tree base_fn_decl
= ns
->proc_name
->backend_decl
;
8098 gfc_namespace
*search_ns
= ns
;
8099 gfc_omp_declare_variant
*next
;
8101 for (gfc_omp_declare_variant
*odv
= search_ns
->omp_declare_variant
;
8102 search_ns
; odv
= next
)
8104 /* Look in the parent namespace if there are no more directives in the
8105 current namespace. */
8108 search_ns
= search_ns
->parent
;
8110 next
= search_ns
->omp_declare_variant
;
8119 /* Check directive the first time it is encountered. */
8120 bool error_found
= true;
8123 error_found
= false;
8124 if (odv
->base_proc_symtree
== NULL
)
8126 if (!search_ns
->proc_name
->attr
.function
8127 && !search_ns
->proc_name
->attr
.subroutine
)
8128 gfc_error ("The base name for 'declare variant' must be "
8129 "specified at %L ", &odv
->where
);
8131 error_found
= false;
8135 if (!search_ns
->contained
8136 && strcmp (odv
->base_proc_symtree
->name
,
8137 ns
->proc_name
->name
))
8138 gfc_error ("The base name at %L does not match the name of the "
8139 "current procedure", &odv
->where
);
8140 else if (odv
->base_proc_symtree
->n
.sym
->attr
.entry
)
8141 gfc_error ("The base name at %L must not be an entry name",
8143 else if (odv
->base_proc_symtree
->n
.sym
->attr
.generic
)
8144 gfc_error ("The base name at %L must not be a generic name",
8146 else if (odv
->base_proc_symtree
->n
.sym
->attr
.proc_pointer
)
8147 gfc_error ("The base name at %L must not be a procedure pointer",
8149 else if (odv
->base_proc_symtree
->n
.sym
->attr
.implicit_type
)
8150 gfc_error ("The base procedure at %L must have an explicit "
8151 "interface", &odv
->where
);
8153 error_found
= false;
8156 odv
->checked_p
= true;
8159 odv
->error_p
= true;
8163 /* Ignore directives that do not apply to the current procedure. */
8164 if ((odv
->base_proc_symtree
== NULL
&& search_ns
!= ns
)
8165 || (odv
->base_proc_symtree
!= NULL
8166 && strcmp (odv
->base_proc_symtree
->name
, ns
->proc_name
->name
)))
8169 tree set_selectors
= NULL_TREE
;
8170 gfc_omp_set_selector
*oss
;
8172 for (oss
= odv
->set_selectors
; oss
; oss
= oss
->next
)
8174 tree selectors
= NULL_TREE
;
8175 gfc_omp_selector
*os
;
8176 for (os
= oss
->trait_selectors
; os
; os
= os
->next
)
8178 tree properties
= NULL_TREE
;
8179 gfc_omp_trait_property
*otp
;
8181 for (otp
= os
->properties
; otp
; otp
= otp
->next
)
8183 switch (otp
->property_kind
)
8185 case CTX_PROPERTY_USER
:
8186 case CTX_PROPERTY_EXPR
:
8189 gfc_init_se (&se
, NULL
);
8190 gfc_conv_expr (&se
, otp
->expr
);
8191 properties
= tree_cons (NULL_TREE
, se
.expr
,
8195 case CTX_PROPERTY_ID
:
8196 properties
= tree_cons (get_identifier (otp
->name
),
8197 NULL_TREE
, properties
);
8199 case CTX_PROPERTY_NAME_LIST
:
8201 tree prop
= NULL_TREE
, value
= NULL_TREE
;
8203 prop
= get_identifier (otp
->name
);
8205 value
= gfc_conv_constant_to_tree (otp
->expr
);
8207 properties
= tree_cons (prop
, value
, properties
);
8210 case CTX_PROPERTY_SIMD
:
8211 properties
= gfc_trans_omp_clauses (NULL
, otp
->clauses
,
8222 gfc_init_se (&se
, NULL
);
8223 gfc_conv_expr (&se
, os
->score
);
8224 properties
= tree_cons (get_identifier (" score"),
8225 se
.expr
, properties
);
8228 selectors
= tree_cons (get_identifier (os
->trait_selector_name
),
8229 properties
, selectors
);
8233 = tree_cons (get_identifier (oss
->trait_set_selector_name
),
8234 selectors
, set_selectors
);
8237 const char *variant_proc_name
= odv
->variant_proc_symtree
->name
;
8238 gfc_symbol
*variant_proc_sym
= odv
->variant_proc_symtree
->n
.sym
;
8239 if (variant_proc_sym
== NULL
|| variant_proc_sym
->attr
.implicit_type
)
8241 gfc_symtree
*proc_st
;
8242 gfc_find_sym_tree (variant_proc_name
, gfc_current_ns
, 1, &proc_st
);
8243 variant_proc_sym
= proc_st
->n
.sym
;
8245 if (variant_proc_sym
== NULL
)
8247 gfc_error ("Cannot find symbol %qs", variant_proc_name
);
8250 set_selectors
= omp_check_context_selector
8251 (gfc_get_location (&odv
->where
), set_selectors
);
8252 if (set_selectors
!= error_mark_node
)
8254 if (!variant_proc_sym
->attr
.implicit_type
8255 && !variant_proc_sym
->attr
.subroutine
8256 && !variant_proc_sym
->attr
.function
)
8258 gfc_error ("variant %qs at %L is not a function or subroutine",
8259 variant_proc_name
, &odv
->where
);
8260 variant_proc_sym
= NULL
;
8262 else if (omp_get_context_selector (set_selectors
, "construct",
8263 "simd") == NULL_TREE
)
8266 if (!gfc_compare_interfaces (ns
->proc_name
, variant_proc_sym
,
8267 variant_proc_sym
->name
, 0, 1,
8268 err
, sizeof (err
), NULL
, NULL
))
8270 gfc_error ("variant %qs and base %qs at %L have "
8271 "incompatible types: %s",
8272 variant_proc_name
, ns
->proc_name
->name
,
8274 variant_proc_sym
= NULL
;
8277 if (variant_proc_sym
!= NULL
)
8279 gfc_set_sym_referenced (variant_proc_sym
);
8280 tree construct
= omp_get_context_selector (set_selectors
,
8282 omp_mark_declare_variant (gfc_get_location (&odv
->where
),
8283 gfc_get_symbol_decl (variant_proc_sym
),
8285 if (omp_context_selector_matches (set_selectors
))
8287 tree id
= get_identifier ("omp declare variant base");
8288 tree variant
= gfc_get_symbol_decl (variant_proc_sym
);
8289 DECL_ATTRIBUTES (base_fn_decl
)
8290 = tree_cons (id
, build_tree_list (variant
, set_selectors
),
8291 DECL_ATTRIBUTES (base_fn_decl
));