1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2021 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "gimple-expr.h"
30 #include "stringpool.h"
31 #include "fold-const.h"
32 #include "gimplify.h" /* For create_tmp_var_raw. */
33 #include "trans-stmt.h"
34 #include "trans-types.h"
35 #include "trans-array.h"
36 #include "trans-const.h"
38 #include "constructor.h"
39 #include "gomp-constants.h"
40 #include "omp-general.h"
42 #include "memmodel.h" /* For MEMMODEL_ enums. */
45 #define GCC_DIAG_STYLE __gcc_tdiag__
46 #include "diagnostic-core.h"
48 #define GCC_DIAG_STYLE __gcc_gfc__
54 /* True if OpenMP should regard this DECL as being a scalar which has Fortran's
55 allocatable or pointer attribute. */
58 gfc_omp_is_allocatable_or_ptr (const_tree decl
)
61 && (GFC_DECL_GET_SCALAR_POINTER (decl
)
62 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)));
65 /* True if the argument is an optional argument; except that false is also
66 returned for arguments with the value attribute (nonpointers) and for
67 assumed-shape variables (decl is a local variable containing arg->data).
68 Note that for 'procedure(), optional' the value false is used as that's
69 always a pointer and no additional indirection is used.
70 Note that pvoid_type_node is for 'type(c_ptr), value' (and c_funloc). */
73 gfc_omp_is_optional_argument (const_tree decl
)
75 return (TREE_CODE (decl
) == PARM_DECL
76 && DECL_LANG_SPECIFIC (decl
)
77 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
78 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
)))
79 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) != FUNCTION_TYPE
80 && GFC_DECL_OPTIONAL_ARGUMENT (decl
));
83 /* Check whether this DECL belongs to a Fortran optional argument.
84 With 'for_present_check' set to false, decls which are optional parameters
85 themselve are returned as tree - or a NULL_TREE otherwise. Those decls are
86 always pointers. With 'for_present_check' set to true, the decl for checking
87 whether an argument is present is returned; for arguments with value
88 attribute this is the hidden argument and of BOOLEAN_TYPE. If the decl is
89 unrelated to optional arguments, NULL_TREE is returned. */
92 gfc_omp_check_optional_argument (tree decl
, bool for_present_check
)
94 if (!for_present_check
)
95 return gfc_omp_is_optional_argument (decl
) ? decl
: NULL_TREE
;
97 if (!DECL_LANG_SPECIFIC (decl
))
100 tree orig_decl
= decl
;
102 /* For assumed-shape arrays, a local decl with arg->data is used. */
103 if (TREE_CODE (decl
) != PARM_DECL
104 && (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
))
105 || GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))))
106 decl
= GFC_DECL_SAVED_DESCRIPTOR (decl
);
108 if (decl
== NULL_TREE
109 || TREE_CODE (decl
) != PARM_DECL
110 || !DECL_LANG_SPECIFIC (decl
)
111 || !GFC_DECL_OPTIONAL_ARGUMENT (decl
))
114 /* Scalars with VALUE attribute which are passed by value use a hidden
115 argument to denote the present status. They are passed as nonpointer type
116 with one exception: 'type(c_ptr), value' as 'void*'. */
117 /* Cf. trans-expr.c's gfc_conv_expr_present. */
118 if (TREE_CODE (TREE_TYPE (decl
)) != POINTER_TYPE
119 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
121 char name
[GFC_MAX_SYMBOL_LEN
+ 2];
125 strcpy (&name
[1], IDENTIFIER_POINTER (DECL_NAME (decl
)));
126 tree_name
= get_identifier (name
);
128 /* Walk function argument list to find the hidden arg. */
129 decl
= DECL_ARGUMENTS (DECL_CONTEXT (decl
));
130 for ( ; decl
!= NULL_TREE
; decl
= TREE_CHAIN (decl
))
131 if (DECL_NAME (decl
) == tree_name
132 && DECL_ARTIFICIAL (decl
))
139 return fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
140 orig_decl
, null_pointer_node
);
144 /* Returns tree with NULL if it is not an array descriptor and with the tree to
145 access the 'data' component otherwise. With type_only = true, it returns the
146 TREE_TYPE without creating a new tree. */
149 gfc_omp_array_data (tree decl
, bool type_only
)
151 tree type
= TREE_TYPE (decl
);
153 if (POINTER_TYPE_P (type
))
154 type
= TREE_TYPE (type
);
156 if (!GFC_DESCRIPTOR_TYPE_P (type
))
160 return GFC_TYPE_ARRAY_DATAPTR_TYPE (type
);
162 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
163 decl
= build_fold_indirect_ref (decl
);
165 decl
= gfc_conv_descriptor_data_get (decl
);
170 /* True if OpenMP should privatize what this DECL points to rather
171 than the DECL itself. */
174 gfc_omp_privatize_by_reference (const_tree decl
)
176 tree type
= TREE_TYPE (decl
);
178 if (TREE_CODE (type
) == REFERENCE_TYPE
179 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
182 if (TREE_CODE (type
) == POINTER_TYPE
183 && gfc_omp_is_optional_argument (decl
))
186 if (TREE_CODE (type
) == POINTER_TYPE
)
188 while (TREE_CODE (decl
) == COMPONENT_REF
)
189 decl
= TREE_OPERAND (decl
, 1);
191 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
192 that have POINTER_TYPE type and aren't scalar pointers, scalar
193 allocatables, Cray pointees or C pointers are supposed to be
194 privatized by reference. */
195 if (GFC_DECL_GET_SCALAR_POINTER (decl
)
196 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
197 || GFC_DECL_CRAY_POINTEE (decl
)
198 || GFC_DECL_ASSOCIATE_VAR_P (decl
)
199 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
202 if (!DECL_ARTIFICIAL (decl
)
203 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
206 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
208 if (DECL_LANG_SPECIFIC (decl
)
209 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
216 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
217 of DECL is predetermined. */
219 enum omp_clause_default_kind
220 gfc_omp_predetermined_sharing (tree decl
)
222 /* Associate names preserve the association established during ASSOCIATE.
223 As they are implemented either as pointers to the selector or array
224 descriptor and shouldn't really change in the ASSOCIATE region,
225 this decl can be either shared or firstprivate. If it is a pointer,
226 use firstprivate, as it is cheaper that way, otherwise make it shared. */
227 if (GFC_DECL_ASSOCIATE_VAR_P (decl
))
229 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
230 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
232 return OMP_CLAUSE_DEFAULT_SHARED
;
235 if (DECL_ARTIFICIAL (decl
)
236 && ! GFC_DECL_RESULT (decl
)
237 && ! (DECL_LANG_SPECIFIC (decl
)
238 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
239 return OMP_CLAUSE_DEFAULT_SHARED
;
241 /* Cray pointees shouldn't be listed in any clauses and should be
242 gimplified to dereference of the corresponding Cray pointer.
243 Make them all private, so that they are emitted in the debug
245 if (GFC_DECL_CRAY_POINTEE (decl
))
246 return OMP_CLAUSE_DEFAULT_PRIVATE
;
248 /* Assumed-size arrays are predetermined shared. */
249 if (TREE_CODE (decl
) == PARM_DECL
250 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
251 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
252 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
253 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
255 return OMP_CLAUSE_DEFAULT_SHARED
;
257 /* Dummy procedures aren't considered variables by OpenMP, thus are
258 disallowed in OpenMP clauses. They are represented as PARM_DECLs
259 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
260 to avoid complaining about their uses with default(none). */
261 if (TREE_CODE (decl
) == PARM_DECL
262 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
263 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
264 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
266 /* COMMON and EQUIVALENCE decls are shared. They
267 are only referenced through DECL_VALUE_EXPR of the variables
268 contained in them. If those are privatized, they will not be
269 gimplified to the COMMON or EQUIVALENCE decls. */
270 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
271 return OMP_CLAUSE_DEFAULT_SHARED
;
273 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
274 return OMP_CLAUSE_DEFAULT_SHARED
;
276 /* These are either array or derived parameters, or vtables.
277 In the former cases, the OpenMP standard doesn't consider them to be
278 variables at all (they can't be redefined), but they can nevertheless appear
279 in parallel/task regions and for default(none) purposes treat them as shared.
280 For vtables likely the same handling is desirable. */
281 if (VAR_P (decl
) && TREE_READONLY (decl
)
282 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
283 return OMP_CLAUSE_DEFAULT_SHARED
;
285 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
289 /* OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED unless OpenMP mapping attribute
290 of DECL is predetermined. */
292 enum omp_clause_defaultmap_kind
293 gfc_omp_predetermined_mapping (tree decl
)
295 if (DECL_ARTIFICIAL (decl
)
296 && ! GFC_DECL_RESULT (decl
)
297 && ! (DECL_LANG_SPECIFIC (decl
)
298 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
299 return OMP_CLAUSE_DEFAULTMAP_TO
;
301 /* These are either array or derived parameters, or vtables. */
302 if (VAR_P (decl
) && TREE_READONLY (decl
)
303 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
304 return OMP_CLAUSE_DEFAULTMAP_TO
;
306 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
;
310 /* Return decl that should be used when reporting DEFAULT(NONE)
314 gfc_omp_report_decl (tree decl
)
316 if (DECL_ARTIFICIAL (decl
)
317 && DECL_LANG_SPECIFIC (decl
)
318 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
319 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
324 /* Return true if TYPE has any allocatable components. */
327 gfc_has_alloc_comps (tree type
, tree decl
)
331 if (POINTER_TYPE_P (type
))
333 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
334 type
= TREE_TYPE (type
);
335 else if (GFC_DECL_GET_SCALAR_POINTER (decl
))
339 if (GFC_DESCRIPTOR_TYPE_P (type
)
340 && (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
341 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
))
344 if (GFC_DESCRIPTOR_TYPE_P (type
) || GFC_ARRAY_TYPE_P (type
))
345 type
= gfc_get_element_type (type
);
347 if (TREE_CODE (type
) != RECORD_TYPE
)
350 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
352 ftype
= TREE_TYPE (field
);
353 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
355 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
356 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
358 if (gfc_has_alloc_comps (ftype
, field
))
364 /* Return true if TYPE is polymorphic but not with pointer attribute. */
367 gfc_is_polymorphic_nonptr (tree type
)
369 if (POINTER_TYPE_P (type
))
370 type
= TREE_TYPE (type
);
371 return GFC_CLASS_TYPE_P (type
);
374 /* Return true if TYPE is unlimited polymorphic but not with pointer attribute;
375 unlimited means also intrinsic types are handled and _len is used. */
378 gfc_is_unlimited_polymorphic_nonptr (tree type
)
380 if (POINTER_TYPE_P (type
))
381 type
= TREE_TYPE (type
);
382 if (!GFC_CLASS_TYPE_P (type
))
385 tree field
= TYPE_FIELDS (type
); /* _data */
387 field
= DECL_CHAIN (field
); /* _vptr */
389 field
= DECL_CHAIN (field
);
392 gcc_assert (strcmp ("_len", IDENTIFIER_POINTER (DECL_NAME (field
))) == 0);
396 /* Return true if the DECL is for an allocatable array or scalar. */
399 gfc_omp_allocatable_p (tree decl
)
404 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
407 tree type
= TREE_TYPE (decl
);
408 if (gfc_omp_privatize_by_reference (decl
))
409 type
= TREE_TYPE (type
);
411 if (GFC_DESCRIPTOR_TYPE_P (type
)
412 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
419 /* Return true if DECL in private clause needs
420 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
422 gfc_omp_private_outer_ref (tree decl
)
424 tree type
= TREE_TYPE (decl
);
426 if (gfc_omp_privatize_by_reference (decl
))
427 type
= TREE_TYPE (type
);
429 if (GFC_DESCRIPTOR_TYPE_P (type
)
430 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
433 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
436 if (gfc_has_alloc_comps (type
, decl
))
442 /* Callback for gfc_omp_unshare_expr. */
445 gfc_omp_unshare_expr_r (tree
*tp
, int *walk_subtrees
, void *)
448 enum tree_code code
= TREE_CODE (t
);
450 /* Stop at types, decls, constants like copy_tree_r. */
451 if (TREE_CODE_CLASS (code
) == tcc_type
452 || TREE_CODE_CLASS (code
) == tcc_declaration
453 || TREE_CODE_CLASS (code
) == tcc_constant
456 else if (handled_component_p (t
)
457 || TREE_CODE (t
) == MEM_REF
)
459 *tp
= unshare_expr (t
);
466 /* Unshare in expr anything that the FE which normally doesn't
467 care much about tree sharing (because during gimplification
468 everything is unshared) could cause problems with tree sharing
469 at omp-low.c time. */
472 gfc_omp_unshare_expr (tree expr
)
474 walk_tree (&expr
, gfc_omp_unshare_expr_r
, NULL
, NULL
);
/* Kinds of per-component processing done by gfc_walk_alloc_comps.  */
enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,
  WALK_ALLOC_COMPS_DEFAULT_CTOR,
  WALK_ALLOC_COMPS_COPY_CTOR
};
485 /* Handle allocatable components in OpenMP clauses. */
488 gfc_walk_alloc_comps (tree decl
, tree dest
, tree var
,
489 enum walk_alloc_comps kind
)
491 stmtblock_t block
, tmpblock
;
492 tree type
= TREE_TYPE (decl
), then_b
, tem
, field
;
493 gfc_init_block (&block
);
495 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
497 if (GFC_DESCRIPTOR_TYPE_P (type
))
499 gfc_init_block (&tmpblock
);
500 tem
= gfc_full_array_size (&tmpblock
, decl
,
501 GFC_TYPE_ARRAY_RANK (type
));
502 then_b
= gfc_finish_block (&tmpblock
);
503 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (then_b
));
504 tem
= gfc_omp_unshare_expr (tem
);
505 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
506 gfc_array_index_type
, tem
,
511 bool compute_nelts
= false;
512 if (!TYPE_DOMAIN (type
)
513 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
514 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
515 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
516 compute_nelts
= true;
517 else if (VAR_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))))
519 tree a
= DECL_ATTRIBUTES (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)));
520 if (lookup_attribute ("omp dummy var", a
))
521 compute_nelts
= true;
525 tem
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
526 TYPE_SIZE_UNIT (type
),
527 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
528 tem
= size_binop (MINUS_EXPR
, tem
, size_one_node
);
531 tem
= array_type_nelts (type
);
532 tem
= fold_convert (gfc_array_index_type
, tem
);
535 tree nelems
= gfc_evaluate_now (tem
, &block
);
536 tree index
= gfc_create_var (gfc_array_index_type
, "S");
538 gfc_init_block (&tmpblock
);
539 tem
= gfc_conv_array_data (decl
);
540 tree declvar
= build_fold_indirect_ref_loc (input_location
, tem
);
541 tree declvref
= gfc_build_array_ref (declvar
, index
, NULL
);
542 tree destvar
, destvref
= NULL_TREE
;
545 tem
= gfc_conv_array_data (dest
);
546 destvar
= build_fold_indirect_ref_loc (input_location
, tem
);
547 destvref
= gfc_build_array_ref (destvar
, index
, NULL
);
549 gfc_add_expr_to_block (&tmpblock
,
550 gfc_walk_alloc_comps (declvref
, destvref
,
554 gfc_init_loopinfo (&loop
);
556 loop
.from
[0] = gfc_index_zero_node
;
557 loop
.loopvar
[0] = index
;
559 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
560 gfc_add_block_to_block (&block
, &loop
.pre
);
561 return gfc_finish_block (&block
);
563 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var
))
565 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
567 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
568 type
= TREE_TYPE (decl
);
571 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
);
572 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
574 tree ftype
= TREE_TYPE (field
);
575 tree declf
, destf
= NULL_TREE
;
576 bool has_alloc_comps
= gfc_has_alloc_comps (ftype
, field
);
577 if ((!GFC_DESCRIPTOR_TYPE_P (ftype
)
578 || GFC_TYPE_ARRAY_AKIND (ftype
) != GFC_ARRAY_ALLOCATABLE
)
579 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field
)
582 declf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
583 decl
, field
, NULL_TREE
);
585 destf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
586 dest
, field
, NULL_TREE
);
591 case WALK_ALLOC_COMPS_DTOR
:
593 case WALK_ALLOC_COMPS_DEFAULT_CTOR
:
594 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
595 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
597 gfc_add_modify (&block
, unshare_expr (destf
),
598 unshare_expr (declf
));
599 tem
= gfc_duplicate_allocatable_nocopy
600 (destf
, declf
, ftype
,
601 GFC_TYPE_ARRAY_RANK (ftype
));
603 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
604 tem
= gfc_duplicate_allocatable_nocopy (destf
, declf
, ftype
, 0);
606 case WALK_ALLOC_COMPS_COPY_CTOR
:
607 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
608 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
609 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
,
610 GFC_TYPE_ARRAY_RANK (ftype
),
612 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
613 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
, 0,
618 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
621 gfc_init_block (&tmpblock
);
622 gfc_add_expr_to_block (&tmpblock
,
623 gfc_walk_alloc_comps (declf
, destf
,
625 then_b
= gfc_finish_block (&tmpblock
);
626 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
627 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
628 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
629 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
630 tem
= unshare_expr (declf
);
635 tem
= fold_convert (pvoid_type_node
, tem
);
636 tem
= fold_build2_loc (input_location
, NE_EXPR
,
637 logical_type_node
, tem
,
639 then_b
= build3_loc (input_location
, COND_EXPR
, void_type_node
,
641 build_empty_stmt (input_location
));
643 gfc_add_expr_to_block (&block
, then_b
);
645 if (kind
== WALK_ALLOC_COMPS_DTOR
)
647 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
648 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
650 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
651 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
,
652 NULL_TREE
, NULL_TREE
, true,
654 GFC_CAF_COARRAY_NOCOARRAY
);
655 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
657 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
659 tem
= gfc_call_free (unshare_expr (declf
));
660 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
665 return gfc_finish_block (&block
);
668 /* Return code to initialize DECL with its default constructor, or
669 NULL if there's nothing to do. */
672 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
674 tree type
= TREE_TYPE (decl
), size
, ptr
, cond
, then_b
, else_b
;
675 stmtblock_t block
, cond_block
;
677 switch (OMP_CLAUSE_CODE (clause
))
679 case OMP_CLAUSE__LOOPTEMP_
:
680 case OMP_CLAUSE__REDUCTEMP_
:
681 case OMP_CLAUSE__CONDTEMP_
:
682 case OMP_CLAUSE__SCANTEMP_
:
684 case OMP_CLAUSE_PRIVATE
:
685 case OMP_CLAUSE_LASTPRIVATE
:
686 case OMP_CLAUSE_LINEAR
:
687 case OMP_CLAUSE_REDUCTION
:
688 case OMP_CLAUSE_IN_REDUCTION
:
689 case OMP_CLAUSE_TASK_REDUCTION
:
695 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
696 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
697 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
698 || !POINTER_TYPE_P (type
)))
700 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
703 gfc_start_block (&block
);
704 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
705 OMP_CLAUSE_DECL (clause
),
706 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
707 gfc_add_expr_to_block (&block
, tem
);
708 return gfc_finish_block (&block
);
713 gcc_assert (outer
!= NULL_TREE
);
715 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
716 "not currently allocated" allocation status if outer
717 array is "not currently allocated", otherwise should be allocated. */
718 gfc_start_block (&block
);
720 gfc_init_block (&cond_block
);
722 if (GFC_DESCRIPTOR_TYPE_P (type
))
724 gfc_add_modify (&cond_block
, decl
, outer
);
725 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
726 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
727 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
729 gfc_conv_descriptor_lbound_get (decl
, rank
));
730 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
731 size
, gfc_index_one_node
);
732 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
733 size
= fold_build2_loc (input_location
, MULT_EXPR
,
734 gfc_array_index_type
, size
,
735 gfc_conv_descriptor_stride_get (decl
, rank
));
736 tree esize
= fold_convert (gfc_array_index_type
,
737 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
738 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
740 size
= unshare_expr (size
);
741 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
745 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
746 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
747 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
748 if (GFC_DESCRIPTOR_TYPE_P (type
))
749 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
), ptr
);
751 gfc_add_modify (&cond_block
, unshare_expr (decl
),
752 fold_convert (TREE_TYPE (decl
), ptr
));
753 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
755 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
756 OMP_CLAUSE_DECL (clause
),
757 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
758 gfc_add_expr_to_block (&cond_block
, tem
);
760 then_b
= gfc_finish_block (&cond_block
);
762 /* Reduction clause requires allocated ALLOCATABLE. */
763 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_REDUCTION
764 && OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_IN_REDUCTION
765 && OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_TASK_REDUCTION
)
767 gfc_init_block (&cond_block
);
768 if (GFC_DESCRIPTOR_TYPE_P (type
))
769 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
),
772 gfc_add_modify (&cond_block
, unshare_expr (decl
),
773 build_zero_cst (TREE_TYPE (decl
)));
774 else_b
= gfc_finish_block (&cond_block
);
776 tree tem
= fold_convert (pvoid_type_node
,
777 GFC_DESCRIPTOR_TYPE_P (type
)
778 ? gfc_conv_descriptor_data_get (outer
) : outer
);
779 tem
= unshare_expr (tem
);
780 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
781 tem
, null_pointer_node
);
782 gfc_add_expr_to_block (&block
,
783 build3_loc (input_location
, COND_EXPR
,
784 void_type_node
, cond
, then_b
,
786 /* Avoid -W*uninitialized warnings. */
788 suppress_warning (decl
, OPT_Wuninitialized
);
791 gfc_add_expr_to_block (&block
, then_b
);
793 return gfc_finish_block (&block
);
796 /* Build and return code for a copy constructor from SRC to DEST. */
799 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
801 tree type
= TREE_TYPE (dest
), ptr
, size
, call
;
802 tree decl_type
= TREE_TYPE (OMP_CLAUSE_DECL (clause
));
803 tree cond
, then_b
, else_b
;
804 stmtblock_t block
, cond_block
;
806 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
807 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
809 if (DECL_ARTIFICIAL (OMP_CLAUSE_DECL (clause
))
810 && DECL_LANG_SPECIFIC (OMP_CLAUSE_DECL (clause
))
811 && GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)))
813 = TREE_TYPE (GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)));
815 if (gfc_is_polymorphic_nonptr (decl_type
))
817 if (POINTER_TYPE_P (decl_type
))
818 decl_type
= TREE_TYPE (decl_type
);
819 decl_type
= TREE_TYPE (TYPE_FIELDS (decl_type
));
820 if (GFC_DESCRIPTOR_TYPE_P (decl_type
) || GFC_ARRAY_TYPE_P (decl_type
))
821 fatal_error (input_location
,
822 "Sorry, polymorphic arrays not yet supported for "
825 tree nelems
= build_int_cst (size_type_node
, 1); /* Scalar. */
826 tree src_data
= gfc_class_data_get (unshare_expr (src
));
827 tree dest_data
= gfc_class_data_get (unshare_expr (dest
));
828 bool unlimited
= gfc_is_unlimited_polymorphic_nonptr (type
);
830 gfc_start_block (&block
);
831 gfc_add_modify (&block
, gfc_class_vptr_get (dest
),
832 gfc_class_vptr_get (src
));
833 gfc_init_block (&cond_block
);
837 src_len
= gfc_class_len_get (src
);
838 gfc_add_modify (&cond_block
, gfc_class_len_get (unshare_expr (dest
)), src_len
);
841 /* Use: size = class._vtab._size * (class._len > 0 ? class._len : 1). */
842 size
= fold_convert (size_type_node
, gfc_class_vtab_size_get (src
));
845 cond
= fold_build2_loc (input_location
, GT_EXPR
, boolean_type_node
,
846 unshare_expr (src_len
),
847 build_zero_cst (TREE_TYPE (src_len
)));
848 cond
= build3_loc (input_location
, COND_EXPR
, size_type_node
, cond
,
849 fold_convert (size_type_node
,
850 unshare_expr (src_len
)),
851 build_int_cst (size_type_node
, 1));
852 size
= fold_build2_loc (input_location
, MULT_EXPR
, size_type_node
,
856 /* Malloc memory + call class->_vpt->_copy. */
857 call
= builtin_decl_explicit (BUILT_IN_MALLOC
);
858 call
= build_call_expr_loc (input_location
, call
, 1, size
);
859 gfc_add_modify (&cond_block
, dest_data
,
860 fold_convert (TREE_TYPE (dest_data
), call
));
861 gfc_add_expr_to_block (&cond_block
,
862 gfc_copy_class_to_class (src
, dest
, nelems
,
865 gcc_assert (TREE_CODE (dest_data
) == COMPONENT_REF
);
866 if (!GFC_DECL_GET_SCALAR_ALLOCATABLE (TREE_OPERAND (dest_data
, 1)))
868 gfc_add_block_to_block (&block
, &cond_block
);
872 /* Create: if (class._data != 0) <cond_block> else class._data = NULL; */
873 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
874 src_data
, null_pointer_node
);
875 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
876 void_type_node
, cond
,
877 gfc_finish_block (&cond_block
),
878 fold_build2_loc (input_location
, MODIFY_EXPR
, void_type_node
,
879 unshare_expr (dest_data
), null_pointer_node
)));
881 return gfc_finish_block (&block
);
884 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
885 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
886 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
887 || !POINTER_TYPE_P (type
)))
889 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
891 gfc_start_block (&block
);
892 gfc_add_modify (&block
, dest
, src
);
893 tree tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
894 WALK_ALLOC_COMPS_COPY_CTOR
);
895 gfc_add_expr_to_block (&block
, tem
);
896 return gfc_finish_block (&block
);
899 return build2_v (MODIFY_EXPR
, dest
, src
);
902 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
903 and copied from SRC. */
904 gfc_start_block (&block
);
906 gfc_init_block (&cond_block
);
908 gfc_add_modify (&cond_block
, dest
, fold_convert (TREE_TYPE (dest
), src
));
909 if (GFC_DESCRIPTOR_TYPE_P (type
))
911 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
912 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
913 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
915 gfc_conv_descriptor_lbound_get (dest
, rank
));
916 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
917 size
, gfc_index_one_node
);
918 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
919 size
= fold_build2_loc (input_location
, MULT_EXPR
,
920 gfc_array_index_type
, size
,
921 gfc_conv_descriptor_stride_get (dest
, rank
));
922 tree esize
= fold_convert (gfc_array_index_type
,
923 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
924 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
926 size
= unshare_expr (size
);
927 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
931 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
932 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
933 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
934 if (GFC_DESCRIPTOR_TYPE_P (type
))
935 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
), ptr
);
937 gfc_add_modify (&cond_block
, unshare_expr (dest
),
938 fold_convert (TREE_TYPE (dest
), ptr
));
940 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
941 ? gfc_conv_descriptor_data_get (src
) : src
;
942 srcptr
= unshare_expr (srcptr
);
943 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
944 call
= build_call_expr_loc (input_location
,
945 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
947 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
948 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
950 tree tem
= gfc_walk_alloc_comps (src
, dest
,
951 OMP_CLAUSE_DECL (clause
),
952 WALK_ALLOC_COMPS_COPY_CTOR
);
953 gfc_add_expr_to_block (&cond_block
, tem
);
955 then_b
= gfc_finish_block (&cond_block
);
957 gfc_init_block (&cond_block
);
958 if (GFC_DESCRIPTOR_TYPE_P (type
))
959 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
),
962 gfc_add_modify (&cond_block
, unshare_expr (dest
),
963 build_zero_cst (TREE_TYPE (dest
)));
964 else_b
= gfc_finish_block (&cond_block
);
966 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
967 unshare_expr (srcptr
), null_pointer_node
);
968 gfc_add_expr_to_block (&block
,
969 build3_loc (input_location
, COND_EXPR
,
970 void_type_node
, cond
, then_b
, else_b
));
971 /* Avoid -W*uninitialized warnings. */
973 suppress_warning (dest
, OPT_Wuninitialized
);
975 return gfc_finish_block (&block
);
978 /* Similarly, except use an intrinsic or pointer assignment operator
982 gfc_omp_clause_assign_op (tree clause
, tree dest
, tree src
)
984 tree type
= TREE_TYPE (dest
), ptr
, size
, call
, nonalloc
;
985 tree cond
, then_b
, else_b
;
986 stmtblock_t block
, cond_block
, cond_block2
, inner_block
;
988 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
989 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
990 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
991 || !POINTER_TYPE_P (type
)))
993 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
995 gfc_start_block (&block
);
996 /* First dealloc any allocatable components in DEST. */
997 tree tem
= gfc_walk_alloc_comps (dest
, NULL_TREE
,
998 OMP_CLAUSE_DECL (clause
),
999 WALK_ALLOC_COMPS_DTOR
);
1000 gfc_add_expr_to_block (&block
, tem
);
1001 /* Then copy over toplevel data. */
1002 gfc_add_modify (&block
, dest
, src
);
1003 /* Finally allocate any allocatable components and copy. */
1004 tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
1005 WALK_ALLOC_COMPS_COPY_CTOR
);
1006 gfc_add_expr_to_block (&block
, tem
);
1007 return gfc_finish_block (&block
);
1010 return build2_v (MODIFY_EXPR
, dest
, src
);
1013 gfc_start_block (&block
);
1015 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1017 then_b
= gfc_walk_alloc_comps (dest
, NULL_TREE
, OMP_CLAUSE_DECL (clause
),
1018 WALK_ALLOC_COMPS_DTOR
);
1019 tree tem
= fold_convert (pvoid_type_node
,
1020 GFC_DESCRIPTOR_TYPE_P (type
)
1021 ? gfc_conv_descriptor_data_get (dest
) : dest
);
1022 tem
= unshare_expr (tem
);
1023 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1024 tem
, null_pointer_node
);
1025 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1026 then_b
, build_empty_stmt (input_location
));
1027 gfc_add_expr_to_block (&block
, tem
);
1030 gfc_init_block (&cond_block
);
1032 if (GFC_DESCRIPTOR_TYPE_P (type
))
1034 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
1035 size
= gfc_conv_descriptor_ubound_get (src
, rank
);
1036 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
1038 gfc_conv_descriptor_lbound_get (src
, rank
));
1039 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
1040 size
, gfc_index_one_node
);
1041 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
1042 size
= fold_build2_loc (input_location
, MULT_EXPR
,
1043 gfc_array_index_type
, size
,
1044 gfc_conv_descriptor_stride_get (src
, rank
));
1045 tree esize
= fold_convert (gfc_array_index_type
,
1046 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
1047 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
1049 size
= unshare_expr (size
);
1050 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
1054 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
1055 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
1057 tree destptr
= GFC_DESCRIPTOR_TYPE_P (type
)
1058 ? gfc_conv_descriptor_data_get (dest
) : dest
;
1059 destptr
= unshare_expr (destptr
);
1060 destptr
= fold_convert (pvoid_type_node
, destptr
);
1061 gfc_add_modify (&cond_block
, ptr
, destptr
);
1063 nonalloc
= fold_build2_loc (input_location
, EQ_EXPR
, logical_type_node
,
1064 destptr
, null_pointer_node
);
1066 if (GFC_DESCRIPTOR_TYPE_P (type
))
1069 for (i
= 0; i
< GFC_TYPE_ARRAY_RANK (type
); i
++)
1071 tree rank
= gfc_rank_cst
[i
];
1072 tree tem
= gfc_conv_descriptor_ubound_get (src
, rank
);
1073 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
1074 gfc_array_index_type
, tem
,
1075 gfc_conv_descriptor_lbound_get (src
, rank
));
1076 tem
= fold_build2_loc (input_location
, PLUS_EXPR
,
1077 gfc_array_index_type
, tem
,
1078 gfc_conv_descriptor_lbound_get (dest
, rank
));
1079 tem
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1080 tem
, gfc_conv_descriptor_ubound_get (dest
,
1082 cond
= fold_build2_loc (input_location
, TRUTH_ORIF_EXPR
,
1083 logical_type_node
, cond
, tem
);
1087 gfc_init_block (&cond_block2
);
1089 if (GFC_DESCRIPTOR_TYPE_P (type
))
1091 gfc_init_block (&inner_block
);
1092 gfc_allocate_using_malloc (&inner_block
, ptr
, size
, NULL_TREE
);
1093 then_b
= gfc_finish_block (&inner_block
);
1095 gfc_init_block (&inner_block
);
1096 gfc_add_modify (&inner_block
, ptr
,
1097 gfc_call_realloc (&inner_block
, ptr
, size
));
1098 else_b
= gfc_finish_block (&inner_block
);
1100 gfc_add_expr_to_block (&cond_block2
,
1101 build3_loc (input_location
, COND_EXPR
,
1103 unshare_expr (nonalloc
),
1105 gfc_add_modify (&cond_block2
, dest
, src
);
1106 gfc_conv_descriptor_data_set (&cond_block2
, unshare_expr (dest
), ptr
);
1110 gfc_allocate_using_malloc (&cond_block2
, ptr
, size
, NULL_TREE
);
1111 gfc_add_modify (&cond_block2
, unshare_expr (dest
),
1112 fold_convert (type
, ptr
));
1114 then_b
= gfc_finish_block (&cond_block2
);
1115 else_b
= build_empty_stmt (input_location
);
1117 gfc_add_expr_to_block (&cond_block
,
1118 build3_loc (input_location
, COND_EXPR
,
1119 void_type_node
, unshare_expr (cond
),
1122 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
1123 ? gfc_conv_descriptor_data_get (src
) : src
;
1124 srcptr
= unshare_expr (srcptr
);
1125 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
1126 call
= build_call_expr_loc (input_location
,
1127 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
1129 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
1130 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1132 tree tem
= gfc_walk_alloc_comps (src
, dest
,
1133 OMP_CLAUSE_DECL (clause
),
1134 WALK_ALLOC_COMPS_COPY_CTOR
);
1135 gfc_add_expr_to_block (&cond_block
, tem
);
1137 then_b
= gfc_finish_block (&cond_block
);
1139 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_COPYIN
)
1141 gfc_init_block (&cond_block
);
1142 if (GFC_DESCRIPTOR_TYPE_P (type
))
1144 tree tmp
= gfc_conv_descriptor_data_get (unshare_expr (dest
));
1145 tmp
= gfc_deallocate_with_status (tmp
, NULL_TREE
, NULL_TREE
,
1146 NULL_TREE
, NULL_TREE
, true, NULL
,
1147 GFC_CAF_COARRAY_NOCOARRAY
);
1148 gfc_add_expr_to_block (&cond_block
, tmp
);
1152 destptr
= gfc_evaluate_now (destptr
, &cond_block
);
1153 gfc_add_expr_to_block (&cond_block
, gfc_call_free (destptr
));
1154 gfc_add_modify (&cond_block
, unshare_expr (dest
),
1155 build_zero_cst (TREE_TYPE (dest
)));
1157 else_b
= gfc_finish_block (&cond_block
);
1159 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1160 unshare_expr (srcptr
), null_pointer_node
);
1161 gfc_add_expr_to_block (&block
,
1162 build3_loc (input_location
, COND_EXPR
,
1163 void_type_node
, cond
,
1167 gfc_add_expr_to_block (&block
, then_b
);
1169 return gfc_finish_block (&block
);
1173 gfc_omp_linear_clause_add_loop (stmtblock_t
*block
, tree dest
, tree src
,
1174 tree add
, tree nelems
)
1176 stmtblock_t tmpblock
;
1177 tree desta
, srca
, index
= gfc_create_var (gfc_array_index_type
, "S");
1178 nelems
= gfc_evaluate_now (nelems
, block
);
1180 gfc_init_block (&tmpblock
);
1181 if (TREE_CODE (TREE_TYPE (dest
)) == ARRAY_TYPE
)
1183 desta
= gfc_build_array_ref (dest
, index
, NULL
);
1184 srca
= gfc_build_array_ref (src
, index
, NULL
);
1188 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest
)));
1189 tree idx
= fold_build2 (MULT_EXPR
, sizetype
,
1190 fold_convert (sizetype
, index
),
1191 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest
))));
1192 desta
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
1193 TREE_TYPE (dest
), dest
,
1195 srca
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
1196 TREE_TYPE (src
), src
,
1199 gfc_add_modify (&tmpblock
, desta
,
1200 fold_build2 (PLUS_EXPR
, TREE_TYPE (desta
),
1204 gfc_init_loopinfo (&loop
);
1206 loop
.from
[0] = gfc_index_zero_node
;
1207 loop
.loopvar
[0] = index
;
1208 loop
.to
[0] = nelems
;
1209 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
1210 gfc_add_block_to_block (block
, &loop
.pre
);
1213 /* Build and return code for a constructor of DEST that initializes
1214 it to SRC plus ADD (ADD is scalar integer). */
1217 gfc_omp_clause_linear_ctor (tree clause
, tree dest
, tree src
, tree add
)
1219 tree type
= TREE_TYPE (dest
), ptr
, size
, nelems
= NULL_TREE
;
1222 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
1224 gfc_start_block (&block
);
1225 add
= gfc_evaluate_now (add
, &block
);
1227 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
1228 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
1229 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
1230 || !POINTER_TYPE_P (type
)))
1232 bool compute_nelts
= false;
1233 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
1234 if (!TYPE_DOMAIN (type
)
1235 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
1236 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
1237 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
1238 compute_nelts
= true;
1239 else if (VAR_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))))
1241 tree a
= DECL_ATTRIBUTES (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)));
1242 if (lookup_attribute ("omp dummy var", a
))
1243 compute_nelts
= true;
1247 nelems
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
1248 TYPE_SIZE_UNIT (type
),
1249 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
1250 nelems
= size_binop (MINUS_EXPR
, nelems
, size_one_node
);
1253 nelems
= array_type_nelts (type
);
1254 nelems
= fold_convert (gfc_array_index_type
, nelems
);
1256 gfc_omp_linear_clause_add_loop (&block
, dest
, src
, add
, nelems
);
1257 return gfc_finish_block (&block
);
1260 /* Allocatable arrays in LINEAR clauses need to be allocated
1261 and copied from SRC. */
1262 gfc_add_modify (&block
, dest
, src
);
1263 if (GFC_DESCRIPTOR_TYPE_P (type
))
1265 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
1266 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
1267 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
1269 gfc_conv_descriptor_lbound_get (dest
, rank
));
1270 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
1271 size
, gfc_index_one_node
);
1272 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
1273 size
= fold_build2_loc (input_location
, MULT_EXPR
,
1274 gfc_array_index_type
, size
,
1275 gfc_conv_descriptor_stride_get (dest
, rank
));
1276 tree esize
= fold_convert (gfc_array_index_type
,
1277 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
1278 nelems
= gfc_evaluate_now (unshare_expr (size
), &block
);
1279 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
1280 nelems
, unshare_expr (esize
));
1281 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
1283 nelems
= fold_build2_loc (input_location
, MINUS_EXPR
,
1284 gfc_array_index_type
, nelems
,
1285 gfc_index_one_node
);
1288 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
1289 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
1290 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
1291 if (GFC_DESCRIPTOR_TYPE_P (type
))
1293 gfc_conv_descriptor_data_set (&block
, unshare_expr (dest
), ptr
);
1294 tree etype
= gfc_get_element_type (type
);
1295 ptr
= fold_convert (build_pointer_type (etype
), ptr
);
1296 tree srcptr
= gfc_conv_descriptor_data_get (unshare_expr (src
));
1297 srcptr
= fold_convert (build_pointer_type (etype
), srcptr
);
1298 gfc_omp_linear_clause_add_loop (&block
, ptr
, srcptr
, add
, nelems
);
1302 gfc_add_modify (&block
, unshare_expr (dest
),
1303 fold_convert (TREE_TYPE (dest
), ptr
));
1304 ptr
= fold_convert (TREE_TYPE (dest
), ptr
);
1305 tree dstm
= build_fold_indirect_ref (ptr
);
1306 tree srcm
= build_fold_indirect_ref (unshare_expr (src
));
1307 gfc_add_modify (&block
, dstm
,
1308 fold_build2 (PLUS_EXPR
, TREE_TYPE (add
), srcm
, add
));
1310 return gfc_finish_block (&block
);
1313 /* Build and return code destructing DECL. Return NULL if nothing
1317 gfc_omp_clause_dtor (tree clause
, tree decl
)
1319 tree type
= TREE_TYPE (decl
), tem
;
1320 tree decl_type
= TREE_TYPE (OMP_CLAUSE_DECL (clause
));
1322 if (DECL_ARTIFICIAL (OMP_CLAUSE_DECL (clause
))
1323 && DECL_LANG_SPECIFIC (OMP_CLAUSE_DECL (clause
))
1324 && GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)))
1326 = TREE_TYPE (GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)));
1327 if (gfc_is_polymorphic_nonptr (decl_type
))
1329 if (POINTER_TYPE_P (decl_type
))
1330 decl_type
= TREE_TYPE (decl_type
);
1331 decl_type
= TREE_TYPE (TYPE_FIELDS (decl_type
));
1332 if (GFC_DESCRIPTOR_TYPE_P (decl_type
) || GFC_ARRAY_TYPE_P (decl_type
))
1333 fatal_error (input_location
,
1334 "Sorry, polymorphic arrays not yet supported for "
1336 stmtblock_t block
, cond_block
;
1337 gfc_start_block (&block
);
1338 gfc_init_block (&cond_block
);
1339 tree final
= gfc_class_vtab_final_get (decl
);
1340 tree size
= fold_convert (size_type_node
, gfc_class_vtab_size_get (decl
));
1342 gfc_init_se (&se
, NULL
);
1343 symbol_attribute attr
= {};
1344 tree data
= gfc_class_data_get (decl
);
1345 tree desc
= gfc_conv_scalar_to_descriptor (&se
, data
, attr
);
1347 /* Call class->_vpt->_finalize + free. */
1348 tree call
= build_fold_indirect_ref (final
);
1349 call
= build_call_expr_loc (input_location
, call
, 3,
1350 gfc_build_addr_expr (NULL
, desc
),
1351 size
, boolean_false_node
);
1352 gfc_add_block_to_block (&cond_block
, &se
.pre
);
1353 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
1354 gfc_add_block_to_block (&cond_block
, &se
.post
);
1355 /* Create: if (_vtab && _final) <cond_block> */
1356 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1357 gfc_class_vptr_get (decl
),
1359 tree cond2
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1360 final
, null_pointer_node
);
1361 cond
= fold_build2_loc (input_location
, TRUTH_ANDIF_EXPR
,
1362 boolean_type_node
, cond
, cond2
);
1363 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1364 void_type_node
, cond
,
1365 gfc_finish_block (&cond_block
), NULL_TREE
));
1366 call
= builtin_decl_explicit (BUILT_IN_FREE
);
1367 call
= build_call_expr_loc (input_location
, call
, 1, data
);
1368 gfc_add_expr_to_block (&block
, fold_convert (void_type_node
, call
));
1369 return gfc_finish_block (&block
);
1372 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
1373 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
1374 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
1375 || !POINTER_TYPE_P (type
)))
1377 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1378 return gfc_walk_alloc_comps (decl
, NULL_TREE
,
1379 OMP_CLAUSE_DECL (clause
),
1380 WALK_ALLOC_COMPS_DTOR
);
1384 if (GFC_DESCRIPTOR_TYPE_P (type
))
1386 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
1387 to be deallocated if they were allocated. */
1388 tem
= gfc_conv_descriptor_data_get (decl
);
1389 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
, NULL_TREE
,
1390 NULL_TREE
, true, NULL
,
1391 GFC_CAF_COARRAY_NOCOARRAY
);
1394 tem
= gfc_call_free (decl
);
1395 tem
= gfc_omp_unshare_expr (tem
);
1397 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1402 gfc_init_block (&block
);
1403 gfc_add_expr_to_block (&block
,
1404 gfc_walk_alloc_comps (decl
, NULL_TREE
,
1405 OMP_CLAUSE_DECL (clause
),
1406 WALK_ALLOC_COMPS_DTOR
));
1407 gfc_add_expr_to_block (&block
, tem
);
1408 then_b
= gfc_finish_block (&block
);
1410 tem
= fold_convert (pvoid_type_node
,
1411 GFC_DESCRIPTOR_TYPE_P (type
)
1412 ? gfc_conv_descriptor_data_get (decl
) : decl
);
1413 tem
= unshare_expr (tem
);
1414 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1415 tem
, null_pointer_node
);
1416 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1417 then_b
, build_empty_stmt (input_location
));
1422 /* Build a conditional expression in BLOCK. If COND_VAL is not
1423 null, then the block THEN_B is executed, otherwise ELSE_VAL
1424 is assigned to VAL. */
1427 gfc_build_cond_assign (stmtblock_t
*block
, tree val
, tree cond_val
,
1428 tree then_b
, tree else_val
)
1430 stmtblock_t cond_block
;
1431 tree else_b
= NULL_TREE
;
1432 tree val_ty
= TREE_TYPE (val
);
1436 gfc_init_block (&cond_block
);
1437 gfc_add_modify (&cond_block
, val
, fold_convert (val_ty
, else_val
));
1438 else_b
= gfc_finish_block (&cond_block
);
1440 gfc_add_expr_to_block (block
,
1441 build3_loc (input_location
, COND_EXPR
, void_type_node
,
1442 cond_val
, then_b
, else_b
));
1445 /* Build a conditional expression in BLOCK, returning a temporary
1446 variable containing the result. If COND_VAL is not null, then
1447 THEN_VAL will be assigned to the variable, otherwise ELSE_VAL
1452 gfc_build_cond_assign_expr (stmtblock_t
*block
, tree cond_val
,
1453 tree then_val
, tree else_val
)
1456 tree val_ty
= TREE_TYPE (then_val
);
1457 stmtblock_t cond_block
;
1459 val
= create_tmp_var (val_ty
);
1461 gfc_init_block (&cond_block
);
1462 gfc_add_modify (&cond_block
, val
, then_val
);
1463 tree then_b
= gfc_finish_block (&cond_block
);
1465 gfc_build_cond_assign (block
, val
, cond_val
, then_b
, else_val
);
1471 gfc_omp_finish_clause (tree c
, gimple_seq
*pre_p
, bool openacc
)
1473 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
1476 tree decl
= OMP_CLAUSE_DECL (c
);
1478 /* Assumed-size arrays can't be mapped implicitly, they have to be
1479 mapped explicitly using array sections. */
1480 if (TREE_CODE (decl
) == PARM_DECL
1481 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
1482 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
1483 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
1484 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
1487 error_at (OMP_CLAUSE_LOCATION (c
),
1488 "implicit mapping of assumed size array %qD", decl
);
1492 tree c2
= NULL_TREE
, c3
= NULL_TREE
, c4
= NULL_TREE
;
1493 tree present
= gfc_omp_check_optional_argument (decl
, true);
1494 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
1496 if (!gfc_omp_privatize_by_reference (decl
)
1497 && !GFC_DECL_GET_SCALAR_POINTER (decl
)
1498 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1499 && !GFC_DECL_CRAY_POINTEE (decl
)
1500 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
1502 tree orig_decl
= decl
;
1504 c4
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1505 OMP_CLAUSE_SET_MAP_KIND (c4
, GOMP_MAP_POINTER
);
1506 OMP_CLAUSE_DECL (c4
) = decl
;
1507 OMP_CLAUSE_SIZE (c4
) = size_int (0);
1508 decl
= build_fold_indirect_ref (decl
);
1510 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1511 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1513 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1514 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_POINTER
);
1515 OMP_CLAUSE_DECL (c2
) = decl
;
1516 OMP_CLAUSE_SIZE (c2
) = size_int (0);
1519 gfc_start_block (&block
);
1521 ptr
= gfc_build_cond_assign_expr (&block
, present
, decl
,
1523 gimplify_and_add (gfc_finish_block (&block
), pre_p
);
1524 ptr
= build_fold_indirect_ref (ptr
);
1525 OMP_CLAUSE_DECL (c
) = ptr
;
1526 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
1530 OMP_CLAUSE_DECL (c
) = decl
;
1531 OMP_CLAUSE_SIZE (c
) = NULL_TREE
;
1533 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1534 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1535 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1537 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1538 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1539 OMP_CLAUSE_DECL (c3
) = unshare_expr (decl
);
1540 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1541 decl
= build_fold_indirect_ref (decl
);
1542 OMP_CLAUSE_DECL (c
) = decl
;
1545 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1548 gfc_start_block (&block
);
1549 tree type
= TREE_TYPE (decl
);
1550 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1552 /* OpenMP: automatically map pointer targets with the pointer;
1553 hence, always update the descriptor/pointer itself.
1554 NOTE: This also remaps the pointer for allocatable arrays with
1555 'target' attribute which also don't have the 'restrict' qualifier. */
1556 bool always_modifier
= false;
1559 && !(TYPE_QUALS (TREE_TYPE (ptr
)) & TYPE_QUAL_RESTRICT
))
1560 always_modifier
= true;
1563 ptr
= gfc_build_cond_assign_expr (&block
, present
, ptr
,
1565 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
1566 ptr
= build_fold_indirect_ref (ptr
);
1567 OMP_CLAUSE_DECL (c
) = ptr
;
1568 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1569 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_TO_PSET
);
1572 ptr
= create_tmp_var (TREE_TYPE (TREE_OPERAND (decl
, 0)));
1573 gfc_add_modify (&block
, ptr
, TREE_OPERAND (decl
, 0));
1575 OMP_CLAUSE_DECL (c2
) = build_fold_indirect_ref (ptr
);
1578 OMP_CLAUSE_DECL (c2
) = decl
;
1579 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (type
);
1580 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1581 OMP_CLAUSE_SET_MAP_KIND (c3
, always_modifier
? GOMP_MAP_ALWAYS_POINTER
1582 : GOMP_MAP_POINTER
);
1585 ptr
= gfc_conv_descriptor_data_get (decl
);
1586 ptr
= gfc_build_addr_expr (NULL
, ptr
);
1587 ptr
= gfc_build_cond_assign_expr (&block
, present
,
1588 ptr
, null_pointer_node
);
1589 ptr
= build_fold_indirect_ref (ptr
);
1590 OMP_CLAUSE_DECL (c3
) = ptr
;
1593 OMP_CLAUSE_DECL (c3
) = gfc_conv_descriptor_data_get (decl
);
1594 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1595 tree size
= create_tmp_var (gfc_array_index_type
);
1596 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
1597 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
1598 if (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
1599 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
)
1601 stmtblock_t cond_block
;
1602 tree tem
, then_b
, else_b
, zero
, cond
;
1604 gfc_init_block (&cond_block
);
1605 tem
= gfc_full_array_size (&cond_block
, decl
,
1606 GFC_TYPE_ARRAY_RANK (type
));
1607 gfc_add_modify (&cond_block
, size
, tem
);
1608 gfc_add_modify (&cond_block
, size
,
1609 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1611 then_b
= gfc_finish_block (&cond_block
);
1612 gfc_init_block (&cond_block
);
1613 zero
= build_int_cst (gfc_array_index_type
, 0);
1614 gfc_add_modify (&cond_block
, size
, zero
);
1615 else_b
= gfc_finish_block (&cond_block
);
1616 tem
= gfc_conv_descriptor_data_get (decl
);
1617 tem
= fold_convert (pvoid_type_node
, tem
);
1618 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1619 boolean_type_node
, tem
, null_pointer_node
);
1622 cond
= fold_build2_loc (input_location
, TRUTH_ANDIF_EXPR
,
1623 boolean_type_node
, present
, cond
);
1625 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1626 void_type_node
, cond
,
1631 stmtblock_t cond_block
;
1634 gfc_init_block (&cond_block
);
1635 gfc_add_modify (&cond_block
, size
,
1636 gfc_full_array_size (&cond_block
, decl
,
1637 GFC_TYPE_ARRAY_RANK (type
)));
1638 gfc_add_modify (&cond_block
, size
,
1639 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1641 then_b
= gfc_finish_block (&cond_block
);
1643 gfc_build_cond_assign (&block
, size
, present
, then_b
,
1644 build_int_cst (gfc_array_index_type
, 0));
1648 gfc_add_modify (&block
, size
,
1649 gfc_full_array_size (&block
, decl
,
1650 GFC_TYPE_ARRAY_RANK (type
)));
1651 gfc_add_modify (&block
, size
,
1652 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1655 OMP_CLAUSE_SIZE (c
) = size
;
1656 tree stmt
= gfc_finish_block (&block
);
1657 gimplify_and_add (stmt
, pre_p
);
1660 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
1662 = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
1663 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1664 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
1665 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
1666 OMP_CLAUSE_SIZE (c
) = size_int (0);
1669 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (last
);
1670 OMP_CLAUSE_CHAIN (last
) = c2
;
1675 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (last
);
1676 OMP_CLAUSE_CHAIN (last
) = c3
;
1681 OMP_CLAUSE_CHAIN (c4
) = OMP_CLAUSE_CHAIN (last
);
1682 OMP_CLAUSE_CHAIN (last
) = c4
;
1687 /* Return true if DECL is a scalar variable (for the purpose of
1688 implicit firstprivatization/mapping). Only if 'ptr_alloc_ok.'
1689 is true, allocatables and pointers are permitted. */
1692 gfc_omp_scalar_p (tree decl
, bool ptr_alloc_ok
)
1694 tree type
= TREE_TYPE (decl
);
1695 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1696 type
= TREE_TYPE (type
);
1697 if (TREE_CODE (type
) == POINTER_TYPE
)
1699 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1700 || GFC_DECL_GET_SCALAR_POINTER (decl
))
1704 type
= TREE_TYPE (type
);
1706 if (GFC_ARRAY_TYPE_P (type
)
1707 || GFC_CLASS_TYPE_P (type
))
1710 if ((TREE_CODE (type
) == ARRAY_TYPE
|| TREE_CODE (type
) == INTEGER_TYPE
)
1711 && TYPE_STRING_FLAG (type
))
1713 if (INTEGRAL_TYPE_P (type
)
1714 || SCALAR_FLOAT_TYPE_P (type
)
1715 || COMPLEX_FLOAT_TYPE_P (type
))
1721 /* Return true if DECL is a scalar with target attribute but does not have the
1722 allocatable (or pointer) attribute (for the purpose of implicit mapping). */
1725 gfc_omp_scalar_target_p (tree decl
)
1727 return (DECL_P (decl
) && GFC_DECL_GET_SCALAR_TARGET (decl
)
1728 && gfc_omp_scalar_p (decl
, false));
1732 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1733 disregarded in OpenMP construct, because it is going to be
1734 remapped during OpenMP lowering. SHARED is true if DECL
1735 is going to be shared, false if it is going to be privatized. */
1738 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
1740 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1741 && DECL_HAS_VALUE_EXPR_P (decl
))
1743 tree value
= DECL_VALUE_EXPR (decl
);
1745 if (TREE_CODE (value
) == COMPONENT_REF
1746 && VAR_P (TREE_OPERAND (value
, 0))
1747 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1749 /* If variable in COMMON or EQUIVALENCE is privatized, return
1750 true, as just that variable is supposed to be privatized,
1751 not the whole COMMON or whole EQUIVALENCE.
1752 For shared variables in COMMON or EQUIVALENCE, let them be
1753 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1754 from the same COMMON or EQUIVALENCE just one sharing of the
1755 whole COMMON or EQUIVALENCE is enough. */
1760 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
1766 /* Return true if DECL that is shared iff SHARED is true should
1767 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1771 gfc_omp_private_debug_clause (tree decl
, bool shared
)
1773 if (GFC_DECL_CRAY_POINTEE (decl
))
1776 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1777 && DECL_HAS_VALUE_EXPR_P (decl
))
1779 tree value
= DECL_VALUE_EXPR (decl
);
1781 if (TREE_CODE (value
) == COMPONENT_REF
1782 && VAR_P (TREE_OPERAND (value
, 0))
1783 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1790 /* Register language specific type size variables as potentially OpenMP
1791 firstprivate variables. */
1794 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
1796 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
1800 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
1801 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
1803 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
1804 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
1805 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
1807 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
1808 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
1814 gfc_trans_add_clause (tree node
, tree tail
)
1816 OMP_CLAUSE_CHAIN (node
) = tail
;
1821 gfc_trans_omp_variable (gfc_symbol
*sym
, bool declare_simd
)
1826 gfc_symbol
*proc_sym
;
1827 gfc_formal_arglist
*f
;
1829 gcc_assert (sym
->attr
.dummy
);
1830 proc_sym
= sym
->ns
->proc_name
;
1831 if (proc_sym
->attr
.entry_master
)
1833 if (gfc_return_by_reference (proc_sym
))
1836 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1839 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1845 return build_int_cst (integer_type_node
, cnt
);
1848 tree t
= gfc_get_symbol_decl (sym
);
1852 bool alternate_entry
;
1855 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1856 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1857 && sym
->result
== sym
;
1858 entry_master
= sym
->attr
.result
1859 && sym
->ns
->proc_name
->attr
.entry_master
1860 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1861 parent_decl
= current_function_decl
1862 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1864 if ((t
== parent_decl
&& return_value
)
1865 || (sym
->ns
&& sym
->ns
->proc_name
1866 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1867 && (alternate_entry
|| entry_master
)))
1872 /* Special case for assigning the return value of a function.
1873 Self recursive functions must have an explicit return value. */
1874 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1875 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1877 /* Similarly for alternate entry points. */
1878 else if (alternate_entry
1879 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1882 gfc_entry_list
*el
= NULL
;
1884 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1887 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1892 else if (entry_master
1893 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1895 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1901 gfc_trans_omp_variable_list (enum omp_clause_code code
,
1902 gfc_omp_namelist
*namelist
, tree list
,
1905 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1906 if (namelist
->sym
->attr
.referenced
|| declare_simd
)
1908 tree t
= gfc_trans_omp_variable (namelist
->sym
, declare_simd
);
1909 if (t
!= error_mark_node
)
1911 tree node
= build_omp_clause (input_location
, code
);
1912 OMP_CLAUSE_DECL (node
) = t
;
1913 list
= gfc_trans_add_clause (node
, list
);
1915 if (code
== OMP_CLAUSE_LASTPRIVATE
1916 && namelist
->u
.lastprivate_conditional
)
1917 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (node
) = 1;
1923 struct omp_udr_find_orig_data
1925 gfc_omp_udr
*omp_udr
;
1930 omp_udr_find_orig (gfc_expr
**e
, int *walk_subtrees ATTRIBUTE_UNUSED
,
1933 struct omp_udr_find_orig_data
*cd
= (struct omp_udr_find_orig_data
*) data
;
1934 if ((*e
)->expr_type
== EXPR_VARIABLE
1935 && (*e
)->symtree
->n
.sym
== cd
->omp_udr
->omp_orig
)
1936 cd
->omp_orig_seen
= true;
1942 gfc_trans_omp_array_reduction_or_udr (tree c
, gfc_omp_namelist
*n
, locus where
)
1944 gfc_symbol
*sym
= n
->sym
;
1945 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
1946 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
1947 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
1948 gfc_symbol omp_var_copy
[4];
1949 gfc_expr
*e1
, *e2
, *e3
, *e4
;
1951 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
1952 locus old_loc
= gfc_current_locus
;
1955 gfc_omp_udr
*udr
= n
->u2
.udr
? n
->u2
.udr
->udr
: NULL
;
1957 decl
= OMP_CLAUSE_DECL (c
);
1958 gfc_current_locus
= where
;
1959 type
= TREE_TYPE (decl
);
1960 outer_decl
= create_tmp_var_raw (type
);
1961 if (TREE_CODE (decl
) == PARM_DECL
1962 && TREE_CODE (type
) == REFERENCE_TYPE
1963 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
1964 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
1966 decl
= build_fold_indirect_ref (decl
);
1967 type
= TREE_TYPE (type
);
1970 /* Create a fake symbol for init value. */
1971 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
1972 init_val_sym
.ns
= sym
->ns
;
1973 init_val_sym
.name
= sym
->name
;
1974 init_val_sym
.ts
= sym
->ts
;
1975 init_val_sym
.attr
.referenced
= 1;
1976 init_val_sym
.declared_at
= where
;
1977 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
1978 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1979 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
1980 else if (udr
->initializer_ns
)
1981 backend_decl
= NULL
;
1983 switch (sym
->ts
.type
)
1989 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
1992 backend_decl
= NULL_TREE
;
1995 init_val_sym
.backend_decl
= backend_decl
;
1997 /* Create a fake symbol for the outer array reference. */
2000 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
2001 outer_sym
.attr
.dummy
= 0;
2002 outer_sym
.attr
.result
= 0;
2003 outer_sym
.attr
.flavor
= FL_VARIABLE
;
2004 outer_sym
.backend_decl
= outer_decl
;
2005 if (decl
!= OMP_CLAUSE_DECL (c
))
2006 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
2008 /* Create fake symtrees for it. */
2009 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
2010 symtree1
->n
.sym
= sym
;
2011 gcc_assert (symtree1
== root1
);
2013 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
2014 symtree2
->n
.sym
= &init_val_sym
;
2015 gcc_assert (symtree2
== root2
);
2017 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
2018 symtree3
->n
.sym
= &outer_sym
;
2019 gcc_assert (symtree3
== root3
);
2021 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
2024 omp_var_copy
[0] = *udr
->omp_out
;
2025 omp_var_copy
[1] = *udr
->omp_in
;
2026 *udr
->omp_out
= outer_sym
;
2027 *udr
->omp_in
= *sym
;
2028 if (udr
->initializer_ns
)
2030 omp_var_copy
[2] = *udr
->omp_priv
;
2031 omp_var_copy
[3] = *udr
->omp_orig
;
2032 *udr
->omp_priv
= *sym
;
2033 *udr
->omp_orig
= outer_sym
;
2037 /* Create expressions. */
2038 e1
= gfc_get_expr ();
2039 e1
->expr_type
= EXPR_VARIABLE
;
2041 e1
->symtree
= symtree1
;
2043 if (sym
->attr
.dimension
)
2045 e1
->ref
= ref
= gfc_get_ref ();
2046 ref
->type
= REF_ARRAY
;
2047 ref
->u
.ar
.where
= where
;
2048 ref
->u
.ar
.as
= sym
->as
;
2049 ref
->u
.ar
.type
= AR_FULL
;
2050 ref
->u
.ar
.dimen
= 0;
2052 t
= gfc_resolve_expr (e1
);
2056 if (backend_decl
!= NULL_TREE
)
2058 e2
= gfc_get_expr ();
2059 e2
->expr_type
= EXPR_VARIABLE
;
2061 e2
->symtree
= symtree2
;
2063 t
= gfc_resolve_expr (e2
);
2066 else if (udr
->initializer_ns
== NULL
)
2068 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
2069 e2
= gfc_default_initializer (&sym
->ts
);
2071 t
= gfc_resolve_expr (e2
);
2074 else if (n
->u2
.udr
->initializer
->op
== EXEC_ASSIGN
)
2076 e2
= gfc_copy_expr (n
->u2
.udr
->initializer
->expr2
);
2077 t
= gfc_resolve_expr (e2
);
2080 if (udr
&& udr
->initializer_ns
)
2082 struct omp_udr_find_orig_data cd
;
2084 cd
.omp_orig_seen
= false;
2085 gfc_code_walker (&n
->u2
.udr
->initializer
,
2086 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
2087 if (cd
.omp_orig_seen
)
2088 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
2091 e3
= gfc_copy_expr (e1
);
2092 e3
->symtree
= symtree3
;
2093 t
= gfc_resolve_expr (e3
);
2098 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
2102 e4
= gfc_add (e3
, e1
);
2105 e4
= gfc_multiply (e3
, e1
);
2107 case TRUTH_ANDIF_EXPR
:
2108 e4
= gfc_and (e3
, e1
);
2110 case TRUTH_ORIF_EXPR
:
2111 e4
= gfc_or (e3
, e1
);
2114 e4
= gfc_eqv (e3
, e1
);
2117 e4
= gfc_neqv (e3
, e1
);
2135 if (n
->u2
.udr
->combiner
->op
== EXEC_ASSIGN
)
2138 e3
= gfc_copy_expr (n
->u2
.udr
->combiner
->expr1
);
2139 e4
= gfc_copy_expr (n
->u2
.udr
->combiner
->expr2
);
2140 t
= gfc_resolve_expr (e3
);
2142 t
= gfc_resolve_expr (e4
);
2151 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
2152 intrinsic_sym
.ns
= sym
->ns
;
2153 intrinsic_sym
.name
= iname
;
2154 intrinsic_sym
.ts
= sym
->ts
;
2155 intrinsic_sym
.attr
.referenced
= 1;
2156 intrinsic_sym
.attr
.intrinsic
= 1;
2157 intrinsic_sym
.attr
.function
= 1;
2158 intrinsic_sym
.attr
.implicit_type
= 1;
2159 intrinsic_sym
.result
= &intrinsic_sym
;
2160 intrinsic_sym
.declared_at
= where
;
2162 symtree4
= gfc_new_symtree (&root4
, iname
);
2163 symtree4
->n
.sym
= &intrinsic_sym
;
2164 gcc_assert (symtree4
== root4
);
2166 e4
= gfc_get_expr ();
2167 e4
->expr_type
= EXPR_FUNCTION
;
2169 e4
->symtree
= symtree4
;
2170 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
2171 e4
->value
.function
.actual
->expr
= e3
;
2172 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
2173 e4
->value
.function
.actual
->next
->expr
= e1
;
2175 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
2177 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
2178 e1
= gfc_copy_expr (e1
);
2179 e3
= gfc_copy_expr (e3
);
2180 t
= gfc_resolve_expr (e4
);
2184 /* Create the init statement list. */
2187 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
2189 stmt
= gfc_trans_call (n
->u2
.udr
->initializer
, false,
2190 NULL_TREE
, NULL_TREE
, false);
2191 if (TREE_CODE (stmt
) != BIND_EXPR
)
2192 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
2195 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
2197 /* Create the merge statement list. */
2200 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
2202 stmt
= gfc_trans_call (n
->u2
.udr
->combiner
, false,
2203 NULL_TREE
, NULL_TREE
, false);
2204 if (TREE_CODE (stmt
) != BIND_EXPR
)
2205 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
2208 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
2210 /* And stick the placeholder VAR_DECL into the clause as well. */
2211 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
2213 gfc_current_locus
= old_loc
;
2226 gfc_free_array_spec (outer_sym
.as
);
2230 *udr
->omp_out
= omp_var_copy
[0];
2231 *udr
->omp_in
= omp_var_copy
[1];
2232 if (udr
->initializer_ns
)
2234 *udr
->omp_priv
= omp_var_copy
[2];
2235 *udr
->omp_orig
= omp_var_copy
[3];
2241 gfc_trans_omp_reduction_list (int kind
, gfc_omp_namelist
*namelist
, tree list
,
2242 locus where
, bool mark_addressable
)
2244 omp_clause_code clause
= OMP_CLAUSE_REDUCTION
;
2247 case OMP_LIST_REDUCTION
:
2248 case OMP_LIST_REDUCTION_INSCAN
:
2249 case OMP_LIST_REDUCTION_TASK
:
2251 case OMP_LIST_IN_REDUCTION
:
2252 clause
= OMP_CLAUSE_IN_REDUCTION
;
2254 case OMP_LIST_TASK_REDUCTION
:
2255 clause
= OMP_CLAUSE_TASK_REDUCTION
;
2260 for (; namelist
!= NULL
; namelist
= namelist
->next
)
2261 if (namelist
->sym
->attr
.referenced
)
2263 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
2264 if (t
!= error_mark_node
)
2266 tree node
= build_omp_clause (gfc_get_location (&namelist
->where
),
2268 OMP_CLAUSE_DECL (node
) = t
;
2269 if (mark_addressable
)
2270 TREE_ADDRESSABLE (t
) = 1;
2271 if (kind
== OMP_LIST_REDUCTION_INSCAN
)
2272 OMP_CLAUSE_REDUCTION_INSCAN (node
) = 1;
2273 if (kind
== OMP_LIST_REDUCTION_TASK
)
2274 OMP_CLAUSE_REDUCTION_TASK (node
) = 1;
2275 switch (namelist
->u
.reduction_op
)
2277 case OMP_REDUCTION_PLUS
:
2278 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
2280 case OMP_REDUCTION_MINUS
:
2281 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
2283 case OMP_REDUCTION_TIMES
:
2284 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
2286 case OMP_REDUCTION_AND
:
2287 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
2289 case OMP_REDUCTION_OR
:
2290 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
2292 case OMP_REDUCTION_EQV
:
2293 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
2295 case OMP_REDUCTION_NEQV
:
2296 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
2298 case OMP_REDUCTION_MAX
:
2299 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
2301 case OMP_REDUCTION_MIN
:
2302 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
2304 case OMP_REDUCTION_IAND
:
2305 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
2307 case OMP_REDUCTION_IOR
:
2308 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
2310 case OMP_REDUCTION_IEOR
:
2311 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
2313 case OMP_REDUCTION_USER
:
2314 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
2319 if (namelist
->sym
->attr
.dimension
2320 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
2321 || namelist
->sym
->attr
.allocatable
)
2322 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
2323 list
= gfc_trans_add_clause (node
, list
);
2330 gfc_convert_expr_to_tree (stmtblock_t
*block
, gfc_expr
*expr
)
2335 gfc_init_se (&se
, NULL
);
2336 gfc_conv_expr (&se
, expr
);
2337 gfc_add_block_to_block (block
, &se
.pre
);
2338 result
= gfc_evaluate_now (se
.expr
, block
);
2339 gfc_add_block_to_block (block
, &se
.post
);
2344 static vec
<tree
, va_heap
, vl_embed
> *doacross_steps
;
2347 /* Translate an array section or array element. */
2350 gfc_trans_omp_array_section (stmtblock_t
*block
, gfc_omp_namelist
*n
,
2351 tree decl
, bool element
, gomp_map_kind ptr_kind
,
2352 tree
&node
, tree
&node2
, tree
&node3
, tree
&node4
)
2356 tree elemsz
= NULL_TREE
;
2358 gfc_init_se (&se
, NULL
);
2362 gfc_conv_expr_reference (&se
, n
->expr
);
2363 gfc_add_block_to_block (block
, &se
.pre
);
2365 OMP_CLAUSE_SIZE (node
) = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ptr
)));
2366 elemsz
= OMP_CLAUSE_SIZE (node
);
2370 gfc_conv_expr_descriptor (&se
, n
->expr
);
2371 ptr
= gfc_conv_array_data (se
.expr
);
2372 tree type
= TREE_TYPE (se
.expr
);
2373 gfc_add_block_to_block (block
, &se
.pre
);
2374 OMP_CLAUSE_SIZE (node
) = gfc_full_array_size (block
, se
.expr
,
2375 GFC_TYPE_ARRAY_RANK (type
));
2376 elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2377 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2378 OMP_CLAUSE_SIZE (node
) = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2379 OMP_CLAUSE_SIZE (node
), elemsz
);
2381 gcc_assert (se
.post
.head
== NULL_TREE
);
2382 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
2383 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2384 ptr
= fold_convert (ptrdiff_type_node
, ptr
);
2386 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2387 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
)))
2388 && ptr_kind
== GOMP_MAP_POINTER
)
2390 node4
= build_omp_clause (input_location
,
2392 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2393 OMP_CLAUSE_DECL (node4
) = decl
;
2394 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2395 decl
= build_fold_indirect_ref (decl
);
2397 else if (ptr_kind
== GOMP_MAP_ALWAYS_POINTER
2398 && n
->expr
->ts
.type
== BT_CHARACTER
2399 && n
->expr
->ts
.deferred
)
2401 gomp_map_kind map_kind
;
2402 if (GOMP_MAP_COPY_TO_P (OMP_CLAUSE_MAP_KIND (node
)))
2403 map_kind
= GOMP_MAP_TO
;
2404 else if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_RELEASE
2405 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_DELETE
)
2406 map_kind
= OMP_CLAUSE_MAP_KIND (node
);
2408 map_kind
= GOMP_MAP_ALLOC
;
2409 gcc_assert (se
.string_length
);
2410 node4
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2411 OMP_CLAUSE_SET_MAP_KIND (node4
, map_kind
);
2412 OMP_CLAUSE_DECL (node4
) = se
.string_length
;
2413 OMP_CLAUSE_SIZE (node4
) = TYPE_SIZE_UNIT (gfc_charlen_type_node
);
2415 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2418 tree type
= TREE_TYPE (decl
);
2419 ptr2
= gfc_conv_descriptor_data_get (decl
);
2420 desc_node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2421 OMP_CLAUSE_DECL (desc_node
) = decl
;
2422 OMP_CLAUSE_SIZE (desc_node
) = TYPE_SIZE_UNIT (type
);
2423 if (ptr_kind
== GOMP_MAP_ALWAYS_POINTER
)
2425 OMP_CLAUSE_SET_MAP_KIND (desc_node
, GOMP_MAP_TO
);
2427 node
= desc_node
; /* Needs to come first. */
2431 OMP_CLAUSE_SET_MAP_KIND (desc_node
, GOMP_MAP_TO_PSET
);
2434 node3
= build_omp_clause (input_location
,
2436 OMP_CLAUSE_SET_MAP_KIND (node3
, ptr_kind
);
2437 OMP_CLAUSE_DECL (node3
)
2438 = gfc_conv_descriptor_data_get (decl
);
2439 /* This purposely does not include GOMP_MAP_ALWAYS_POINTER. The extra
2440 cast prevents gimplify.c from recognising it as being part of the
2441 struct – and adding an 'alloc: for the 'desc.data' pointer, which
2442 would break as the 'desc' (the descriptor) is also mapped
2443 (see node4 above). */
2444 if (ptr_kind
== GOMP_MAP_ATTACH_DETACH
)
2445 STRIP_NOPS (OMP_CLAUSE_DECL (node3
));
2449 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2452 ptr2
= build_fold_addr_expr (decl
);
2453 offset
= fold_build2 (MINUS_EXPR
, ptrdiff_type_node
, ptr
,
2454 fold_convert (ptrdiff_type_node
, ptr2
));
2455 offset
= build2 (TRUNC_DIV_EXPR
, ptrdiff_type_node
,
2456 offset
, fold_convert (ptrdiff_type_node
, elemsz
));
2457 offset
= build4_loc (input_location
, ARRAY_REF
,
2458 TREE_TYPE (TREE_TYPE (decl
)),
2459 decl
, offset
, NULL_TREE
, NULL_TREE
);
2460 OMP_CLAUSE_DECL (node
) = offset
;
2464 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2467 node3
= build_omp_clause (input_location
,
2469 OMP_CLAUSE_SET_MAP_KIND (node3
, ptr_kind
);
2470 OMP_CLAUSE_DECL (node3
) = decl
;
2472 ptr2
= fold_convert (ptrdiff_type_node
, ptr2
);
2473 OMP_CLAUSE_SIZE (node3
) = fold_build2 (MINUS_EXPR
, ptrdiff_type_node
,
2478 handle_iterator (gfc_namespace
*ns
, stmtblock_t
*iter_block
, tree block
)
2480 tree list
= NULL_TREE
;
2481 for (gfc_symbol
*sym
= ns
->proc_name
; sym
; sym
= sym
->tlink
)
2486 tree last
= make_tree_vec (6);
2487 tree iter_var
= gfc_get_symbol_decl (sym
);
2488 tree type
= TREE_TYPE (iter_var
);
2489 TREE_VEC_ELT (last
, 0) = iter_var
;
2490 DECL_CHAIN (iter_var
) = BLOCK_VARS (block
);
2491 BLOCK_VARS (block
) = iter_var
;
2494 c
= gfc_constructor_first (sym
->value
->value
.constructor
);
2495 gfc_init_se (&se
, NULL
);
2496 gfc_conv_expr (&se
, c
->expr
);
2497 gfc_add_block_to_block (iter_block
, &se
.pre
);
2498 gfc_add_block_to_block (iter_block
, &se
.post
);
2499 TREE_VEC_ELT (last
, 1) = fold_convert (type
,
2500 gfc_evaluate_now (se
.expr
,
2503 c
= gfc_constructor_next (c
);
2504 gfc_init_se (&se
, NULL
);
2505 gfc_conv_expr (&se
, c
->expr
);
2506 gfc_add_block_to_block (iter_block
, &se
.pre
);
2507 gfc_add_block_to_block (iter_block
, &se
.post
);
2508 TREE_VEC_ELT (last
, 2) = fold_convert (type
,
2509 gfc_evaluate_now (se
.expr
,
2512 c
= gfc_constructor_next (c
);
2516 gfc_init_se (&se
, NULL
);
2517 gfc_conv_expr (&se
, c
->expr
);
2518 gfc_add_block_to_block (iter_block
, &se
.pre
);
2519 gfc_add_block_to_block (iter_block
, &se
.post
);
2520 gfc_conv_expr (&se
, c
->expr
);
2521 step
= fold_convert (type
,
2522 gfc_evaluate_now (se
.expr
,
2526 step
= build_int_cst (type
, 1);
2527 TREE_VEC_ELT (last
, 3) = step
;
2529 TREE_VEC_ELT (last
, 4) = save_expr (step
);
2530 TREE_CHAIN (last
) = list
;
2537 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
2538 locus where
, bool declare_simd
= false,
2539 bool openacc
= false)
2541 tree omp_clauses
= NULL_TREE
, prev_clauses
, chunk_size
, c
;
2542 tree iterator
= NULL_TREE
;
2543 tree tree_block
= NULL_TREE
;
2544 stmtblock_t iter_block
;
2546 enum omp_clause_code clause_code
;
2547 gfc_omp_namelist
*prev
= NULL
;
2550 if (clauses
== NULL
)
2553 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
2555 gfc_omp_namelist
*n
= clauses
->lists
[list
];
2561 case OMP_LIST_REDUCTION
:
2562 case OMP_LIST_REDUCTION_INSCAN
:
2563 case OMP_LIST_REDUCTION_TASK
:
2564 case OMP_LIST_IN_REDUCTION
:
2565 case OMP_LIST_TASK_REDUCTION
:
2566 /* An OpenACC async clause indicates the need to set reduction
2567 arguments addressable, to allow asynchronous copy-out. */
2568 omp_clauses
= gfc_trans_omp_reduction_list (list
, n
, omp_clauses
,
2569 where
, clauses
->async
);
2571 case OMP_LIST_PRIVATE
:
2572 clause_code
= OMP_CLAUSE_PRIVATE
;
2574 case OMP_LIST_SHARED
:
2575 clause_code
= OMP_CLAUSE_SHARED
;
2577 case OMP_LIST_FIRSTPRIVATE
:
2578 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
2580 case OMP_LIST_LASTPRIVATE
:
2581 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
2583 case OMP_LIST_COPYIN
:
2584 clause_code
= OMP_CLAUSE_COPYIN
;
2586 case OMP_LIST_COPYPRIVATE
:
2587 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
2589 case OMP_LIST_UNIFORM
:
2590 clause_code
= OMP_CLAUSE_UNIFORM
;
2592 case OMP_LIST_USE_DEVICE
:
2593 case OMP_LIST_USE_DEVICE_PTR
:
2594 clause_code
= OMP_CLAUSE_USE_DEVICE_PTR
;
2596 case OMP_LIST_USE_DEVICE_ADDR
:
2597 clause_code
= OMP_CLAUSE_USE_DEVICE_ADDR
;
2599 case OMP_LIST_IS_DEVICE_PTR
:
2600 clause_code
= OMP_CLAUSE_IS_DEVICE_PTR
;
2602 case OMP_LIST_NONTEMPORAL
:
2603 clause_code
= OMP_CLAUSE_NONTEMPORAL
;
2605 case OMP_LIST_SCAN_IN
:
2606 clause_code
= OMP_CLAUSE_INCLUSIVE
;
2608 case OMP_LIST_SCAN_EX
:
2609 clause_code
= OMP_CLAUSE_EXCLUSIVE
;
2614 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
2617 case OMP_LIST_ALIGNED
:
2618 for (; n
!= NULL
; n
= n
->next
)
2619 if (n
->sym
->attr
.referenced
|| declare_simd
)
2621 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
2622 if (t
!= error_mark_node
)
2624 tree node
= build_omp_clause (input_location
,
2625 OMP_CLAUSE_ALIGNED
);
2626 OMP_CLAUSE_DECL (node
) = t
;
2632 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
2635 gfc_init_se (&se
, NULL
);
2636 gfc_conv_expr (&se
, n
->expr
);
2637 gfc_add_block_to_block (block
, &se
.pre
);
2638 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
2639 gfc_add_block_to_block (block
, &se
.post
);
2641 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
2643 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2647 case OMP_LIST_LINEAR
:
2649 gfc_expr
*last_step_expr
= NULL
;
2650 tree last_step
= NULL_TREE
;
2651 bool last_step_parm
= false;
2653 for (; n
!= NULL
; n
= n
->next
)
2657 last_step_expr
= n
->expr
;
2658 last_step
= NULL_TREE
;
2659 last_step_parm
= false;
2661 if (n
->sym
->attr
.referenced
|| declare_simd
)
2663 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
2664 if (t
!= error_mark_node
)
2666 tree node
= build_omp_clause (input_location
,
2668 OMP_CLAUSE_DECL (node
) = t
;
2669 omp_clause_linear_kind kind
;
2670 switch (n
->u
.linear_op
)
2672 case OMP_LINEAR_DEFAULT
:
2673 kind
= OMP_CLAUSE_LINEAR_DEFAULT
;
2675 case OMP_LINEAR_REF
:
2676 kind
= OMP_CLAUSE_LINEAR_REF
;
2678 case OMP_LINEAR_VAL
:
2679 kind
= OMP_CLAUSE_LINEAR_VAL
;
2681 case OMP_LINEAR_UVAL
:
2682 kind
= OMP_CLAUSE_LINEAR_UVAL
;
2687 OMP_CLAUSE_LINEAR_KIND (node
) = kind
;
2688 if (last_step_expr
&& last_step
== NULL_TREE
)
2692 gfc_init_se (&se
, NULL
);
2693 gfc_conv_expr (&se
, last_step_expr
);
2694 gfc_add_block_to_block (block
, &se
.pre
);
2695 last_step
= gfc_evaluate_now (se
.expr
, block
);
2696 gfc_add_block_to_block (block
, &se
.post
);
2698 else if (last_step_expr
->expr_type
== EXPR_VARIABLE
)
2700 gfc_symbol
*s
= last_step_expr
->symtree
->n
.sym
;
2701 last_step
= gfc_trans_omp_variable (s
, true);
2702 last_step_parm
= true;
2706 = gfc_conv_constant_to_tree (last_step_expr
);
2710 OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (node
) = 1;
2711 OMP_CLAUSE_LINEAR_STEP (node
) = last_step
;
2715 if (kind
== OMP_CLAUSE_LINEAR_REF
)
2718 if (n
->sym
->attr
.flavor
== FL_PROCEDURE
)
2720 type
= gfc_get_function_type (n
->sym
);
2721 type
= build_pointer_type (type
);
2724 type
= gfc_sym_type (n
->sym
);
2725 if (POINTER_TYPE_P (type
))
2726 type
= TREE_TYPE (type
);
2727 /* Otherwise to be determined what exactly
2729 tree t
= fold_convert (sizetype
, last_step
);
2730 t
= size_binop (MULT_EXPR
, t
,
2731 TYPE_SIZE_UNIT (type
));
2732 OMP_CLAUSE_LINEAR_STEP (node
) = t
;
2737 = gfc_typenode_for_spec (&n
->sym
->ts
);
2738 OMP_CLAUSE_LINEAR_STEP (node
)
2739 = fold_convert (type
, last_step
);
2742 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
2743 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
2744 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2750 case OMP_LIST_AFFINITY
:
2751 case OMP_LIST_DEPEND
:
2752 iterator
= NULL_TREE
;
2754 prev_clauses
= omp_clauses
;
2755 for (; n
!= NULL
; n
= n
->next
)
2757 if (iterator
&& prev
->u2
.ns
!= n
->u2
.ns
)
2759 BLOCK_SUBBLOCKS (tree_block
) = gfc_finish_block (&iter_block
);
2760 TREE_VEC_ELT (iterator
, 5) = tree_block
;
2761 for (tree c
= omp_clauses
; c
!= prev_clauses
;
2762 c
= OMP_CLAUSE_CHAIN (c
))
2763 OMP_CLAUSE_DECL (c
) = build_tree_list (iterator
,
2764 OMP_CLAUSE_DECL (c
));
2765 prev_clauses
= omp_clauses
;
2766 iterator
= NULL_TREE
;
2768 if (n
->u2
.ns
&& (!prev
|| prev
->u2
.ns
!= n
->u2
.ns
))
2770 gfc_init_block (&iter_block
);
2771 tree_block
= make_node (BLOCK
);
2772 TREE_USED (tree_block
) = 1;
2773 BLOCK_VARS (tree_block
) = NULL_TREE
;
2774 iterator
= handle_iterator (n
->u2
.ns
, block
,
2778 gfc_init_block (&iter_block
);
2780 if (list
== OMP_LIST_DEPEND
2781 && n
->u
.depend_op
== OMP_DEPEND_SINK_FIRST
)
2783 tree vec
= NULL_TREE
;
2787 tree addend
= integer_zero_node
, t
;
2791 addend
= gfc_conv_constant_to_tree (n
->expr
);
2792 if (TREE_CODE (addend
) == INTEGER_CST
2793 && tree_int_cst_sgn (addend
) == -1)
2796 addend
= const_unop (NEGATE_EXPR
,
2797 TREE_TYPE (addend
), addend
);
2800 t
= gfc_trans_omp_variable (n
->sym
, false);
2801 if (t
!= error_mark_node
)
2803 if (i
< vec_safe_length (doacross_steps
)
2804 && !integer_zerop (addend
)
2805 && (*doacross_steps
)[i
])
2807 tree step
= (*doacross_steps
)[i
];
2808 addend
= fold_convert (TREE_TYPE (step
), addend
);
2809 addend
= build2 (TRUNC_DIV_EXPR
,
2810 TREE_TYPE (step
), addend
, step
);
2812 vec
= tree_cons (addend
, t
, vec
);
2814 OMP_CLAUSE_DEPEND_SINK_NEGATIVE (vec
) = 1;
2817 || n
->next
->u
.depend_op
!= OMP_DEPEND_SINK
)
2821 if (vec
== NULL_TREE
)
2824 tree node
= build_omp_clause (input_location
,
2826 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_SINK
;
2827 OMP_CLAUSE_DECL (node
) = nreverse (vec
);
2828 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2832 if (!n
->sym
->attr
.referenced
)
2835 tree node
= build_omp_clause (input_location
,
2836 list
== OMP_LIST_DEPEND
2838 : OMP_CLAUSE_AFFINITY
);
2839 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2841 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
2842 if (gfc_omp_privatize_by_reference (decl
))
2843 decl
= build_fold_indirect_ref (decl
);
2844 if (n
->u
.depend_op
== OMP_DEPEND_DEPOBJ
2845 && POINTER_TYPE_P (TREE_TYPE (decl
)))
2846 decl
= build_fold_indirect_ref (decl
);
2847 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2849 decl
= gfc_conv_descriptor_data_get (decl
);
2850 decl
= fold_convert (build_pointer_type (char_type_node
),
2852 decl
= build_fold_indirect_ref (decl
);
2854 else if (DECL_P (decl
))
2855 TREE_ADDRESSABLE (decl
) = 1;
2856 OMP_CLAUSE_DECL (node
) = decl
;
2861 gfc_init_se (&se
, NULL
);
2862 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2864 gfc_conv_expr_reference (&se
, n
->expr
);
2869 gfc_conv_expr_descriptor (&se
, n
->expr
);
2870 ptr
= gfc_conv_array_data (se
.expr
);
2872 gfc_add_block_to_block (&iter_block
, &se
.pre
);
2873 gfc_add_block_to_block (&iter_block
, &se
.post
);
2874 ptr
= fold_convert (build_pointer_type (char_type_node
),
2876 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2878 if (list
== OMP_LIST_DEPEND
)
2879 switch (n
->u
.depend_op
)
2882 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
2884 case OMP_DEPEND_OUT
:
2885 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
2887 case OMP_DEPEND_INOUT
:
2888 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
2890 case OMP_DEPEND_MUTEXINOUTSET
:
2891 OMP_CLAUSE_DEPEND_KIND (node
)
2892 = OMP_CLAUSE_DEPEND_MUTEXINOUTSET
;
2894 case OMP_DEPEND_DEPOBJ
:
2895 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_DEPOBJ
;
2901 gfc_add_block_to_block (block
, &iter_block
);
2902 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2906 BLOCK_SUBBLOCKS (tree_block
) = gfc_finish_block (&iter_block
);
2907 TREE_VEC_ELT (iterator
, 5) = tree_block
;
2908 for (tree c
= omp_clauses
; c
!= prev_clauses
;
2909 c
= OMP_CLAUSE_CHAIN (c
))
2910 OMP_CLAUSE_DECL (c
) = build_tree_list (iterator
,
2911 OMP_CLAUSE_DECL (c
));
2915 for (; n
!= NULL
; n
= n
->next
)
2917 if (!n
->sym
->attr
.referenced
)
2920 bool always_modifier
= false;
2921 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2922 tree node2
= NULL_TREE
;
2923 tree node3
= NULL_TREE
;
2924 tree node4
= NULL_TREE
;
2926 /* OpenMP: automatically map pointer targets with the pointer;
2927 hence, always update the descriptor/pointer itself. */
2929 && ((n
->expr
== NULL
&& n
->sym
->attr
.pointer
)
2930 || (n
->expr
&& gfc_expr_attr (n
->expr
).pointer
)))
2931 always_modifier
= true;
2933 switch (n
->u
.map_op
)
2936 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALLOC
);
2938 case OMP_MAP_IF_PRESENT
:
2939 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_IF_PRESENT
);
2941 case OMP_MAP_ATTACH
:
2942 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ATTACH
);
2945 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TO
);
2948 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FROM
);
2950 case OMP_MAP_TOFROM
:
2951 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TOFROM
);
2953 case OMP_MAP_ALWAYS_TO
:
2954 always_modifier
= true;
2955 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TO
);
2957 case OMP_MAP_ALWAYS_FROM
:
2958 always_modifier
= true;
2959 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_FROM
);
2961 case OMP_MAP_ALWAYS_TOFROM
:
2962 always_modifier
= true;
2963 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TOFROM
);
2965 case OMP_MAP_RELEASE
:
2966 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_RELEASE
);
2968 case OMP_MAP_DELETE
:
2969 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DELETE
);
2971 case OMP_MAP_DETACH
:
2972 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DETACH
);
2974 case OMP_MAP_FORCE_ALLOC
:
2975 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_ALLOC
);
2977 case OMP_MAP_FORCE_TO
:
2978 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TO
);
2980 case OMP_MAP_FORCE_FROM
:
2981 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_FROM
);
2983 case OMP_MAP_FORCE_TOFROM
:
2984 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TOFROM
);
2986 case OMP_MAP_FORCE_PRESENT
:
2987 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_PRESENT
);
2989 case OMP_MAP_FORCE_DEVICEPTR
:
2990 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEVICEPTR
);
2996 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
2998 TREE_ADDRESSABLE (decl
) = 1;
3000 gfc_ref
*lastref
= NULL
;
3003 for (gfc_ref
*ref
= n
->expr
->ref
; ref
; ref
= ref
->next
)
3004 if (ref
->type
== REF_COMPONENT
|| ref
->type
== REF_ARRAY
)
3007 bool allocatable
= false, pointer
= false;
3009 if (lastref
&& lastref
->type
== REF_COMPONENT
)
3011 gfc_component
*c
= lastref
->u
.c
.component
;
3013 if (c
->ts
.type
== BT_CLASS
)
3015 pointer
= CLASS_DATA (c
)->attr
.class_pointer
;
3016 allocatable
= CLASS_DATA (c
)->attr
.allocatable
;
3020 pointer
= c
->attr
.pointer
;
3021 allocatable
= c
->attr
.allocatable
;
3026 || (n
->expr
->ref
->type
== REF_ARRAY
3027 && n
->expr
->ref
->u
.ar
.type
== AR_FULL
))
3029 tree present
= gfc_omp_check_optional_argument (decl
, true);
3030 if (openacc
&& n
->sym
->ts
.type
== BT_CLASS
)
3032 tree type
= TREE_TYPE (decl
);
3033 if (n
->sym
->attr
.optional
)
3034 sorry ("optional class parameter");
3035 if (POINTER_TYPE_P (type
))
3037 node4
= build_omp_clause (input_location
,
3039 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
3040 OMP_CLAUSE_DECL (node4
) = decl
;
3041 OMP_CLAUSE_SIZE (node4
) = size_int (0);
3042 decl
= build_fold_indirect_ref (decl
);
3044 tree ptr
= gfc_class_data_get (decl
);
3045 ptr
= build_fold_indirect_ref (ptr
);
3046 OMP_CLAUSE_DECL (node
) = ptr
;
3047 OMP_CLAUSE_SIZE (node
) = gfc_class_vtab_size_get (decl
);
3048 node2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
3049 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
3050 OMP_CLAUSE_DECL (node2
) = decl
;
3051 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
3052 node3
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
3053 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_ATTACH_DETACH
);
3054 OMP_CLAUSE_DECL (node3
) = gfc_class_data_get (decl
);
3055 OMP_CLAUSE_SIZE (node3
) = size_int (0);
3056 goto finalize_map_clause
;
3058 else if (POINTER_TYPE_P (TREE_TYPE (decl
))
3059 && (gfc_omp_privatize_by_reference (decl
)
3060 || GFC_DECL_GET_SCALAR_POINTER (decl
)
3061 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
3062 || GFC_DECL_CRAY_POINTEE (decl
)
3063 || GFC_DESCRIPTOR_TYPE_P
3064 (TREE_TYPE (TREE_TYPE (decl
)))
3065 || n
->sym
->ts
.type
== BT_DERIVED
))
3067 tree orig_decl
= decl
;
3069 /* For nonallocatable, nonpointer arrays, a temporary
3070 variable is generated, but this one is only defined if
3071 the variable is present; hence, we now set it to NULL
3072 to avoid accessing undefined variables. We cannot use
3073 a temporary variable here as otherwise the replacement
3074 of the variables in omp-low.c will not work. */
3075 if (present
&& GFC_ARRAY_TYPE_P (TREE_TYPE (decl
)))
3077 tree tmp
= fold_build2_loc (input_location
,
3079 void_type_node
, decl
,
3081 tree cond
= fold_build1_loc (input_location
,
3085 gfc_add_expr_to_block (block
,
3086 build3_loc (input_location
,
3092 node4
= build_omp_clause (input_location
,
3094 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
3095 OMP_CLAUSE_DECL (node4
) = decl
;
3096 OMP_CLAUSE_SIZE (node4
) = size_int (0);
3097 decl
= build_fold_indirect_ref (decl
);
3098 if ((TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
3099 || gfc_omp_is_optional_argument (orig_decl
))
3100 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
3101 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
3103 node3
= build_omp_clause (input_location
,
3105 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
3106 OMP_CLAUSE_DECL (node3
) = decl
;
3107 OMP_CLAUSE_SIZE (node3
) = size_int (0);
3108 decl
= build_fold_indirect_ref (decl
);
3111 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
3113 tree type
= TREE_TYPE (decl
);
3114 tree ptr
= gfc_conv_descriptor_data_get (decl
);
3116 ptr
= gfc_build_cond_assign_expr (block
, present
, ptr
,
3118 ptr
= fold_convert (build_pointer_type (char_type_node
),
3120 ptr
= build_fold_indirect_ref (ptr
);
3121 OMP_CLAUSE_DECL (node
) = ptr
;
3122 node2
= build_omp_clause (input_location
,
3124 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
3125 OMP_CLAUSE_DECL (node2
) = decl
;
3126 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
3127 node3
= build_omp_clause (input_location
,
3131 ptr
= gfc_conv_descriptor_data_get (decl
);
3132 ptr
= gfc_build_addr_expr (NULL
, ptr
);
3133 ptr
= gfc_build_cond_assign_expr (block
, present
, ptr
,
3135 ptr
= build_fold_indirect_ref (ptr
);
3136 OMP_CLAUSE_DECL (node3
) = ptr
;
3139 OMP_CLAUSE_DECL (node3
)
3140 = gfc_conv_descriptor_data_get (decl
);
3141 OMP_CLAUSE_SIZE (node3
) = size_int (0);
3142 if (n
->u
.map_op
== OMP_MAP_ATTACH
)
3144 /* Standalone attach clauses used with arrays with
3145 descriptors must copy the descriptor to the target,
3146 else they won't have anything to perform the
3147 attachment onto (see OpenACC 2.6, "2.6.3. Data
3148 Structures with Pointers"). */
3149 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_ATTACH
);
3150 /* We don't want to map PTR at all in this case, so
3151 delete its node and shuffle the others down. */
3155 goto finalize_map_clause
;
3157 else if (n
->u
.map_op
== OMP_MAP_DETACH
)
3159 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_DETACH
);
3160 /* Similarly to above, we don't want to unmap PTR
3165 goto finalize_map_clause
;
3168 OMP_CLAUSE_SET_MAP_KIND (node3
,
3170 ? GOMP_MAP_ALWAYS_POINTER
3171 : GOMP_MAP_POINTER
);
3173 /* We have to check for n->sym->attr.dimension because
3174 of scalar coarrays. */
3175 if (n
->sym
->attr
.pointer
&& n
->sym
->attr
.dimension
)
3177 stmtblock_t cond_block
;
3179 = gfc_create_var (gfc_array_index_type
, NULL
);
3180 tree tem
, then_b
, else_b
, zero
, cond
;
3182 gfc_init_block (&cond_block
);
3184 = gfc_full_array_size (&cond_block
, decl
,
3185 GFC_TYPE_ARRAY_RANK (type
));
3186 gfc_add_modify (&cond_block
, size
, tem
);
3187 then_b
= gfc_finish_block (&cond_block
);
3188 gfc_init_block (&cond_block
);
3189 zero
= build_int_cst (gfc_array_index_type
, 0);
3190 gfc_add_modify (&cond_block
, size
, zero
);
3191 else_b
= gfc_finish_block (&cond_block
);
3192 tem
= gfc_conv_descriptor_data_get (decl
);
3193 tem
= fold_convert (pvoid_type_node
, tem
);
3194 cond
= fold_build2_loc (input_location
, NE_EXPR
,
3196 tem
, null_pointer_node
);
3198 cond
= fold_build2_loc (input_location
,
3202 gfc_add_expr_to_block (block
,
3203 build3_loc (input_location
,
3208 OMP_CLAUSE_SIZE (node
) = size
;
3210 else if (n
->sym
->attr
.dimension
)
3212 stmtblock_t cond_block
;
3213 gfc_init_block (&cond_block
);
3214 tree size
= gfc_full_array_size (&cond_block
, decl
,
3215 GFC_TYPE_ARRAY_RANK (type
));
3218 tree var
= gfc_create_var (gfc_array_index_type
,
3220 gfc_add_modify (&cond_block
, var
, size
);
3221 tree cond_body
= gfc_finish_block (&cond_block
);
3222 tree cond
= build3_loc (input_location
, COND_EXPR
,
3223 void_type_node
, present
,
3224 cond_body
, NULL_TREE
);
3225 gfc_add_expr_to_block (block
, cond
);
3226 OMP_CLAUSE_SIZE (node
) = var
;
3230 gfc_add_block_to_block (block
, &cond_block
);
3231 OMP_CLAUSE_SIZE (node
) = size
;
3234 if (n
->sym
->attr
.dimension
)
3237 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3238 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3239 OMP_CLAUSE_SIZE (node
)
3240 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3241 OMP_CLAUSE_SIZE (node
), elemsz
);
3245 && TREE_CODE (decl
) == INDIRECT_REF
3246 && (TREE_CODE (TREE_OPERAND (decl
, 0))
3249 /* A single indirectref is handled by the middle end. */
3250 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
3251 decl
= TREE_OPERAND (decl
, 0);
3252 decl
= gfc_build_cond_assign_expr (block
, present
, decl
,
3254 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (decl
);
3257 OMP_CLAUSE_DECL (node
) = decl
;
3260 && n
->expr
->expr_type
== EXPR_VARIABLE
3261 && n
->expr
->ref
->type
== REF_ARRAY
3262 && !n
->expr
->ref
->next
)
3264 /* An array element or array section which is not part of a
3265 derived type, etc. */
3266 bool element
= n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
;
3267 gfc_trans_omp_array_section (block
, n
, decl
, element
,
3268 GOMP_MAP_POINTER
, node
, node2
,
3272 && n
->expr
->expr_type
== EXPR_VARIABLE
3273 && (n
->expr
->ref
->type
== REF_COMPONENT
3274 || n
->expr
->ref
->type
== REF_ARRAY
)
3276 && lastref
->type
== REF_COMPONENT
3277 && lastref
->u
.c
.component
->ts
.type
!= BT_CLASS
3278 && lastref
->u
.c
.component
->ts
.type
!= BT_DERIVED
3279 && !lastref
->u
.c
.component
->attr
.dimension
)
3281 /* Derived type access with last component being a scalar. */
3282 gfc_init_se (&se
, NULL
);
3284 gfc_conv_expr (&se
, n
->expr
);
3285 gfc_add_block_to_block (block
, &se
.pre
);
3286 /* For BT_CHARACTER a pointer is returned. */
3287 OMP_CLAUSE_DECL (node
)
3288 = POINTER_TYPE_P (TREE_TYPE (se
.expr
))
3289 ? build_fold_indirect_ref (se
.expr
) : se
.expr
;
3290 gfc_add_block_to_block (block
, &se
.post
);
3291 if (pointer
|| allocatable
)
3293 node2
= build_omp_clause (input_location
,
3296 = (openacc
? GOMP_MAP_ATTACH_DETACH
3297 : GOMP_MAP_ALWAYS_POINTER
);
3298 OMP_CLAUSE_SET_MAP_KIND (node2
, kind
);
3299 OMP_CLAUSE_DECL (node2
)
3300 = POINTER_TYPE_P (TREE_TYPE (se
.expr
))
3302 : gfc_build_addr_expr (NULL
, se
.expr
);
3303 OMP_CLAUSE_SIZE (node2
) = size_int (0);
3305 && n
->expr
->ts
.type
== BT_CHARACTER
3306 && n
->expr
->ts
.deferred
)
3308 gcc_assert (se
.string_length
);
3310 = gfc_get_char_type (n
->expr
->ts
.kind
);
3311 OMP_CLAUSE_SIZE (node
)
3312 = fold_build2 (MULT_EXPR
, size_type_node
,
3313 fold_convert (size_type_node
,
3315 TYPE_SIZE_UNIT (tmp
));
3316 node3
= build_omp_clause (input_location
,
3318 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_TO
);
3319 OMP_CLAUSE_DECL (node3
) = se
.string_length
;
3320 OMP_CLAUSE_SIZE (node3
)
3321 = TYPE_SIZE_UNIT (gfc_charlen_type_node
);
3326 && n
->expr
->expr_type
== EXPR_VARIABLE
3327 && (n
->expr
->ref
->type
== REF_COMPONENT
3328 || n
->expr
->ref
->type
== REF_ARRAY
))
3330 gfc_init_se (&se
, NULL
);
3331 se
.expr
= gfc_maybe_dereference_var (n
->sym
, decl
);
3333 for (gfc_ref
*ref
= n
->expr
->ref
; ref
; ref
= ref
->next
)
3335 if (ref
->type
== REF_COMPONENT
)
3337 if (ref
->u
.c
.sym
->attr
.extension
)
3338 conv_parent_component_references (&se
, ref
);
3340 gfc_conv_component_ref (&se
, ref
);
3342 else if (ref
->type
== REF_ARRAY
)
3344 if (ref
->u
.ar
.type
== AR_ELEMENT
&& ref
->next
)
3345 gfc_conv_array_ref (&se
, &ref
->u
.ar
, n
->expr
,
3348 gcc_assert (!ref
->next
);
3351 sorry ("unhandled expression type");
3354 tree inner
= se
.expr
;
3356 /* Last component is a derived type or class pointer. */
3357 if (lastref
->type
== REF_COMPONENT
3358 && (lastref
->u
.c
.component
->ts
.type
== BT_DERIVED
3359 || lastref
->u
.c
.component
->ts
.type
== BT_CLASS
))
3361 if (pointer
|| (openacc
&& allocatable
))
3365 if (lastref
->u
.c
.component
->ts
.type
== BT_CLASS
)
3367 data
= gfc_class_data_get (inner
);
3368 gcc_assert (POINTER_TYPE_P (TREE_TYPE (data
)));
3369 data
= build_fold_indirect_ref (data
);
3370 size
= gfc_class_vtab_size_get (inner
);
3372 else /* BT_DERIVED. */
3375 size
= TYPE_SIZE_UNIT (TREE_TYPE (inner
));
3378 OMP_CLAUSE_DECL (node
) = data
;
3379 OMP_CLAUSE_SIZE (node
) = size
;
3380 node2
= build_omp_clause (input_location
,
3382 OMP_CLAUSE_SET_MAP_KIND (node2
,
3384 ? GOMP_MAP_ATTACH_DETACH
3385 : GOMP_MAP_ALWAYS_POINTER
);
3386 OMP_CLAUSE_DECL (node2
) = build_fold_addr_expr (data
);
3387 OMP_CLAUSE_SIZE (node2
) = size_int (0);
3391 OMP_CLAUSE_DECL (node
) = inner
;
3392 OMP_CLAUSE_SIZE (node
)
3393 = TYPE_SIZE_UNIT (TREE_TYPE (inner
));
3396 else if (lastref
->type
== REF_ARRAY
3397 && lastref
->u
.ar
.type
== AR_FULL
)
3399 /* Just pass the (auto-dereferenced) decl through for
3400 bare attach and detach clauses. */
3401 if (n
->u
.map_op
== OMP_MAP_ATTACH
3402 || n
->u
.map_op
== OMP_MAP_DETACH
)
3404 OMP_CLAUSE_DECL (node
) = inner
;
3405 OMP_CLAUSE_SIZE (node
) = size_zero_node
;
3406 goto finalize_map_clause
;
3409 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (inner
)))
3411 gomp_map_kind map_kind
;
3413 tree type
= TREE_TYPE (inner
);
3414 tree ptr
= gfc_conv_descriptor_data_get (inner
);
3415 ptr
= build_fold_indirect_ref (ptr
);
3416 OMP_CLAUSE_DECL (node
) = ptr
;
3417 int rank
= GFC_TYPE_ARRAY_RANK (type
);
3418 OMP_CLAUSE_SIZE (node
)
3419 = gfc_full_array_size (block
, inner
, rank
);
3421 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3422 if (GOMP_MAP_COPY_TO_P (OMP_CLAUSE_MAP_KIND (node
)))
3423 map_kind
= GOMP_MAP_TO
;
3424 else if (n
->u
.map_op
== OMP_MAP_RELEASE
3425 || n
->u
.map_op
== OMP_MAP_DELETE
)
3426 map_kind
= OMP_CLAUSE_MAP_KIND (node
);
3428 map_kind
= GOMP_MAP_ALLOC
;
3430 && n
->expr
->ts
.type
== BT_CHARACTER
3431 && n
->expr
->ts
.deferred
)
3433 gcc_assert (se
.string_length
);
3434 tree len
= fold_convert (size_type_node
,
3436 elemsz
= gfc_get_char_type (n
->expr
->ts
.kind
);
3437 elemsz
= TYPE_SIZE_UNIT (elemsz
);
3438 elemsz
= fold_build2 (MULT_EXPR
, size_type_node
,
3440 node4
= build_omp_clause (input_location
,
3442 OMP_CLAUSE_SET_MAP_KIND (node4
, map_kind
);
3443 OMP_CLAUSE_DECL (node4
) = se
.string_length
;
3444 OMP_CLAUSE_SIZE (node4
)
3445 = TYPE_SIZE_UNIT (gfc_charlen_type_node
);
3447 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3448 OMP_CLAUSE_SIZE (node
)
3449 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3450 OMP_CLAUSE_SIZE (node
), elemsz
);
3451 desc_node
= build_omp_clause (input_location
,
3454 OMP_CLAUSE_SET_MAP_KIND (desc_node
,
3457 OMP_CLAUSE_SET_MAP_KIND (desc_node
, map_kind
);
3458 OMP_CLAUSE_DECL (desc_node
) = inner
;
3459 OMP_CLAUSE_SIZE (desc_node
) = TYPE_SIZE_UNIT (type
);
3465 node
= desc_node
; /* Put first. */
3467 node3
= build_omp_clause (input_location
,
3469 OMP_CLAUSE_SET_MAP_KIND (node3
,
3471 ? GOMP_MAP_ATTACH_DETACH
3472 : GOMP_MAP_ALWAYS_POINTER
);
3473 OMP_CLAUSE_DECL (node3
)
3474 = gfc_conv_descriptor_data_get (inner
);
3475 /* Similar to gfc_trans_omp_array_section (details
3476 there), we add/keep the cast for OpenMP to prevent
3477 that an 'alloc:' gets added for node3 ('desc.data')
3478 as that is part of the whole descriptor (node3).
3479 TODO: Remove once the ME handles this properly. */
3481 OMP_CLAUSE_DECL (node3
)
3482 = fold_convert (TREE_TYPE (TREE_OPERAND(ptr
, 0)),
3483 OMP_CLAUSE_DECL (node3
));
3485 STRIP_NOPS (OMP_CLAUSE_DECL (node3
));
3486 OMP_CLAUSE_SIZE (node3
) = size_int (0);
3489 OMP_CLAUSE_DECL (node
) = inner
;
3491 else if (lastref
->type
== REF_ARRAY
)
3493 /* An array element or section. */
3494 bool element
= lastref
->u
.ar
.type
== AR_ELEMENT
;
3495 gomp_map_kind kind
= (openacc
? GOMP_MAP_ATTACH_DETACH
3496 : GOMP_MAP_ALWAYS_POINTER
);
3497 gfc_trans_omp_array_section (block
, n
, inner
, element
,
3498 kind
, node
, node2
, node3
,
3505 sorry ("unhandled expression");
3507 finalize_map_clause
:
3509 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
3511 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
3513 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
3515 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
3520 case OMP_LIST_CACHE
:
3521 for (; n
!= NULL
; n
= n
->next
)
3523 if (!n
->sym
->attr
.referenced
)
3529 clause_code
= OMP_CLAUSE_TO
;
3532 clause_code
= OMP_CLAUSE_FROM
;
3534 case OMP_LIST_CACHE
:
3535 clause_code
= OMP_CLAUSE__CACHE_
;
3540 tree node
= build_omp_clause (input_location
, clause_code
);
3541 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
3543 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
3544 if (gfc_omp_privatize_by_reference (decl
))
3546 if (gfc_omp_is_allocatable_or_ptr (decl
))
3547 decl
= build_fold_indirect_ref (decl
);
3548 decl
= build_fold_indirect_ref (decl
);
3550 else if (DECL_P (decl
))
3551 TREE_ADDRESSABLE (decl
) = 1;
3552 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
3554 tree type
= TREE_TYPE (decl
);
3555 tree ptr
= gfc_conv_descriptor_data_get (decl
);
3556 ptr
= fold_convert (build_pointer_type (char_type_node
),
3558 ptr
= build_fold_indirect_ref (ptr
);
3559 OMP_CLAUSE_DECL (node
) = ptr
;
3560 OMP_CLAUSE_SIZE (node
)
3561 = gfc_full_array_size (block
, decl
,
3562 GFC_TYPE_ARRAY_RANK (type
));
3564 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3565 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3566 OMP_CLAUSE_SIZE (node
)
3567 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3568 OMP_CLAUSE_SIZE (node
), elemsz
);
3572 OMP_CLAUSE_DECL (node
) = decl
;
3573 if (gfc_omp_is_allocatable_or_ptr (decl
))
3574 OMP_CLAUSE_SIZE (node
)
3575 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
3581 gfc_init_se (&se
, NULL
);
3582 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
3584 gfc_conv_expr_reference (&se
, n
->expr
);
3586 gfc_add_block_to_block (block
, &se
.pre
);
3587 OMP_CLAUSE_SIZE (node
)
3588 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
3592 gfc_conv_expr_descriptor (&se
, n
->expr
);
3593 ptr
= gfc_conv_array_data (se
.expr
);
3594 tree type
= TREE_TYPE (se
.expr
);
3595 gfc_add_block_to_block (block
, &se
.pre
);
3596 OMP_CLAUSE_SIZE (node
)
3597 = gfc_full_array_size (block
, se
.expr
,
3598 GFC_TYPE_ARRAY_RANK (type
));
3600 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3601 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3602 OMP_CLAUSE_SIZE (node
)
3603 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3604 OMP_CLAUSE_SIZE (node
), elemsz
);
3606 gfc_add_block_to_block (block
, &se
.post
);
3607 ptr
= fold_convert (build_pointer_type (char_type_node
),
3609 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
3611 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
3619 if (clauses
->if_expr
)
3623 gfc_init_se (&se
, NULL
);
3624 gfc_conv_expr (&se
, clauses
->if_expr
);
3625 gfc_add_block_to_block (block
, &se
.pre
);
3626 if_var
= gfc_evaluate_now (se
.expr
, block
);
3627 gfc_add_block_to_block (block
, &se
.post
);
3629 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF
);
3630 OMP_CLAUSE_IF_MODIFIER (c
) = ERROR_MARK
;
3631 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
3632 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3634 for (ifc
= 0; ifc
< OMP_IF_LAST
; ifc
++)
3635 if (clauses
->if_exprs
[ifc
])
3639 gfc_init_se (&se
, NULL
);
3640 gfc_conv_expr (&se
, clauses
->if_exprs
[ifc
]);
3641 gfc_add_block_to_block (block
, &se
.pre
);
3642 if_var
= gfc_evaluate_now (se
.expr
, block
);
3643 gfc_add_block_to_block (block
, &se
.post
);
3645 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF
);
3649 OMP_CLAUSE_IF_MODIFIER (c
) = VOID_CST
;
3651 case OMP_IF_PARALLEL
:
3652 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_PARALLEL
;
3655 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_SIMD
;
3658 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASK
;
3660 case OMP_IF_TASKLOOP
:
3661 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASKLOOP
;
3664 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET
;
3666 case OMP_IF_TARGET_DATA
:
3667 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_DATA
;
3669 case OMP_IF_TARGET_UPDATE
:
3670 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_UPDATE
;
3672 case OMP_IF_TARGET_ENTER_DATA
:
3673 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_ENTER_DATA
;
3675 case OMP_IF_TARGET_EXIT_DATA
:
3676 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_EXIT_DATA
;
3681 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
3682 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3685 if (clauses
->final_expr
)
3689 gfc_init_se (&se
, NULL
);
3690 gfc_conv_expr (&se
, clauses
->final_expr
);
3691 gfc_add_block_to_block (block
, &se
.pre
);
3692 final_var
= gfc_evaluate_now (se
.expr
, block
);
3693 gfc_add_block_to_block (block
, &se
.post
);
3695 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FINAL
);
3696 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
3697 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3700 if (clauses
->num_threads
)
3704 gfc_init_se (&se
, NULL
);
3705 gfc_conv_expr (&se
, clauses
->num_threads
);
3706 gfc_add_block_to_block (block
, &se
.pre
);
3707 num_threads
= gfc_evaluate_now (se
.expr
, block
);
3708 gfc_add_block_to_block (block
, &se
.post
);
3710 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_THREADS
);
3711 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
3712 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3715 chunk_size
= NULL_TREE
;
3716 if (clauses
->chunk_size
)
3718 gfc_init_se (&se
, NULL
);
3719 gfc_conv_expr (&se
, clauses
->chunk_size
);
3720 gfc_add_block_to_block (block
, &se
.pre
);
3721 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
3722 gfc_add_block_to_block (block
, &se
.post
);
3725 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
3727 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SCHEDULE
);
3728 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
3729 switch (clauses
->sched_kind
)
3731 case OMP_SCHED_STATIC
:
3732 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
3734 case OMP_SCHED_DYNAMIC
:
3735 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
3737 case OMP_SCHED_GUIDED
:
3738 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
3740 case OMP_SCHED_RUNTIME
:
3741 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
3743 case OMP_SCHED_AUTO
:
3744 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
3749 if (clauses
->sched_monotonic
)
3750 OMP_CLAUSE_SCHEDULE_KIND (c
)
3751 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
3752 | OMP_CLAUSE_SCHEDULE_MONOTONIC
);
3753 else if (clauses
->sched_nonmonotonic
)
3754 OMP_CLAUSE_SCHEDULE_KIND (c
)
3755 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
3756 | OMP_CLAUSE_SCHEDULE_NONMONOTONIC
);
3757 if (clauses
->sched_simd
)
3758 OMP_CLAUSE_SCHEDULE_SIMD (c
) = 1;
3759 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3762 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
3764 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEFAULT
);
3765 switch (clauses
->default_sharing
)
3767 case OMP_DEFAULT_NONE
:
3768 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
3770 case OMP_DEFAULT_SHARED
:
3771 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
3773 case OMP_DEFAULT_PRIVATE
:
3774 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
3776 case OMP_DEFAULT_FIRSTPRIVATE
:
3777 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
3779 case OMP_DEFAULT_PRESENT
:
3780 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRESENT
;
3785 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3788 if (clauses
->nowait
)
3790 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOWAIT
);
3791 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3794 if (clauses
->ordered
)
3796 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_ORDERED
);
3797 OMP_CLAUSE_ORDERED_EXPR (c
)
3798 = clauses
->orderedc
? build_int_cst (integer_type_node
,
3799 clauses
->orderedc
) : NULL_TREE
;
3800 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3803 if (clauses
->order_concurrent
)
3805 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_ORDER
);
3806 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3809 if (clauses
->untied
)
3811 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_UNTIED
);
3812 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3815 if (clauses
->mergeable
)
3817 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_MERGEABLE
);
3818 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3821 if (clauses
->collapse
)
3823 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_COLLAPSE
);
3824 OMP_CLAUSE_COLLAPSE_EXPR (c
)
3825 = build_int_cst (integer_type_node
, clauses
->collapse
);
3826 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3829 if (clauses
->inbranch
)
3831 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_INBRANCH
);
3832 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3835 if (clauses
->notinbranch
)
3837 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOTINBRANCH
);
3838 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3841 switch (clauses
->cancel
)
3843 case OMP_CANCEL_UNKNOWN
:
3845 case OMP_CANCEL_PARALLEL
:
3846 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PARALLEL
);
3847 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3849 case OMP_CANCEL_SECTIONS
:
3850 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SECTIONS
);
3851 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3854 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FOR
);
3855 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3857 case OMP_CANCEL_TASKGROUP
:
3858 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_TASKGROUP
);
3859 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3863 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
3865 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PROC_BIND
);
3866 switch (clauses
->proc_bind
)
3868 case OMP_PROC_BIND_PRIMARY
:
3869 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_PRIMARY
;
3871 case OMP_PROC_BIND_MASTER
:
3872 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
3874 case OMP_PROC_BIND_SPREAD
:
3875 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
3877 case OMP_PROC_BIND_CLOSE
:
3878 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
3883 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3886 if (clauses
->safelen_expr
)
3890 gfc_init_se (&se
, NULL
);
3891 gfc_conv_expr (&se
, clauses
->safelen_expr
);
3892 gfc_add_block_to_block (block
, &se
.pre
);
3893 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
3894 gfc_add_block_to_block (block
, &se
.post
);
3896 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SAFELEN
);
3897 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
3898 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3901 if (clauses
->simdlen_expr
)
3905 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMDLEN
);
3906 OMP_CLAUSE_SIMDLEN_EXPR (c
)
3907 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
3908 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3914 gfc_init_se (&se
, NULL
);
3915 gfc_conv_expr (&se
, clauses
->simdlen_expr
);
3916 gfc_add_block_to_block (block
, &se
.pre
);
3917 simdlen_var
= gfc_evaluate_now (se
.expr
, block
);
3918 gfc_add_block_to_block (block
, &se
.post
);
3920 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMDLEN
);
3921 OMP_CLAUSE_SIMDLEN_EXPR (c
) = simdlen_var
;
3922 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3926 if (clauses
->num_teams
)
3930 gfc_init_se (&se
, NULL
);
3931 gfc_conv_expr (&se
, clauses
->num_teams
);
3932 gfc_add_block_to_block (block
, &se
.pre
);
3933 num_teams
= gfc_evaluate_now (se
.expr
, block
);
3934 gfc_add_block_to_block (block
, &se
.post
);
3936 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_TEAMS
);
3937 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
3938 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3941 if (clauses
->device
)
3945 gfc_init_se (&se
, NULL
);
3946 gfc_conv_expr (&se
, clauses
->device
);
3947 gfc_add_block_to_block (block
, &se
.pre
);
3948 device
= gfc_evaluate_now (se
.expr
, block
);
3949 gfc_add_block_to_block (block
, &se
.post
);
3951 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEVICE
);
3952 OMP_CLAUSE_DEVICE_ID (c
) = device
;
3953 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3956 if (clauses
->thread_limit
)
3960 gfc_init_se (&se
, NULL
);
3961 gfc_conv_expr (&se
, clauses
->thread_limit
);
3962 gfc_add_block_to_block (block
, &se
.pre
);
3963 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
3964 gfc_add_block_to_block (block
, &se
.post
);
3966 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_THREAD_LIMIT
);
3967 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
3968 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3971 chunk_size
= NULL_TREE
;
3972 if (clauses
->dist_chunk_size
)
3974 gfc_init_se (&se
, NULL
);
3975 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
3976 gfc_add_block_to_block (block
, &se
.pre
);
3977 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
3978 gfc_add_block_to_block (block
, &se
.post
);
3981 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
3983 c
= build_omp_clause (gfc_get_location (&where
),
3984 OMP_CLAUSE_DIST_SCHEDULE
);
3985 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
3986 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3989 if (clauses
->grainsize
)
3993 gfc_init_se (&se
, NULL
);
3994 gfc_conv_expr (&se
, clauses
->grainsize
);
3995 gfc_add_block_to_block (block
, &se
.pre
);
3996 grainsize
= gfc_evaluate_now (se
.expr
, block
);
3997 gfc_add_block_to_block (block
, &se
.post
);
3999 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_GRAINSIZE
);
4000 OMP_CLAUSE_GRAINSIZE_EXPR (c
) = grainsize
;
4001 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4004 if (clauses
->num_tasks
)
4008 gfc_init_se (&se
, NULL
);
4009 gfc_conv_expr (&se
, clauses
->num_tasks
);
4010 gfc_add_block_to_block (block
, &se
.pre
);
4011 num_tasks
= gfc_evaluate_now (se
.expr
, block
);
4012 gfc_add_block_to_block (block
, &se
.post
);
4014 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_TASKS
);
4015 OMP_CLAUSE_NUM_TASKS_EXPR (c
) = num_tasks
;
4016 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4019 if (clauses
->priority
)
4023 gfc_init_se (&se
, NULL
);
4024 gfc_conv_expr (&se
, clauses
->priority
);
4025 gfc_add_block_to_block (block
, &se
.pre
);
4026 priority
= gfc_evaluate_now (se
.expr
, block
);
4027 gfc_add_block_to_block (block
, &se
.post
);
4029 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PRIORITY
);
4030 OMP_CLAUSE_PRIORITY_EXPR (c
) = priority
;
4031 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4034 if (clauses
->detach
)
4038 gfc_init_se (&se
, NULL
);
4039 gfc_conv_expr (&se
, clauses
->detach
);
4040 gfc_add_block_to_block (block
, &se
.pre
);
4042 gfc_add_block_to_block (block
, &se
.post
);
4044 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DETACH
);
4045 TREE_ADDRESSABLE (detach
) = 1;
4046 OMP_CLAUSE_DECL (c
) = detach
;
4047 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4050 if (clauses
->filter
)
4054 gfc_init_se (&se
, NULL
);
4055 gfc_conv_expr (&se
, clauses
->filter
);
4056 gfc_add_block_to_block (block
, &se
.pre
);
4057 filter
= gfc_evaluate_now (se
.expr
, block
);
4058 gfc_add_block_to_block (block
, &se
.post
);
4060 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FILTER
);
4061 OMP_CLAUSE_FILTER_EXPR (c
) = filter
;
4062 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4069 gfc_init_se (&se
, NULL
);
4070 gfc_conv_expr (&se
, clauses
->hint
);
4071 gfc_add_block_to_block (block
, &se
.pre
);
4072 hint
= gfc_evaluate_now (se
.expr
, block
);
4073 gfc_add_block_to_block (block
, &se
.post
);
4075 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_HINT
);
4076 OMP_CLAUSE_HINT_EXPR (c
) = hint
;
4077 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4082 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMD
);
4083 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4085 if (clauses
->threads
)
4087 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_THREADS
);
4088 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4090 if (clauses
->nogroup
)
4092 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOGROUP
);
4093 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4096 for (int i
= 0; i
< OMP_DEFAULTMAP_CAT_NUM
; i
++)
4098 if (clauses
->defaultmap
[i
] == OMP_DEFAULTMAP_UNSET
)
4100 enum omp_clause_defaultmap_kind behavior
, category
;
4101 switch ((gfc_omp_defaultmap_category
) i
)
4103 case OMP_DEFAULTMAP_CAT_UNCATEGORIZED
:
4104 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
;
4106 case OMP_DEFAULTMAP_CAT_SCALAR
:
4107 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
;
4109 case OMP_DEFAULTMAP_CAT_AGGREGATE
:
4110 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
;
4112 case OMP_DEFAULTMAP_CAT_ALLOCATABLE
:
4113 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
;
4115 case OMP_DEFAULTMAP_CAT_POINTER
:
4116 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
;
4118 default: gcc_unreachable ();
4120 switch (clauses
->defaultmap
[i
])
4122 case OMP_DEFAULTMAP_ALLOC
:
4123 behavior
= OMP_CLAUSE_DEFAULTMAP_ALLOC
;
4125 case OMP_DEFAULTMAP_TO
: behavior
= OMP_CLAUSE_DEFAULTMAP_TO
; break;
4126 case OMP_DEFAULTMAP_FROM
: behavior
= OMP_CLAUSE_DEFAULTMAP_FROM
; break;
4127 case OMP_DEFAULTMAP_TOFROM
:
4128 behavior
= OMP_CLAUSE_DEFAULTMAP_TOFROM
;
4130 case OMP_DEFAULTMAP_FIRSTPRIVATE
:
4131 behavior
= OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
;
4133 case OMP_DEFAULTMAP_NONE
: behavior
= OMP_CLAUSE_DEFAULTMAP_NONE
; break;
4134 case OMP_DEFAULTMAP_DEFAULT
:
4135 behavior
= OMP_CLAUSE_DEFAULTMAP_DEFAULT
;
4137 default: gcc_unreachable ();
4139 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEFAULTMAP
);
4140 OMP_CLAUSE_DEFAULTMAP_SET_KIND (c
, behavior
, category
);
4141 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4144 if (clauses
->depend_source
)
4146 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEPEND
);
4147 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_SOURCE
;
4148 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4153 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_ASYNC
);
4154 if (clauses
->async_expr
)
4155 OMP_CLAUSE_ASYNC_EXPR (c
)
4156 = gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
4158 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
4159 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4163 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SEQ
);
4164 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4166 if (clauses
->par_auto
)
4168 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_AUTO
);
4169 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4171 if (clauses
->if_present
)
4173 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF_PRESENT
);
4174 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4176 if (clauses
->finalize
)
4178 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FINALIZE
);
4179 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4181 if (clauses
->independent
)
4183 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_INDEPENDENT
);
4184 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4186 if (clauses
->wait_list
)
4190 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
4192 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_WAIT
);
4193 OMP_CLAUSE_DECL (c
) = gfc_convert_expr_to_tree (block
, el
->expr
);
4194 OMP_CLAUSE_CHAIN (c
) = omp_clauses
;
4198 if (clauses
->num_gangs_expr
)
4201 = gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
4202 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_GANGS
);
4203 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
4204 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4206 if (clauses
->num_workers_expr
)
4208 tree num_workers_var
4209 = gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
4210 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_WORKERS
);
4211 OMP_CLAUSE_NUM_WORKERS_EXPR (c
) = num_workers_var
;
4212 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4214 if (clauses
->vector_length_expr
)
4216 tree vector_length_var
4217 = gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
4218 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_VECTOR_LENGTH
);
4219 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
) = vector_length_var
;
4220 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4222 if (clauses
->tile_list
)
4224 vec
<tree
, va_gc
> *tvec
;
4227 vec_alloc (tvec
, 4);
4229 for (el
= clauses
->tile_list
; el
; el
= el
->next
)
4230 vec_safe_push (tvec
, gfc_convert_expr_to_tree (block
, el
->expr
));
4232 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_TILE
);
4233 OMP_CLAUSE_TILE_LIST (c
) = build_tree_list_vec (tvec
);
4234 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4237 if (clauses
->vector
)
4239 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_VECTOR
);
4240 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4242 if (clauses
->vector_expr
)
4245 = gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
4246 OMP_CLAUSE_VECTOR_EXPR (c
) = vector_var
;
4248 /* TODO: We're not capturing location information for individual
4249 clauses. However, if we have an expression attached to the
4250 clause, that one provides better location information. */
4251 OMP_CLAUSE_LOCATION (c
)
4252 = gfc_get_location (&clauses
->vector_expr
->where
);
4255 if (clauses
->worker
)
4257 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_WORKER
);
4258 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4260 if (clauses
->worker_expr
)
4263 = gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
4264 OMP_CLAUSE_WORKER_EXPR (c
) = worker_var
;
4266 /* TODO: We're not capturing location information for individual
4267 clauses. However, if we have an expression attached to the
4268 clause, that one provides better location information. */
4269 OMP_CLAUSE_LOCATION (c
)
4270 = gfc_get_location (&clauses
->worker_expr
->where
);
4276 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_GANG
);
4277 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4279 if (clauses
->gang_num_expr
)
4281 arg
= gfc_convert_expr_to_tree (block
, clauses
->gang_num_expr
);
4282 OMP_CLAUSE_GANG_EXPR (c
) = arg
;
4284 /* TODO: We're not capturing location information for individual
4285 clauses. However, if we have an expression attached to the
4286 clause, that one provides better location information. */
4287 OMP_CLAUSE_LOCATION (c
)
4288 = gfc_get_location (&clauses
->gang_num_expr
->where
);
4291 if (clauses
->gang_static
)
4293 arg
= clauses
->gang_static_expr
4294 ? gfc_convert_expr_to_tree (block
, clauses
->gang_static_expr
)
4295 : integer_minus_one_node
;
4296 OMP_CLAUSE_GANG_STATIC_EXPR (c
) = arg
;
4299 if (clauses
->bind
!= OMP_BIND_UNSET
)
4301 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_BIND
);
4302 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4303 switch (clauses
->bind
)
4305 case OMP_BIND_TEAMS
:
4306 OMP_CLAUSE_BIND_KIND (c
) = OMP_CLAUSE_BIND_TEAMS
;
4308 case OMP_BIND_PARALLEL
:
4309 OMP_CLAUSE_BIND_KIND (c
) = OMP_CLAUSE_BIND_PARALLEL
;
4311 case OMP_BIND_THREAD
:
4312 OMP_CLAUSE_BIND_KIND (c
) = OMP_CLAUSE_BIND_THREAD
;
4318 /* OpenACC 'nohost' clauses cannot appear here. */
4319 gcc_checking_assert (!clauses
->nohost
);
4321 return nreverse (omp_clauses
);
4324 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
4327 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
4332 stmt
= gfc_trans_code (code
);
4333 if (TREE_CODE (stmt
) != BIND_EXPR
)
4335 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
4337 tree block
= poplevel (1, 0);
4338 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
4348 /* Translate OpenACC 'parallel', 'kernels', 'serial', 'data', 'host_data'
4352 gfc_trans_oacc_construct (gfc_code
*code
)
4355 tree stmt
, oacc_clauses
;
4356 enum tree_code construct_code
;
4360 case EXEC_OACC_PARALLEL
:
4361 construct_code
= OACC_PARALLEL
;
4363 case EXEC_OACC_KERNELS
:
4364 construct_code
= OACC_KERNELS
;
4366 case EXEC_OACC_SERIAL
:
4367 construct_code
= OACC_SERIAL
;
4369 case EXEC_OACC_DATA
:
4370 construct_code
= OACC_DATA
;
4372 case EXEC_OACC_HOST_DATA
:
4373 construct_code
= OACC_HOST_DATA
;
4379 gfc_start_block (&block
);
4380 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4381 code
->loc
, false, true);
4382 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4383 stmt
= build2_loc (gfc_get_location (&code
->loc
), construct_code
,
4384 void_type_node
, stmt
, oacc_clauses
);
4385 gfc_add_expr_to_block (&block
, stmt
);
4386 return gfc_finish_block (&block
);
4389 /* update, enter_data, exit_data, cache. */
4391 gfc_trans_oacc_executable_directive (gfc_code
*code
)
4394 tree stmt
, oacc_clauses
;
4395 enum tree_code construct_code
;
4399 case EXEC_OACC_UPDATE
:
4400 construct_code
= OACC_UPDATE
;
4402 case EXEC_OACC_ENTER_DATA
:
4403 construct_code
= OACC_ENTER_DATA
;
4405 case EXEC_OACC_EXIT_DATA
:
4406 construct_code
= OACC_EXIT_DATA
;
4408 case EXEC_OACC_CACHE
:
4409 construct_code
= OACC_CACHE
;
4415 gfc_start_block (&block
);
4416 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4417 code
->loc
, false, true);
4418 stmt
= build1_loc (input_location
, construct_code
, void_type_node
,
4420 gfc_add_expr_to_block (&block
, stmt
);
4421 return gfc_finish_block (&block
);
4425 gfc_trans_oacc_wait_directive (gfc_code
*code
)
4429 vec
<tree
, va_gc
> *args
;
4432 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
4433 location_t loc
= input_location
;
4435 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
4438 vec_alloc (args
, nparms
+ 2);
4439 stmt
= builtin_decl_explicit (BUILT_IN_GOACC_WAIT
);
4441 gfc_start_block (&block
);
4443 if (clauses
->async_expr
)
4444 t
= gfc_convert_expr_to_tree (&block
, clauses
->async_expr
);
4446 t
= build_int_cst (integer_type_node
, -2);
4448 args
->quick_push (t
);
4449 args
->quick_push (build_int_cst (integer_type_node
, nparms
));
4451 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
4452 args
->quick_push (gfc_convert_expr_to_tree (&block
, el
->expr
));
4454 stmt
= build_call_expr_loc_vec (loc
, stmt
, args
);
4455 gfc_add_expr_to_block (&block
, stmt
);
4459 return gfc_finish_block (&block
);
4462 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
4463 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
4466 gfc_trans_omp_atomic (gfc_code
*code
)
4468 gfc_code
*atomic_code
= code
->block
;
4472 gfc_expr
*expr2
, *e
;
4475 tree lhsaddr
, type
, rhs
, x
;
4476 enum tree_code op
= ERROR_MARK
;
4477 enum tree_code aop
= OMP_ATOMIC
;
4478 bool var_on_left
= false;
4479 enum omp_memory_order mo
;
4480 switch (atomic_code
->ext
.omp_clauses
->memorder
)
4482 case OMP_MEMORDER_UNSET
: mo
= OMP_MEMORY_ORDER_UNSPECIFIED
; break;
4483 case OMP_MEMORDER_ACQ_REL
: mo
= OMP_MEMORY_ORDER_ACQ_REL
; break;
4484 case OMP_MEMORDER_ACQUIRE
: mo
= OMP_MEMORY_ORDER_ACQUIRE
; break;
4485 case OMP_MEMORDER_RELAXED
: mo
= OMP_MEMORY_ORDER_RELAXED
; break;
4486 case OMP_MEMORDER_RELEASE
: mo
= OMP_MEMORY_ORDER_RELEASE
; break;
4487 case OMP_MEMORDER_SEQ_CST
: mo
= OMP_MEMORY_ORDER_SEQ_CST
; break;
4488 default: gcc_unreachable ();
4491 code
= code
->block
->next
;
4492 gcc_assert (code
->op
== EXEC_ASSIGN
);
4493 var
= code
->expr1
->symtree
->n
.sym
;
4495 gfc_init_se (&lse
, NULL
);
4496 gfc_init_se (&rse
, NULL
);
4497 gfc_init_se (&vse
, NULL
);
4498 gfc_start_block (&block
);
4500 expr2
= code
->expr2
;
4501 if (((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
4502 != GFC_OMP_ATOMIC_WRITE
)
4503 && expr2
->expr_type
== EXPR_FUNCTION
4504 && expr2
->value
.function
.isym
4505 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
4506 expr2
= expr2
->value
.function
.actual
->expr
;
4508 if ((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
4509 == GFC_OMP_ATOMIC_READ
)
4511 gfc_conv_expr (&vse
, code
->expr1
);
4512 gfc_add_block_to_block (&block
, &vse
.pre
);
4514 gfc_conv_expr (&lse
, expr2
);
4515 gfc_add_block_to_block (&block
, &lse
.pre
);
4516 type
= TREE_TYPE (lse
.expr
);
4517 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
4519 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
4520 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
4521 x
= convert (TREE_TYPE (vse
.expr
), x
);
4522 gfc_add_modify (&block
, vse
.expr
, x
);
4524 gfc_add_block_to_block (&block
, &lse
.pre
);
4525 gfc_add_block_to_block (&block
, &rse
.pre
);
4527 return gfc_finish_block (&block
);
4529 if (atomic_code
->ext
.omp_clauses
->capture
)
4531 aop
= OMP_ATOMIC_CAPTURE_NEW
;
4532 if (expr2
->expr_type
== EXPR_VARIABLE
)
4534 aop
= OMP_ATOMIC_CAPTURE_OLD
;
4535 gfc_conv_expr (&vse
, code
->expr1
);
4536 gfc_add_block_to_block (&block
, &vse
.pre
);
4538 gfc_conv_expr (&lse
, expr2
);
4539 gfc_add_block_to_block (&block
, &lse
.pre
);
4540 gfc_init_se (&lse
, NULL
);
4542 var
= code
->expr1
->symtree
->n
.sym
;
4543 expr2
= code
->expr2
;
4544 if (expr2
->expr_type
== EXPR_FUNCTION
4545 && expr2
->value
.function
.isym
4546 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
4547 expr2
= expr2
->value
.function
.actual
->expr
;
4551 gfc_conv_expr (&lse
, code
->expr1
);
4552 gfc_add_block_to_block (&block
, &lse
.pre
);
4553 type
= TREE_TYPE (lse
.expr
);
4554 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
4556 if (((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
4557 == GFC_OMP_ATOMIC_WRITE
)
4558 || (atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_SWAP
))
4560 gfc_conv_expr (&rse
, expr2
);
4561 gfc_add_block_to_block (&block
, &rse
.pre
);
4563 else if (expr2
->expr_type
== EXPR_OP
)
4566 switch (expr2
->value
.op
.op
)
4568 case INTRINSIC_PLUS
:
4571 case INTRINSIC_TIMES
:
4574 case INTRINSIC_MINUS
:
4577 case INTRINSIC_DIVIDE
:
4578 if (expr2
->ts
.type
== BT_INTEGER
)
4579 op
= TRUNC_DIV_EXPR
;
4584 op
= TRUTH_ANDIF_EXPR
;
4587 op
= TRUTH_ORIF_EXPR
;
4592 case INTRINSIC_NEQV
:
4598 e
= expr2
->value
.op
.op1
;
4599 if (e
->expr_type
== EXPR_FUNCTION
4600 && e
->value
.function
.isym
4601 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
4602 e
= e
->value
.function
.actual
->expr
;
4603 if (e
->expr_type
== EXPR_VARIABLE
4604 && e
->symtree
!= NULL
4605 && e
->symtree
->n
.sym
== var
)
4607 expr2
= expr2
->value
.op
.op2
;
4612 e
= expr2
->value
.op
.op2
;
4613 if (e
->expr_type
== EXPR_FUNCTION
4614 && e
->value
.function
.isym
4615 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
4616 e
= e
->value
.function
.actual
->expr
;
4617 gcc_assert (e
->expr_type
== EXPR_VARIABLE
4618 && e
->symtree
!= NULL
4619 && e
->symtree
->n
.sym
== var
);
4620 expr2
= expr2
->value
.op
.op1
;
4621 var_on_left
= false;
4623 gfc_conv_expr (&rse
, expr2
);
4624 gfc_add_block_to_block (&block
, &rse
.pre
);
4628 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
4629 switch (expr2
->value
.function
.isym
->id
)
4649 e
= expr2
->value
.function
.actual
->expr
;
4650 gcc_assert (e
->expr_type
== EXPR_VARIABLE
4651 && e
->symtree
!= NULL
4652 && e
->symtree
->n
.sym
== var
);
4654 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
4655 gfc_add_block_to_block (&block
, &rse
.pre
);
4656 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
4658 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
4659 gfc_actual_arglist
*arg
;
4661 gfc_add_modify (&block
, accum
, rse
.expr
);
4662 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
4665 gfc_init_block (&rse
.pre
);
4666 gfc_conv_expr (&rse
, arg
->expr
);
4667 gfc_add_block_to_block (&block
, &rse
.pre
);
4668 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
4670 gfc_add_modify (&block
, accum
, x
);
4676 expr2
= expr2
->value
.function
.actual
->next
->expr
;
4679 lhsaddr
= save_expr (lhsaddr
);
4680 if (TREE_CODE (lhsaddr
) != SAVE_EXPR
4681 && (TREE_CODE (lhsaddr
) != ADDR_EXPR
4682 || !VAR_P (TREE_OPERAND (lhsaddr
, 0))))
4684 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
4685 it even after unsharing function body. */
4686 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
4687 DECL_CONTEXT (var
) = current_function_decl
;
4688 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
,
4689 NULL_TREE
, NULL_TREE
);
4692 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
4694 if (((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
4695 == GFC_OMP_ATOMIC_WRITE
)
4696 || (atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_SWAP
))
4700 x
= convert (TREE_TYPE (rhs
),
4701 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
4703 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
4705 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
4708 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
4709 && TREE_CODE (type
) != COMPLEX_TYPE
)
4710 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
4711 TREE_TYPE (TREE_TYPE (rhs
)), x
);
4713 gfc_add_block_to_block (&block
, &lse
.pre
);
4714 gfc_add_block_to_block (&block
, &rse
.pre
);
4716 if (aop
== OMP_ATOMIC
)
4718 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
4719 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
4720 gfc_add_expr_to_block (&block
, x
);
4724 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
4727 expr2
= code
->expr2
;
4728 if (expr2
->expr_type
== EXPR_FUNCTION
4729 && expr2
->value
.function
.isym
4730 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
4731 expr2
= expr2
->value
.function
.actual
->expr
;
4733 gcc_assert (expr2
->expr_type
== EXPR_VARIABLE
);
4734 gfc_conv_expr (&vse
, code
->expr1
);
4735 gfc_add_block_to_block (&block
, &vse
.pre
);
4737 gfc_init_se (&lse
, NULL
);
4738 gfc_conv_expr (&lse
, expr2
);
4739 gfc_add_block_to_block (&block
, &lse
.pre
);
4741 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
4742 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
4743 x
= convert (TREE_TYPE (vse
.expr
), x
);
4744 gfc_add_modify (&block
, vse
.expr
, x
);
4747 return gfc_finish_block (&block
);
4751 gfc_trans_omp_barrier (void)
4753 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
4754 return build_call_expr_loc (input_location
, decl
, 0);
4758 gfc_trans_omp_cancel (gfc_code
*code
)
4761 tree ifc
= boolean_true_node
;
4763 switch (code
->ext
.omp_clauses
->cancel
)
4765 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
4766 case OMP_CANCEL_DO
: mask
= 2; break;
4767 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
4768 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
4769 default: gcc_unreachable ();
4771 gfc_start_block (&block
);
4772 if (code
->ext
.omp_clauses
->if_expr
4773 || code
->ext
.omp_clauses
->if_exprs
[OMP_IF_CANCEL
])
4778 gcc_assert ((code
->ext
.omp_clauses
->if_expr
== NULL
)
4779 ^ (code
->ext
.omp_clauses
->if_exprs
[OMP_IF_CANCEL
] == NULL
));
4780 gfc_init_se (&se
, NULL
);
4781 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->if_expr
!= NULL
4782 ? code
->ext
.omp_clauses
->if_expr
4783 : code
->ext
.omp_clauses
->if_exprs
[OMP_IF_CANCEL
]);
4784 gfc_add_block_to_block (&block
, &se
.pre
);
4785 if_var
= gfc_evaluate_now (se
.expr
, &block
);
4786 gfc_add_block_to_block (&block
, &se
.post
);
4787 tree type
= TREE_TYPE (if_var
);
4788 ifc
= fold_build2_loc (input_location
, NE_EXPR
,
4789 boolean_type_node
, if_var
,
4790 build_zero_cst (type
));
4792 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
4793 tree c_bool_type
= TREE_TYPE (TREE_TYPE (decl
));
4794 ifc
= fold_convert (c_bool_type
, ifc
);
4795 gfc_add_expr_to_block (&block
,
4796 build_call_expr_loc (input_location
, decl
, 2,
4797 build_int_cst (integer_type_node
,
4799 return gfc_finish_block (&block
);
4803 gfc_trans_omp_cancellation_point (gfc_code
*code
)
4806 switch (code
->ext
.omp_clauses
->cancel
)
4808 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
4809 case OMP_CANCEL_DO
: mask
= 2; break;
4810 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
4811 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
4812 default: gcc_unreachable ();
4814 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT
);
4815 return build_call_expr_loc (input_location
, decl
, 1,
4816 build_int_cst (integer_type_node
, mask
));
4820 gfc_trans_omp_critical (gfc_code
*code
)
4823 tree stmt
, name
= NULL_TREE
;
4824 if (code
->ext
.omp_clauses
->critical_name
!= NULL
)
4825 name
= get_identifier (code
->ext
.omp_clauses
->critical_name
);
4826 gfc_start_block (&block
);
4827 stmt
= make_node (OMP_CRITICAL
);
4828 TREE_TYPE (stmt
) = void_type_node
;
4829 OMP_CRITICAL_BODY (stmt
) = gfc_trans_code (code
->block
->next
);
4830 OMP_CRITICAL_NAME (stmt
) = name
;
4831 OMP_CRITICAL_CLAUSES (stmt
) = gfc_trans_omp_clauses (&block
,
4832 code
->ext
.omp_clauses
,
4834 gfc_add_expr_to_block (&block
, stmt
);
4835 return gfc_finish_block (&block
);
4838 typedef struct dovar_init_d
{
4845 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
4846 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
4849 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
, orig_decls
;
4850 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
4853 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
4854 int i
, collapse
= clauses
->collapse
;
4855 vec
<dovar_init
> inits
= vNULL
;
4858 vec
<tree
, va_heap
, vl_embed
> *saved_doacross_steps
= doacross_steps
;
4859 gfc_expr_list
*tile
= do_clauses
? do_clauses
->tile_list
: clauses
->tile_list
;
4861 /* Both collapsed and tiled loops are lowered the same way. In
4862 OpenACC, those clauses are not compatible, so prioritize the tile
4863 clause, if present. */
4867 for (gfc_expr_list
*el
= tile
; el
; el
= el
->next
)
4871 doacross_steps
= NULL
;
4872 if (clauses
->orderedc
)
4873 collapse
= clauses
->orderedc
;
4877 code
= code
->block
->next
;
4878 gcc_assert (code
->op
== EXEC_DO
);
4880 init
= make_tree_vec (collapse
);
4881 cond
= make_tree_vec (collapse
);
4882 incr
= make_tree_vec (collapse
);
4883 orig_decls
= clauses
->orderedc
? make_tree_vec (collapse
) : NULL_TREE
;
4887 gfc_start_block (&block
);
4891 /* simd schedule modifier is only useful for composite do simd and other
4892 constructs including that, where gfc_trans_omp_do is only called
4893 on the simd construct and DO's clauses are translated elsewhere. */
4894 do_clauses
->sched_simd
= false;
4896 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
4898 for (i
= 0; i
< collapse
; i
++)
4901 int dovar_found
= 0;
4906 gfc_omp_namelist
*n
= NULL
;
4907 if (op
== EXEC_OMP_SIMD
&& collapse
== 1)
4908 for (n
= clauses
->lists
[OMP_LIST_LINEAR
];
4909 n
!= NULL
; n
= n
->next
)
4910 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
4915 if (n
== NULL
&& op
!= EXEC_OMP_DISTRIBUTE
)
4916 for (n
= clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4917 n
!= NULL
; n
= n
->next
)
4918 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
4924 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
4925 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
4932 /* Evaluate all the expressions in the iterator. */
4933 gfc_init_se (&se
, NULL
);
4934 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
4935 gfc_add_block_to_block (pblock
, &se
.pre
);
4937 type
= TREE_TYPE (dovar
);
4938 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
4940 gfc_init_se (&se
, NULL
);
4941 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
4942 gfc_add_block_to_block (pblock
, &se
.pre
);
4943 from
= gfc_evaluate_now (se
.expr
, pblock
);
4945 gfc_init_se (&se
, NULL
);
4946 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
4947 gfc_add_block_to_block (pblock
, &se
.pre
);
4948 to
= gfc_evaluate_now (se
.expr
, pblock
);
4950 gfc_init_se (&se
, NULL
);
4951 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
4952 gfc_add_block_to_block (pblock
, &se
.pre
);
4953 step
= gfc_evaluate_now (se
.expr
, pblock
);
4956 /* Special case simple loops. */
4959 if (integer_onep (step
))
4961 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
4966 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
4972 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
4973 /* The condition should not be folded. */
4974 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
4975 ? LE_EXPR
: GE_EXPR
,
4976 logical_type_node
, dovar
, to
);
4977 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
4979 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
4982 TREE_VEC_ELT (incr
, i
));
4986 /* STEP is not 1 or -1. Use:
4987 for (count = 0; count < (to + step - from) / step; count++)
4989 dovar = from + count * step;
4993 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
4994 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
4995 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
4997 tmp
= gfc_evaluate_now (tmp
, pblock
);
4998 count
= gfc_create_var (type
, "count");
4999 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
5000 build_int_cst (type
, 0));
5001 /* The condition should not be folded. */
5002 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
5005 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
5007 build_int_cst (type
, 1));
5008 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
5009 MODIFY_EXPR
, type
, count
,
5010 TREE_VEC_ELT (incr
, i
));
5012 /* Initialize DOVAR. */
5013 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
5014 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
5015 dovar_init e
= {dovar
, tmp
};
5016 inits
.safe_push (e
);
5017 if (clauses
->orderedc
)
5019 if (doacross_steps
== NULL
)
5020 vec_safe_grow_cleared (doacross_steps
, clauses
->orderedc
, true);
5021 (*doacross_steps
)[i
] = step
;
5025 TREE_VEC_ELT (orig_decls
, i
) = dovar_decl
;
5027 if (dovar_found
== 3
5028 && op
== EXEC_OMP_SIMD
5032 for (tmp
= omp_clauses
; tmp
; tmp
= OMP_CLAUSE_CHAIN (tmp
))
5033 if (OMP_CLAUSE_CODE (tmp
) == OMP_CLAUSE_LINEAR
5034 && OMP_CLAUSE_DECL (tmp
) == dovar
)
5036 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
5040 if (!dovar_found
&& op
== EXEC_OMP_SIMD
)
5044 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
5045 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
5046 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
5047 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
5048 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
5053 else if (!dovar_found
&& !simple
)
5055 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
5056 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
5057 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
5059 if (dovar_found
> 1)
5066 /* If dovar is lastprivate, but different counter is used,
5067 dovar += step needs to be added to
5068 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
5069 will have the value on entry of the last loop, rather
5070 than value after iterator increment. */
5071 if (clauses
->orderedc
)
5073 if (clauses
->collapse
<= 1 || i
>= clauses
->collapse
)
5076 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
5077 type
, count
, build_one_cst (type
));
5078 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
,
5080 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
5085 tmp
= gfc_evaluate_now (step
, pblock
);
5086 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
5089 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
5091 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5092 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5093 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
5095 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
5098 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5099 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
5101 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
5105 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
5107 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5108 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5109 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
5111 tree l
= build_omp_clause (input_location
,
5112 OMP_CLAUSE_LASTPRIVATE
);
5113 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5114 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (l
) = 1;
5115 OMP_CLAUSE_DECL (l
) = dovar_decl
;
5116 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
5117 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
5119 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
5123 gcc_assert (simple
|| c
!= NULL
);
5127 if (op
!= EXEC_OMP_SIMD
|| dovar_found
== 1)
5128 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
5129 else if (collapse
== 1)
5131 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
5132 OMP_CLAUSE_LINEAR_STEP (tmp
) = build_int_cst (type
, 1);
5133 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
5134 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
5137 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
5138 OMP_CLAUSE_DECL (tmp
) = count
;
5139 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
5142 if (i
+ 1 < collapse
)
5143 code
= code
->block
->next
;
5146 if (pblock
!= &block
)
5149 gfc_start_block (&block
);
5152 gfc_start_block (&body
);
5154 FOR_EACH_VEC_ELT (inits
, ix
, di
)
5155 gfc_add_modify (&body
, di
->var
, di
->init
);
5158 /* Cycle statement is implemented with a goto. Exit statement must not be
5159 present for this loop. */
5160 cycle_label
= gfc_build_label_decl (NULL_TREE
);
5162 /* Put these labels where they can be found later. */
5164 code
->cycle_label
= cycle_label
;
5165 code
->exit_label
= NULL_TREE
;
5167 /* Main loop body. */
5168 if (clauses
->lists
[OMP_LIST_REDUCTION_INSCAN
])
5170 gcc_assert (code
->block
->next
->next
->op
== EXEC_OMP_SCAN
);
5171 gcc_assert (code
->block
->next
->next
->next
->next
== NULL
);
5172 locus
*cloc
= &code
->block
->next
->next
->loc
;
5173 location_t loc
= gfc_get_location (cloc
);
5175 gfc_code code2
= *code
->block
->next
;
5177 tmp
= gfc_trans_code (&code2
);
5178 tmp
= build2 (OMP_SCAN
, void_type_node
, tmp
, NULL_TREE
);
5179 SET_EXPR_LOCATION (tmp
, loc
);
5180 gfc_add_expr_to_block (&body
, tmp
);
5181 input_location
= loc
;
5182 tree c
= gfc_trans_omp_clauses (&body
,
5183 code
->block
->next
->next
->ext
.omp_clauses
,
5185 code2
= *code
->block
->next
->next
->next
;
5187 tmp
= gfc_trans_code (&code2
);
5188 tmp
= build2 (OMP_SCAN
, void_type_node
, tmp
, c
);
5189 SET_EXPR_LOCATION (tmp
, loc
);
5192 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
5193 gfc_add_expr_to_block (&body
, tmp
);
5195 /* Label for cycle statements (if needed). */
5196 if (TREE_USED (cycle_label
))
5198 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
5199 gfc_add_expr_to_block (&body
, tmp
);
5202 /* End of loop body. */
5205 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
5206 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
5207 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
5208 case EXEC_OMP_LOOP
: stmt
= make_node (OMP_LOOP
); break;
5209 case EXEC_OMP_TASKLOOP
: stmt
= make_node (OMP_TASKLOOP
); break;
5210 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
5211 default: gcc_unreachable ();
5214 TREE_TYPE (stmt
) = void_type_node
;
5215 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
5216 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
5217 OMP_FOR_INIT (stmt
) = init
;
5218 OMP_FOR_COND (stmt
) = cond
;
5219 OMP_FOR_INCR (stmt
) = incr
;
5221 OMP_FOR_ORIG_DECLS (stmt
) = orig_decls
;
5222 gfc_add_expr_to_block (&block
, stmt
);
5224 vec_free (doacross_steps
);
5225 doacross_steps
= saved_doacross_steps
;
5227 return gfc_finish_block (&block
);
5230 /* Translate combined OpenACC 'parallel loop', 'kernels loop', 'serial loop'
5234 gfc_trans_oacc_combined_directive (gfc_code
*code
)
5236 stmtblock_t block
, *pblock
= NULL
;
5237 gfc_omp_clauses construct_clauses
, loop_clauses
;
5238 tree stmt
, oacc_clauses
= NULL_TREE
;
5239 enum tree_code construct_code
;
5240 location_t loc
= input_location
;
5244 case EXEC_OACC_PARALLEL_LOOP
:
5245 construct_code
= OACC_PARALLEL
;
5247 case EXEC_OACC_KERNELS_LOOP
:
5248 construct_code
= OACC_KERNELS
;
5250 case EXEC_OACC_SERIAL_LOOP
:
5251 construct_code
= OACC_SERIAL
;
5257 gfc_start_block (&block
);
5259 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
5260 if (code
->ext
.omp_clauses
!= NULL
)
5262 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
5263 sizeof (construct_clauses
));
5264 loop_clauses
.collapse
= construct_clauses
.collapse
;
5265 loop_clauses
.gang
= construct_clauses
.gang
;
5266 loop_clauses
.gang_static
= construct_clauses
.gang_static
;
5267 loop_clauses
.gang_num_expr
= construct_clauses
.gang_num_expr
;
5268 loop_clauses
.gang_static_expr
= construct_clauses
.gang_static_expr
;
5269 loop_clauses
.vector
= construct_clauses
.vector
;
5270 loop_clauses
.vector_expr
= construct_clauses
.vector_expr
;
5271 loop_clauses
.worker
= construct_clauses
.worker
;
5272 loop_clauses
.worker_expr
= construct_clauses
.worker_expr
;
5273 loop_clauses
.seq
= construct_clauses
.seq
;
5274 loop_clauses
.par_auto
= construct_clauses
.par_auto
;
5275 loop_clauses
.independent
= construct_clauses
.independent
;
5276 loop_clauses
.tile_list
= construct_clauses
.tile_list
;
5277 loop_clauses
.lists
[OMP_LIST_PRIVATE
]
5278 = construct_clauses
.lists
[OMP_LIST_PRIVATE
];
5279 loop_clauses
.lists
[OMP_LIST_REDUCTION
]
5280 = construct_clauses
.lists
[OMP_LIST_REDUCTION
];
5281 construct_clauses
.gang
= false;
5282 construct_clauses
.gang_static
= false;
5283 construct_clauses
.gang_num_expr
= NULL
;
5284 construct_clauses
.gang_static_expr
= NULL
;
5285 construct_clauses
.vector
= false;
5286 construct_clauses
.vector_expr
= NULL
;
5287 construct_clauses
.worker
= false;
5288 construct_clauses
.worker_expr
= NULL
;
5289 construct_clauses
.seq
= false;
5290 construct_clauses
.par_auto
= false;
5291 construct_clauses
.independent
= false;
5292 construct_clauses
.independent
= false;
5293 construct_clauses
.tile_list
= NULL
;
5294 construct_clauses
.lists
[OMP_LIST_PRIVATE
] = NULL
;
5295 if (construct_code
== OACC_KERNELS
)
5296 construct_clauses
.lists
[OMP_LIST_REDUCTION
] = NULL
;
5297 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
5298 code
->loc
, false, true);
5300 if (!loop_clauses
.seq
)
5304 stmt
= gfc_trans_omp_do (code
, EXEC_OACC_LOOP
, pblock
, &loop_clauses
, NULL
);
5305 protected_set_expr_location (stmt
, loc
);
5306 if (TREE_CODE (stmt
) != BIND_EXPR
)
5307 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5310 stmt
= build2_loc (loc
, construct_code
, void_type_node
, stmt
, oacc_clauses
);
5311 gfc_add_expr_to_block (&block
, stmt
);
5312 return gfc_finish_block (&block
);
5316 gfc_trans_omp_depobj (gfc_code
*code
)
5320 gfc_init_se (&se
, NULL
);
5321 gfc_init_block (&block
);
5322 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->depobj
);
5323 gcc_assert (se
.pre
.head
== NULL
&& se
.post
.head
== NULL
);
5324 tree depobj
= se
.expr
;
5325 location_t loc
= EXPR_LOCATION (depobj
);
5326 if (!POINTER_TYPE_P (TREE_TYPE (depobj
)))
5327 depobj
= gfc_build_addr_expr (NULL
, depobj
);
5328 depobj
= fold_convert (build_pointer_type_for_mode (ptr_type_node
,
5329 TYPE_MODE (ptr_type_node
),
5331 gfc_omp_namelist
*n
= code
->ext
.omp_clauses
->lists
[OMP_LIST_DEPEND
];
5336 var
= gfc_convert_expr_to_tree (&block
, n
->expr
);
5338 var
= gfc_get_symbol_decl (n
->sym
);
5339 if (!POINTER_TYPE_P (TREE_TYPE (var
)))
5340 var
= gfc_build_addr_expr (NULL
, var
);
5341 depobj
= save_expr (depobj
);
5342 tree r
= build_fold_indirect_ref_loc (loc
, depobj
);
5343 gfc_add_expr_to_block (&block
,
5344 build2 (MODIFY_EXPR
, void_type_node
, r
, var
));
5347 /* Only one may be set. */
5348 gcc_assert (((int)(n
!= NULL
) + (int)(code
->ext
.omp_clauses
->destroy
)
5349 + (int)(code
->ext
.omp_clauses
->depobj_update
!= OMP_DEPEND_UNSET
))
5351 int k
= -1; /* omp_clauses->destroy */
5352 if (!code
->ext
.omp_clauses
->destroy
)
5353 switch (code
->ext
.omp_clauses
->depobj_update
!= OMP_DEPEND_UNSET
5354 ? code
->ext
.omp_clauses
->depobj_update
: n
->u
.depend_op
)
5356 case OMP_DEPEND_IN
: k
= GOMP_DEPEND_IN
; break;
5357 case OMP_DEPEND_OUT
: k
= GOMP_DEPEND_OUT
; break;
5358 case OMP_DEPEND_INOUT
: k
= GOMP_DEPEND_INOUT
; break;
5359 case OMP_DEPEND_MUTEXINOUTSET
: k
= GOMP_DEPEND_MUTEXINOUTSET
; break;
5360 default: gcc_unreachable ();
5362 tree t
= build_int_cst (ptr_type_node
, k
);
5363 depobj
= build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (depobj
), depobj
,
5364 TYPE_SIZE_UNIT (ptr_type_node
));
5365 depobj
= build_fold_indirect_ref_loc (loc
, depobj
);
5366 gfc_add_expr_to_block (&block
, build2 (MODIFY_EXPR
, void_type_node
, depobj
, t
));
5368 return gfc_finish_block (&block
);
5372 gfc_trans_omp_error (gfc_code
*code
)
5377 bool fatal
= code
->ext
.omp_clauses
->severity
== OMP_SEVERITY_FATAL
;
5378 tree fndecl
= builtin_decl_explicit (fatal
? BUILT_IN_GOMP_ERROR
5379 : BUILT_IN_GOMP_WARNING
);
5380 gfc_start_block (&block
);
5381 gfc_init_se (&se
, NULL
);
5382 if (!code
->ext
.omp_clauses
->message
)
5384 message
= null_pointer_node
;
5385 len
= build_int_cst (size_type_node
, 0);
5389 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->message
);
5391 if (!POINTER_TYPE_P (TREE_TYPE (message
)))
5392 /* To ensure an ARRAY_TYPE is not passed as such. */
5393 message
= gfc_build_addr_expr (NULL
, message
);
5394 len
= se
.string_length
;
5396 gfc_add_block_to_block (&block
, &se
.pre
);
5397 gfc_add_expr_to_block (&block
, build_call_expr_loc (input_location
, fndecl
,
5399 gfc_add_block_to_block (&block
, &se
.post
);
5400 return gfc_finish_block (&block
);
5404 gfc_trans_omp_flush (gfc_code
*code
)
5407 if (!code
->ext
.omp_clauses
5408 || code
->ext
.omp_clauses
->memorder
== OMP_MEMORDER_UNSET
)
5410 call
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
5411 call
= build_call_expr_loc (input_location
, call
, 0);
5415 enum memmodel mo
= MEMMODEL_LAST
;
5416 switch (code
->ext
.omp_clauses
->memorder
)
5418 case OMP_MEMORDER_ACQ_REL
: mo
= MEMMODEL_ACQ_REL
; break;
5419 case OMP_MEMORDER_RELEASE
: mo
= MEMMODEL_RELEASE
; break;
5420 case OMP_MEMORDER_ACQUIRE
: mo
= MEMMODEL_ACQUIRE
; break;
5421 default: gcc_unreachable (); break;
5423 call
= builtin_decl_explicit (BUILT_IN_ATOMIC_THREAD_FENCE
);
5424 call
= build_call_expr_loc (input_location
, call
, 1,
5425 build_int_cst (integer_type_node
, mo
));
5431 gfc_trans_omp_master (gfc_code
*code
)
5433 tree stmt
= gfc_trans_code (code
->block
->next
);
5434 if (IS_EMPTY_STMT (stmt
))
5436 return build1_v (OMP_MASTER
, stmt
);
5440 gfc_trans_omp_masked (gfc_code
*code
, gfc_omp_clauses
*clauses
)
5443 tree body
= gfc_trans_code (code
->block
->next
);
5444 if (IS_EMPTY_STMT (body
))
5447 clauses
= code
->ext
.omp_clauses
;
5448 gfc_start_block (&block
);
5449 tree omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
5450 tree stmt
= make_node (OMP_MASKED
);
5451 TREE_TYPE (stmt
) = void_type_node
;
5452 OMP_MASKED_BODY (stmt
) = body
;
5453 OMP_MASKED_CLAUSES (stmt
) = omp_clauses
;
5454 gfc_add_expr_to_block (&block
, stmt
);
5455 return gfc_finish_block (&block
);
5460 gfc_trans_omp_ordered (gfc_code
*code
)
5464 if (!code
->ext
.omp_clauses
->simd
)
5465 return gfc_trans_code (code
->block
? code
->block
->next
: NULL
);
5466 code
->ext
.omp_clauses
->threads
= 0;
5468 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, code
->ext
.omp_clauses
,
5470 return build2_loc (input_location
, OMP_ORDERED
, void_type_node
,
5471 code
->block
? gfc_trans_code (code
->block
->next
)
5472 : NULL_TREE
, omp_clauses
);
5476 gfc_trans_omp_parallel (gfc_code
*code
)
5479 tree stmt
, omp_clauses
;
5481 gfc_start_block (&block
);
5482 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
5485 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
5486 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5487 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
5489 gfc_add_expr_to_block (&block
, stmt
);
5490 return gfc_finish_block (&block
);
/* Indices for the constituent constructs a combined OpenMP directive is
   split into, and the corresponding bit-mask values.  */
enum
{
  GFC_OMP_SPLIT_SIMD,
  GFC_OMP_SPLIT_DO,
  GFC_OMP_SPLIT_PARALLEL,
  GFC_OMP_SPLIT_DISTRIBUTE,
  GFC_OMP_SPLIT_TEAMS,
  GFC_OMP_SPLIT_TARGET,
  GFC_OMP_SPLIT_TASKLOOP,
  GFC_OMP_SPLIT_MASKED,
  GFC_OMP_SPLIT_NUM
};

enum
{
  GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
  GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
  GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
  GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
  GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
  GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET),
  GFC_OMP_MASK_TASKLOOP = (1 << GFC_OMP_SPLIT_TASKLOOP),
  GFC_OMP_MASK_MASKED = (1 << GFC_OMP_SPLIT_MASKED)
};
5518 /* If a var is in lastprivate/firstprivate/reduction but not in a
5519 data mapping/sharing clause, add it to 'map(tofrom:' if is_target
5520 and to 'shared' otherwise. */
5522 gfc_add_clause_implicitly (gfc_omp_clauses
*clauses_out
,
5523 gfc_omp_clauses
*clauses_in
,
5524 bool is_target
, bool is_parallel_do
)
5526 int clauselist_to_add
= is_target
? OMP_LIST_MAP
: OMP_LIST_SHARED
;
5527 gfc_omp_namelist
*tail
= NULL
;
5528 for (int i
= 0; i
< 5; ++i
)
5530 gfc_omp_namelist
*n
;
5533 case 0: n
= clauses_in
->lists
[OMP_LIST_FIRSTPRIVATE
]; break;
5534 case 1: n
= clauses_in
->lists
[OMP_LIST_LASTPRIVATE
]; break;
5535 case 2: n
= clauses_in
->lists
[OMP_LIST_REDUCTION
]; break;
5536 case 3: n
= clauses_in
->lists
[OMP_LIST_REDUCTION_INSCAN
]; break;
5537 case 4: n
= clauses_in
->lists
[OMP_LIST_REDUCTION_TASK
]; break;
5538 default: gcc_unreachable ();
5540 for (; n
!= NULL
; n
= n
->next
)
5542 gfc_omp_namelist
*n2
, **n_firstp
= NULL
, **n_lastp
= NULL
;
5543 for (int j
= 0; j
< 6; ++j
)
5545 gfc_omp_namelist
**n2ref
= NULL
, *prev2
= NULL
;
5549 n2ref
= &clauses_out
->lists
[clauselist_to_add
];
5552 n2ref
= &clauses_out
->lists
[OMP_LIST_FIRSTPRIVATE
];
5556 n2ref
= &clauses_in
->lists
[OMP_LIST_LASTPRIVATE
];
5558 n2ref
= &clauses_out
->lists
[OMP_LIST_LASTPRIVATE
];
5560 case 3: n2ref
= &clauses_out
->lists
[OMP_LIST_REDUCTION
]; break;
5562 n2ref
= &clauses_out
->lists
[OMP_LIST_REDUCTION_INSCAN
];
5565 n2ref
= &clauses_out
->lists
[OMP_LIST_REDUCTION_TASK
];
5567 default: gcc_unreachable ();
5569 for (n2
= *n2ref
; n2
!= NULL
; prev2
= n2
, n2
= n2
->next
)
5570 if (n2
->sym
== n
->sym
)
5574 if (j
== 0 /* clauselist_to_add */)
5575 break; /* Already present. */
5576 if (j
== 1 /* OMP_LIST_FIRSTPRIVATE */)
5578 n_firstp
= prev2
? &prev2
->next
: n2ref
;
5581 if (j
== 2 /* OMP_LIST_LASTPRIVATE */)
5583 n_lastp
= prev2
? &prev2
->next
: n2ref
;
5589 if (n_firstp
&& n_lastp
)
5591 /* For parallel do, GCC puts firstprivatee/lastprivate
5595 *n_firstp
= (*n_firstp
)->next
;
5597 *n_lastp
= (*n_lastp
)->next
;
5599 else if (is_target
&& n_lastp
)
5601 else if (n2
|| n_firstp
|| n_lastp
)
5603 if (clauses_out
->lists
[clauselist_to_add
]
5604 && (clauses_out
->lists
[clauselist_to_add
]
5605 == clauses_in
->lists
[clauselist_to_add
]))
5607 gfc_omp_namelist
*p
= NULL
;
5608 for (n2
= clauses_in
->lists
[clauselist_to_add
]; n2
; n2
= n2
->next
)
5612 p
->next
= gfc_get_omp_namelist ();
5617 p
= gfc_get_omp_namelist ();
5618 clauses_out
->lists
[clauselist_to_add
] = p
;
5625 tail
= clauses_out
->lists
[clauselist_to_add
];
5626 for (; tail
&& tail
->next
; tail
= tail
->next
)
5629 n2
= gfc_get_omp_namelist ();
5630 n2
->where
= n
->where
;
5633 n2
->u
.map_op
= OMP_MAP_TOFROM
;
5640 clauses_out
->lists
[clauselist_to_add
] = n2
;
5646 gfc_free_split_omp_clauses (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
5648 for (int i
= 0; i
< GFC_OMP_SPLIT_NUM
; ++i
)
5649 for (int j
= 0; j
< OMP_LIST_NUM
; ++j
)
5650 if (clausesa
[i
].lists
[j
] && clausesa
[i
].lists
[j
] != code
->ext
.omp_clauses
->lists
[j
])
5651 for (gfc_omp_namelist
*n
= clausesa
[i
].lists
[j
]; n
;)
5653 gfc_omp_namelist
*p
= n
;
5660 gfc_split_omp_clauses (gfc_code
*code
,
5661 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
5663 int mask
= 0, innermost
= 0;
5664 bool is_loop
= false;
5665 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
5668 case EXEC_OMP_DISTRIBUTE
:
5669 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
5671 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
5672 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
5673 innermost
= GFC_OMP_SPLIT_DO
;
5675 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
5676 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
5677 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
5678 innermost
= GFC_OMP_SPLIT_SIMD
;
5680 case EXEC_OMP_DISTRIBUTE_SIMD
:
5681 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
5682 innermost
= GFC_OMP_SPLIT_SIMD
;
5686 innermost
= GFC_OMP_SPLIT_DO
;
5688 case EXEC_OMP_DO_SIMD
:
5689 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
5690 innermost
= GFC_OMP_SPLIT_SIMD
;
5692 case EXEC_OMP_PARALLEL
:
5693 innermost
= GFC_OMP_SPLIT_PARALLEL
;
5695 case EXEC_OMP_PARALLEL_DO
:
5696 case EXEC_OMP_PARALLEL_LOOP
:
5697 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
5698 innermost
= GFC_OMP_SPLIT_DO
;
5700 case EXEC_OMP_PARALLEL_DO_SIMD
:
5701 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
5702 innermost
= GFC_OMP_SPLIT_SIMD
;
5704 case EXEC_OMP_PARALLEL_MASKED
:
5705 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_MASKED
;
5706 innermost
= GFC_OMP_SPLIT_MASKED
;
5708 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP
:
5709 mask
= (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_MASKED
5710 | GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
);
5711 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
5713 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP
:
5714 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
5715 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
5717 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP_SIMD
:
5718 mask
= (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_MASKED
5719 | GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
);
5720 innermost
= GFC_OMP_SPLIT_SIMD
;
5722 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP_SIMD
:
5723 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
5724 innermost
= GFC_OMP_SPLIT_SIMD
;
5727 innermost
= GFC_OMP_SPLIT_SIMD
;
5729 case EXEC_OMP_TARGET
:
5730 innermost
= GFC_OMP_SPLIT_TARGET
;
5732 case EXEC_OMP_TARGET_PARALLEL
:
5733 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
;
5734 innermost
= GFC_OMP_SPLIT_PARALLEL
;
5736 case EXEC_OMP_TARGET_PARALLEL_DO
:
5737 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
5738 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
5739 innermost
= GFC_OMP_SPLIT_DO
;
5741 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
5742 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
5743 | GFC_OMP_MASK_SIMD
;
5744 innermost
= GFC_OMP_SPLIT_SIMD
;
5746 case EXEC_OMP_TARGET_SIMD
:
5747 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_SIMD
;
5748 innermost
= GFC_OMP_SPLIT_SIMD
;
5750 case EXEC_OMP_TARGET_TEAMS
:
5751 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
5752 innermost
= GFC_OMP_SPLIT_TEAMS
;
5754 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
5755 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
5756 | GFC_OMP_MASK_DISTRIBUTE
;
5757 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
5759 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5760 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
5761 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
5762 innermost
= GFC_OMP_SPLIT_DO
;
5764 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5765 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
5766 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
5767 innermost
= GFC_OMP_SPLIT_SIMD
;
5769 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
5770 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
5771 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
5772 innermost
= GFC_OMP_SPLIT_SIMD
;
5774 case EXEC_OMP_TARGET_TEAMS_LOOP
:
5775 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DO
;
5776 innermost
= GFC_OMP_SPLIT_DO
;
5778 case EXEC_OMP_MASKED_TASKLOOP
:
5779 mask
= GFC_OMP_SPLIT_MASKED
| GFC_OMP_SPLIT_TASKLOOP
;
5780 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
5782 case EXEC_OMP_MASTER_TASKLOOP
:
5783 case EXEC_OMP_TASKLOOP
:
5784 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
5786 case EXEC_OMP_MASKED_TASKLOOP_SIMD
:
5787 mask
= GFC_OMP_MASK_MASKED
| GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
5788 innermost
= GFC_OMP_SPLIT_SIMD
;
5790 case EXEC_OMP_MASTER_TASKLOOP_SIMD
:
5791 case EXEC_OMP_TASKLOOP_SIMD
:
5792 mask
= GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
5793 innermost
= GFC_OMP_SPLIT_SIMD
;
5795 case EXEC_OMP_TEAMS
:
5796 innermost
= GFC_OMP_SPLIT_TEAMS
;
5798 case EXEC_OMP_TEAMS_DISTRIBUTE
:
5799 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
5800 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
5802 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5803 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
5804 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
5805 innermost
= GFC_OMP_SPLIT_DO
;
5807 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5808 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
5809 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
5810 innermost
= GFC_OMP_SPLIT_SIMD
;
5812 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
5813 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
5814 innermost
= GFC_OMP_SPLIT_SIMD
;
5816 case EXEC_OMP_TEAMS_LOOP
:
5817 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DO
;
5818 innermost
= GFC_OMP_SPLIT_DO
;
5825 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
5828 /* Loops are similar to DO but still a bit different. */
5832 case EXEC_OMP_PARALLEL_LOOP
:
5833 case EXEC_OMP_TEAMS_LOOP
:
5834 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
5835 case EXEC_OMP_TARGET_TEAMS_LOOP
:
5840 if (code
->ext
.omp_clauses
!= NULL
)
5842 if (mask
& GFC_OMP_MASK_TARGET
)
5844 /* First the clauses that are unique to some constructs. */
5845 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
5846 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
5847 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_IS_DEVICE_PTR
]
5848 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IS_DEVICE_PTR
];
5849 clausesa
[GFC_OMP_SPLIT_TARGET
].device
5850 = code
->ext
.omp_clauses
->device
;
5851 for (int i
= 0; i
< OMP_DEFAULTMAP_CAT_NUM
; i
++)
5852 clausesa
[GFC_OMP_SPLIT_TARGET
].defaultmap
[i
]
5853 = code
->ext
.omp_clauses
->defaultmap
[i
];
5854 clausesa
[GFC_OMP_SPLIT_TARGET
].if_exprs
[OMP_IF_TARGET
]
5855 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TARGET
];
5856 /* And this is copied to all. */
5857 clausesa
[GFC_OMP_SPLIT_TARGET
].if_expr
5858 = code
->ext
.omp_clauses
->if_expr
;
5860 if (mask
& GFC_OMP_MASK_TEAMS
)
5862 /* First the clauses that are unique to some constructs. */
5863 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
5864 = code
->ext
.omp_clauses
->num_teams
;
5865 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
5866 = code
->ext
.omp_clauses
->thread_limit
;
5867 /* Shared and default clauses are allowed on parallel, teams
5869 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
5870 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
5871 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
5872 = code
->ext
.omp_clauses
->default_sharing
;
5874 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
5876 /* First the clauses that are unique to some constructs. */
5877 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
5878 = code
->ext
.omp_clauses
->dist_sched_kind
;
5879 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
5880 = code
->ext
.omp_clauses
->dist_chunk_size
;
5881 /* Duplicate collapse. */
5882 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
5883 = code
->ext
.omp_clauses
->collapse
;
5884 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].order_concurrent
5885 = code
->ext
.omp_clauses
->order_concurrent
;
5887 if (mask
& GFC_OMP_MASK_PARALLEL
)
5889 /* First the clauses that are unique to some constructs. */
5890 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
5891 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
5892 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
5893 = code
->ext
.omp_clauses
->num_threads
;
5894 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
5895 = code
->ext
.omp_clauses
->proc_bind
;
5896 /* Shared and default clauses are allowed on parallel, teams
5898 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
5899 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
5900 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
5901 = code
->ext
.omp_clauses
->default_sharing
;
5902 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_exprs
[OMP_IF_PARALLEL
]
5903 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_PARALLEL
];
5904 /* And this is copied to all. */
5905 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
5906 = code
->ext
.omp_clauses
->if_expr
;
5908 if (mask
& GFC_OMP_MASK_MASKED
)
5909 clausesa
[GFC_OMP_SPLIT_MASKED
].filter
= code
->ext
.omp_clauses
->filter
;
5910 if ((mask
& GFC_OMP_MASK_DO
) && !is_loop
)
5912 /* First the clauses that are unique to some constructs. */
5913 clausesa
[GFC_OMP_SPLIT_DO
].ordered
5914 = code
->ext
.omp_clauses
->ordered
;
5915 clausesa
[GFC_OMP_SPLIT_DO
].orderedc
5916 = code
->ext
.omp_clauses
->orderedc
;
5917 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
5918 = code
->ext
.omp_clauses
->sched_kind
;
5919 if (innermost
== GFC_OMP_SPLIT_SIMD
)
5920 clausesa
[GFC_OMP_SPLIT_DO
].sched_simd
5921 = code
->ext
.omp_clauses
->sched_simd
;
5922 clausesa
[GFC_OMP_SPLIT_DO
].sched_monotonic
5923 = code
->ext
.omp_clauses
->sched_monotonic
;
5924 clausesa
[GFC_OMP_SPLIT_DO
].sched_nonmonotonic
5925 = code
->ext
.omp_clauses
->sched_nonmonotonic
;
5926 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
5927 = code
->ext
.omp_clauses
->chunk_size
;
5928 clausesa
[GFC_OMP_SPLIT_DO
].nowait
5929 = code
->ext
.omp_clauses
->nowait
;
5931 if (mask
& GFC_OMP_MASK_DO
)
5933 clausesa
[GFC_OMP_SPLIT_DO
].bind
5934 = code
->ext
.omp_clauses
->bind
;
5935 /* Duplicate collapse. */
5936 clausesa
[GFC_OMP_SPLIT_DO
].collapse
5937 = code
->ext
.omp_clauses
->collapse
;
5938 clausesa
[GFC_OMP_SPLIT_DO
].order_concurrent
5939 = code
->ext
.omp_clauses
->order_concurrent
;
5941 if (mask
& GFC_OMP_MASK_SIMD
)
5943 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
5944 = code
->ext
.omp_clauses
->safelen_expr
;
5945 clausesa
[GFC_OMP_SPLIT_SIMD
].simdlen_expr
5946 = code
->ext
.omp_clauses
->simdlen_expr
;
5947 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
5948 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
5949 /* Duplicate collapse. */
5950 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
5951 = code
->ext
.omp_clauses
->collapse
;
5952 clausesa
[GFC_OMP_SPLIT_SIMD
].if_exprs
[OMP_IF_SIMD
]
5953 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_SIMD
];
5954 clausesa
[GFC_OMP_SPLIT_SIMD
].order_concurrent
5955 = code
->ext
.omp_clauses
->order_concurrent
;
5956 /* And this is copied to all. */
5957 clausesa
[GFC_OMP_SPLIT_SIMD
].if_expr
5958 = code
->ext
.omp_clauses
->if_expr
;
5960 if (mask
& GFC_OMP_MASK_TASKLOOP
)
5962 /* First the clauses that are unique to some constructs. */
5963 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].nogroup
5964 = code
->ext
.omp_clauses
->nogroup
;
5965 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].grainsize
5966 = code
->ext
.omp_clauses
->grainsize
;
5967 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].num_tasks
5968 = code
->ext
.omp_clauses
->num_tasks
;
5969 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].priority
5970 = code
->ext
.omp_clauses
->priority
;
5971 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].final_expr
5972 = code
->ext
.omp_clauses
->final_expr
;
5973 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].untied
5974 = code
->ext
.omp_clauses
->untied
;
5975 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].mergeable
5976 = code
->ext
.omp_clauses
->mergeable
;
5977 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_exprs
[OMP_IF_TASKLOOP
]
5978 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TASKLOOP
];
5979 /* And this is copied to all. */
5980 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_expr
5981 = code
->ext
.omp_clauses
->if_expr
;
5982 /* Shared and default clauses are allowed on parallel, teams
5984 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_SHARED
]
5985 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
5986 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].default_sharing
5987 = code
->ext
.omp_clauses
->default_sharing
;
5988 /* Duplicate collapse. */
5989 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].collapse
5990 = code
->ext
.omp_clauses
->collapse
;
5992 /* Private clause is supported on all constructs but master/masked,
5993 it is enough to put it on the innermost one except for master/masked. For
5994 !$ omp parallel do put it on parallel though,
5995 as that's what we did for OpenMP 3.1. */
5996 clausesa
[((innermost
== GFC_OMP_SPLIT_DO
&& !is_loop
)
5997 || code
->op
== EXEC_OMP_PARALLEL_MASTER
5998 || code
->op
== EXEC_OMP_PARALLEL_MASKED
)
5999 ? (int) GFC_OMP_SPLIT_PARALLEL
6000 : innermost
].lists
[OMP_LIST_PRIVATE
]
6001 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
6002 /* Firstprivate clause is supported on all constructs but
6003 simd and masked/master. Put it on the outermost of those and duplicate
6004 on parallel and teams. */
6005 if (mask
& GFC_OMP_MASK_TARGET
)
6006 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_FIRSTPRIVATE
]
6007 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6008 if (mask
& GFC_OMP_MASK_TEAMS
)
6009 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
6010 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6011 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
6012 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
6013 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6014 if (mask
& GFC_OMP_MASK_TASKLOOP
)
6015 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_FIRSTPRIVATE
]
6016 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6017 if ((mask
& GFC_OMP_MASK_PARALLEL
)
6018 && !(mask
& GFC_OMP_MASK_TASKLOOP
))
6019 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
6020 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6021 else if ((mask
& GFC_OMP_MASK_DO
) && !is_loop
)
6022 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
6023 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6024 /* Lastprivate is allowed on distribute, do, simd, taskloop and loop.
6025 In parallel do{, simd} we actually want to put it on
6026 parallel rather than do. */
6027 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
6028 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_LASTPRIVATE
]
6029 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6030 if (mask
& GFC_OMP_MASK_TASKLOOP
)
6031 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_LASTPRIVATE
]
6032 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6033 if ((mask
& GFC_OMP_MASK_PARALLEL
) && !is_loop
6034 && !(mask
& GFC_OMP_MASK_TASKLOOP
))
6035 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
6036 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6037 else if (mask
& GFC_OMP_MASK_DO
)
6038 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
6039 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6040 if (mask
& GFC_OMP_MASK_SIMD
)
6041 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
6042 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6043 /* Reduction is allowed on simd, do, parallel, teams, taskloop, and loop.
6044 Duplicate it on all of them, but
6045 - omit on do if parallel is present;
6046 - omit on task and parallel if loop is present;
6047 additionally, inscan applies to do/simd only. */
6048 for (int i
= OMP_LIST_REDUCTION
; i
<= OMP_LIST_REDUCTION_TASK
; i
++)
6050 if (mask
& GFC_OMP_MASK_TASKLOOP
6051 && i
!= OMP_LIST_REDUCTION_INSCAN
)
6052 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[i
]
6053 = code
->ext
.omp_clauses
->lists
[i
];
6054 if (mask
& GFC_OMP_MASK_TEAMS
6055 && i
!= OMP_LIST_REDUCTION_INSCAN
6057 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[i
]
6058 = code
->ext
.omp_clauses
->lists
[i
];
6059 if (mask
& GFC_OMP_MASK_PARALLEL
6060 && i
!= OMP_LIST_REDUCTION_INSCAN
6061 && !(mask
& GFC_OMP_MASK_TASKLOOP
)
6063 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[i
]
6064 = code
->ext
.omp_clauses
->lists
[i
];
6065 else if (mask
& GFC_OMP_MASK_DO
)
6066 clausesa
[GFC_OMP_SPLIT_DO
].lists
[i
]
6067 = code
->ext
.omp_clauses
->lists
[i
];
6068 if (mask
& GFC_OMP_MASK_SIMD
)
6069 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[i
]
6070 = code
->ext
.omp_clauses
->lists
[i
];
6072 if (mask
& GFC_OMP_MASK_TARGET
)
6073 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_IN_REDUCTION
]
6074 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IN_REDUCTION
];
6075 if (mask
& GFC_OMP_MASK_TASKLOOP
)
6076 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_IN_REDUCTION
]
6077 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IN_REDUCTION
];
6078 /* Linear clause is supported on do and simd,
6079 put it on the innermost one. */
6080 clausesa
[innermost
].lists
[OMP_LIST_LINEAR
]
6081 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
6083 /* Propagate firstprivate/lastprivate/reduction vars to
6084 shared (parallel, teams) and map-tofrom (target). */
6085 if (mask
& GFC_OMP_MASK_TARGET
)
6086 gfc_add_clause_implicitly (&clausesa
[GFC_OMP_SPLIT_TARGET
],
6087 code
->ext
.omp_clauses
, true, false);
6088 if ((mask
& GFC_OMP_MASK_PARALLEL
) && innermost
!= GFC_OMP_MASK_PARALLEL
)
6089 gfc_add_clause_implicitly (&clausesa
[GFC_OMP_SPLIT_PARALLEL
],
6090 code
->ext
.omp_clauses
, false,
6091 mask
& GFC_OMP_MASK_DO
);
6092 if (mask
& GFC_OMP_MASK_TEAMS
&& innermost
!= GFC_OMP_MASK_TEAMS
)
6093 gfc_add_clause_implicitly (&clausesa
[GFC_OMP_SPLIT_TEAMS
],
6094 code
->ext
.omp_clauses
, false, false);
6095 if (((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
6096 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
6098 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
6102 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
6103 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
6106 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
6107 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
6108 bool free_clausesa
= false;
6111 gfc_start_block (&block
);
6113 gfc_init_block (&block
);
6115 if (clausesa
== NULL
)
6117 clausesa
= clausesa_buf
;
6118 gfc_split_omp_clauses (code
, clausesa
);
6119 free_clausesa
= true;
6123 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
6124 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
6125 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
6128 if (TREE_CODE (body
) != BIND_EXPR
)
6129 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
6133 else if (TREE_CODE (body
) != BIND_EXPR
)
6134 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
6137 stmt
= make_node (OMP_FOR
);
6138 TREE_TYPE (stmt
) = void_type_node
;
6139 OMP_FOR_BODY (stmt
) = body
;
6140 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
6144 gfc_add_expr_to_block (&block
, stmt
);
6146 gfc_free_split_omp_clauses (code
, clausesa
);
6147 return gfc_finish_block (&block
);
6151 gfc_trans_omp_parallel_do (gfc_code
*code
, bool is_loop
, stmtblock_t
*pblock
,
6152 gfc_omp_clauses
*clausesa
)
6154 stmtblock_t block
, *new_pblock
= pblock
;
6155 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
6156 tree stmt
, omp_clauses
= NULL_TREE
;
6157 bool free_clausesa
= false;
6160 gfc_start_block (&block
);
6162 gfc_init_block (&block
);
6164 if (clausesa
== NULL
)
6166 clausesa
= clausesa_buf
;
6167 gfc_split_omp_clauses (code
, clausesa
);
6168 free_clausesa
= true;
6171 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
6175 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
6176 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
6177 new_pblock
= &block
;
6181 stmt
= gfc_trans_omp_do (code
, is_loop
? EXEC_OMP_LOOP
: EXEC_OMP_DO
,
6182 new_pblock
, &clausesa
[GFC_OMP_SPLIT_DO
],
6186 if (TREE_CODE (stmt
) != BIND_EXPR
)
6187 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6191 else if (TREE_CODE (stmt
) != BIND_EXPR
)
6192 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
6193 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
6194 void_type_node
, stmt
, omp_clauses
);
6195 OMP_PARALLEL_COMBINED (stmt
) = 1;
6196 gfc_add_expr_to_block (&block
, stmt
);
6198 gfc_free_split_omp_clauses (code
, clausesa
);
6199 return gfc_finish_block (&block
);
6203 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
6204 gfc_omp_clauses
*clausesa
)
6207 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
6208 tree stmt
, omp_clauses
= NULL_TREE
;
6209 bool free_clausesa
= false;
6212 gfc_start_block (&block
);
6214 gfc_init_block (&block
);
6216 if (clausesa
== NULL
)
6218 clausesa
= clausesa_buf
;
6219 gfc_split_omp_clauses (code
, clausesa
);
6220 free_clausesa
= true;
6224 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
6228 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
6231 if (TREE_CODE (stmt
) != BIND_EXPR
)
6232 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6236 else if (TREE_CODE (stmt
) != BIND_EXPR
)
6237 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
6240 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
6241 void_type_node
, stmt
, omp_clauses
);
6242 OMP_PARALLEL_COMBINED (stmt
) = 1;
6244 gfc_add_expr_to_block (&block
, stmt
);
6246 gfc_free_split_omp_clauses (code
, clausesa
);
6247 return gfc_finish_block (&block
);
6251 gfc_trans_omp_parallel_sections (gfc_code
*code
)
6254 gfc_omp_clauses section_clauses
;
6255 tree stmt
, omp_clauses
;
6257 memset (§ion_clauses
, 0, sizeof (section_clauses
));
6258 section_clauses
.nowait
= true;
6260 gfc_start_block (&block
);
6261 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6264 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
6265 if (TREE_CODE (stmt
) != BIND_EXPR
)
6266 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6269 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
6270 void_type_node
, stmt
, omp_clauses
);
6271 OMP_PARALLEL_COMBINED (stmt
) = 1;
6272 gfc_add_expr_to_block (&block
, stmt
);
6273 return gfc_finish_block (&block
);
6277 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
6280 gfc_omp_clauses workshare_clauses
;
6281 tree stmt
, omp_clauses
;
6283 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
6284 workshare_clauses
.nowait
= true;
6286 gfc_start_block (&block
);
6287 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6290 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
6291 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6292 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
6293 void_type_node
, stmt
, omp_clauses
);
6294 OMP_PARALLEL_COMBINED (stmt
) = 1;
6295 gfc_add_expr_to_block (&block
, stmt
);
6296 return gfc_finish_block (&block
);
6300 gfc_trans_omp_scope (gfc_code
*code
)
6303 tree body
= gfc_trans_code (code
->block
->next
);
6304 if (IS_EMPTY_STMT (body
))
6306 gfc_start_block (&block
);
6307 tree omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6309 tree stmt
= make_node (OMP_SCOPE
);
6310 TREE_TYPE (stmt
) = void_type_node
;
6311 OMP_SCOPE_BODY (stmt
) = body
;
6312 OMP_SCOPE_CLAUSES (stmt
) = omp_clauses
;
6313 gfc_add_expr_to_block (&block
, stmt
);
6314 return gfc_finish_block (&block
);
6318 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
6320 stmtblock_t block
, body
;
6321 tree omp_clauses
, stmt
;
6322 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
6323 location_t loc
= gfc_get_location (&code
->loc
);
6325 gfc_start_block (&block
);
6327 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
6329 gfc_init_block (&body
);
6330 for (code
= code
->block
; code
; code
= code
->block
)
6332 /* Last section is special because of lastprivate, so even if it
6333 is empty, chain it in. */
6334 stmt
= gfc_trans_omp_code (code
->next
,
6335 has_lastprivate
&& code
->block
== NULL
);
6336 if (! IS_EMPTY_STMT (stmt
))
6338 stmt
= build1_v (OMP_SECTION
, stmt
);
6339 gfc_add_expr_to_block (&body
, stmt
);
6342 stmt
= gfc_finish_block (&body
);
6344 stmt
= build2_loc (loc
, OMP_SECTIONS
, void_type_node
, stmt
, omp_clauses
);
6345 gfc_add_expr_to_block (&block
, stmt
);
6347 return gfc_finish_block (&block
);
6351 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
6353 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, clauses
, code
->loc
);
6354 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
6355 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_SINGLE
, void_type_node
,
6361 gfc_trans_omp_task (gfc_code
*code
)
6364 tree stmt
, omp_clauses
;
6366 gfc_start_block (&block
);
6367 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6370 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
6371 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6372 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TASK
, void_type_node
,
6374 gfc_add_expr_to_block (&block
, stmt
);
6375 return gfc_finish_block (&block
);
6379 gfc_trans_omp_taskgroup (gfc_code
*code
)
6381 tree body
= gfc_trans_code (code
->block
->next
);
6382 tree stmt
= make_node (OMP_TASKGROUP
);
6383 TREE_TYPE (stmt
) = void_type_node
;
6384 OMP_TASKGROUP_BODY (stmt
) = body
;
6385 OMP_TASKGROUP_CLAUSES (stmt
) = NULL_TREE
;
6390 gfc_trans_omp_taskwait (gfc_code
*code
)
6392 if (!code
->ext
.omp_clauses
)
6394 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
6395 return build_call_expr_loc (input_location
, decl
, 0);
6398 gfc_start_block (&block
);
6399 tree stmt
= make_node (OMP_TASK
);
6400 TREE_TYPE (stmt
) = void_type_node
;
6401 OMP_TASK_BODY (stmt
) = NULL_TREE
;
6402 OMP_TASK_CLAUSES (stmt
) = gfc_trans_omp_clauses (&block
,
6403 code
->ext
.omp_clauses
,
6405 gfc_add_expr_to_block (&block
, stmt
);
6406 return gfc_finish_block (&block
);
6410 gfc_trans_omp_taskyield (void)
6412 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
6413 return build_call_expr_loc (input_location
, decl
, 0);
6417 gfc_trans_omp_distribute (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
6420 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
6421 tree stmt
, omp_clauses
= NULL_TREE
;
6422 bool free_clausesa
= false;
6424 gfc_start_block (&block
);
6425 if (clausesa
== NULL
)
6427 clausesa
= clausesa_buf
;
6428 gfc_split_omp_clauses (code
, clausesa
);
6429 free_clausesa
= true;
6433 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
6437 case EXEC_OMP_DISTRIBUTE
:
6438 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
6439 case EXEC_OMP_TEAMS_DISTRIBUTE
:
6440 /* This is handled in gfc_trans_omp_do. */
6443 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
6444 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
6445 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
6446 stmt
= gfc_trans_omp_parallel_do (code
, false, &block
, clausesa
);
6447 if (TREE_CODE (stmt
) != BIND_EXPR
)
6448 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6452 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
6453 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
6454 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
6455 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
6456 if (TREE_CODE (stmt
) != BIND_EXPR
)
6457 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6461 case EXEC_OMP_DISTRIBUTE_SIMD
:
6462 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
6463 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
6464 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
6465 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
6466 if (TREE_CODE (stmt
) != BIND_EXPR
)
6467 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6476 tree distribute
= make_node (OMP_DISTRIBUTE
);
6477 TREE_TYPE (distribute
) = void_type_node
;
6478 OMP_FOR_BODY (distribute
) = stmt
;
6479 OMP_FOR_CLAUSES (distribute
) = omp_clauses
;
6482 gfc_add_expr_to_block (&block
, stmt
);
6484 gfc_free_split_omp_clauses (code
, clausesa
);
6485 return gfc_finish_block (&block
);
6489 gfc_trans_omp_teams (gfc_code
*code
, gfc_omp_clauses
*clausesa
,
6493 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
6495 bool combined
= true, free_clausesa
= false;
6497 gfc_start_block (&block
);
6498 if (clausesa
== NULL
)
6500 clausesa
= clausesa_buf
;
6501 gfc_split_omp_clauses (code
, clausesa
);
6502 free_clausesa
= true;
6507 = chainon (omp_clauses
,
6508 gfc_trans_omp_clauses (&block
,
6509 &clausesa
[GFC_OMP_SPLIT_TEAMS
],
6515 case EXEC_OMP_TARGET_TEAMS
:
6516 case EXEC_OMP_TEAMS
:
6517 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
6520 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
6521 case EXEC_OMP_TEAMS_DISTRIBUTE
:
6522 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DISTRIBUTE
, NULL
,
6523 &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
6526 case EXEC_OMP_TARGET_TEAMS_LOOP
:
6527 case EXEC_OMP_TEAMS_LOOP
:
6528 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_LOOP
, NULL
,
6529 &clausesa
[GFC_OMP_SPLIT_DO
],
6533 stmt
= gfc_trans_omp_distribute (code
, clausesa
);
6538 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6539 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TEAMS
,
6540 void_type_node
, stmt
, omp_clauses
);
6542 OMP_TEAMS_COMBINED (stmt
) = 1;
6544 gfc_add_expr_to_block (&block
, stmt
);
6546 gfc_free_split_omp_clauses (code
, clausesa
);
6547 return gfc_finish_block (&block
);
/* Translate an OpenMP TARGET construct (and every combined
   target-... form) for CODE into a GENERIC OMP_TARGET tree.
   NOTE(review): garbled extraction -- fragments below are missing
   interleaved lines; comments describe only what is visible.  */
6551 gfc_trans_omp_target (gfc_code
*code
)
6554 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
6555 tree stmt
, omp_clauses
= NULL_TREE
;
6557 gfc_start_block (&block
);
/* Split the combined directive's clauses into per-construct pieces.  */
6558 gfc_split_omp_clauses (code
, clausesa
);
6561 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TARGET
],
/* Dispatch on the statement kind to build the target region body.  */
6565 case EXEC_OMP_TARGET
:
6567 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
6568 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
/* target parallel: build an inner OMP_PARALLEL inside its own block.  */
6570 case EXEC_OMP_TARGET_PARALLEL
:
6575 gfc_start_block (&iblock
);
6577 = gfc_trans_omp_clauses (&iblock
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
6579 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
6580 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
6582 gfc_add_expr_to_block (&iblock
, stmt
);
6583 stmt
= gfc_finish_block (&iblock
);
6584 if (TREE_CODE (stmt
) != BIND_EXPR
)
6585 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6590 case EXEC_OMP_TARGET_PARALLEL_DO
:
6591 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
6592 stmt
= gfc_trans_omp_parallel_do (code
,
6594 == EXEC_OMP_TARGET_PARALLEL_LOOP
),
6596 if (TREE_CODE (stmt
) != BIND_EXPR
)
6597 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6601 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
6602 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
6603 if (TREE_CODE (stmt
) != BIND_EXPR
)
6604 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6608 case EXEC_OMP_TARGET_SIMD
:
6609 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
6610 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
6611 if (TREE_CODE (stmt
) != BIND_EXPR
)
6612 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
/* Combined target teams forms: if num_teams/thread_limit are present,
   they must be evaluated on the host, outside the target region.  */
6618 && (clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
6619 || clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
))
6621 gfc_omp_clauses clausesb
;
6623 /* For combined !$omp target teams, the num_teams and
6624 thread_limit clauses are evaluated before entering the
6625 target construct. */
6626 memset (&clausesb
, '\0', sizeof (clausesb
));
/* Move the two clauses into a fresh clause set so they are translated
   in the outer block, then clear them from the TEAMS split.  */
6627 clausesb
.num_teams
= clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
;
6628 clausesb
.thread_limit
= clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
;
6629 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
= NULL
;
6630 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
= NULL
;
6632 = gfc_trans_omp_clauses (&block
, &clausesb
, code
->loc
);
6634 stmt
= gfc_trans_omp_teams (code
, clausesa
, teams_clauses
);
/* Otherwise translate teams with no pre-evaluated clauses.  */
6639 stmt
= gfc_trans_omp_teams (code
, clausesa
, NULL_TREE
);
6641 if (TREE_CODE (stmt
) != BIND_EXPR
)
6642 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
/* Build the OMP_TARGET node; mark combined forms, and record in cfun
   that this function contains a target region.  */
6649 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TARGET
,
6650 void_type_node
, stmt
, omp_clauses
);
6651 if (code
->op
!= EXEC_OMP_TARGET
)
6652 OMP_TARGET_COMBINED (stmt
) = 1;
6653 cfun
->has_omp_target
= true;
6655 gfc_add_expr_to_block (&block
, stmt
);
6656 gfc_free_split_omp_clauses (code
, clausesa
);
6657 return gfc_finish_block (&block
);
/* Translate an OpenMP TASKLOOP / TASKLOOP SIMD construct for CODE
   into a GENERIC OMP_TASKLOOP tree.  OP selects the variant.
   NOTE(review): garbled extraction -- interleaved lines are missing.  */
6661 gfc_trans_omp_taskloop (gfc_code
*code
, gfc_exec_op op
)
6664 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
6665 tree stmt
, omp_clauses
= NULL_TREE
;
6667 gfc_start_block (&block
);
/* Split the combined directive's clauses, then translate the
   TASKLOOP portion.  */
6668 gfc_split_omp_clauses (code
, clausesa
);
6671 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TASKLOOP
],
6675 case EXEC_OMP_TASKLOOP
:
6676 /* This is handled in gfc_trans_omp_do. */
/* taskloop simd: translate the inner SIMD loop and wrap it.  */
6679 case EXEC_OMP_TASKLOOP_SIMD
:
6680 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
6681 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
6682 if (TREE_CODE (stmt
) != BIND_EXPR
)
6683 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
/* Build the OMP_TASKLOOP node around the body.  */
6692 tree taskloop
= make_node (OMP_TASKLOOP
);
6693 TREE_TYPE (taskloop
) = void_type_node
;
6694 OMP_FOR_BODY (taskloop
) = stmt
;
6695 OMP_FOR_CLAUSES (taskloop
) = omp_clauses
;
6698 gfc_add_expr_to_block (&block
, stmt
);
6699 gfc_free_split_omp_clauses (code
, clausesa
);
6700 return gfc_finish_block (&block
);
/* Translate MASTER TASKLOOP [SIMD] and MASKED TASKLOOP [SIMD]
   combined constructs.  OP names the requested variant; it may differ
   from code->op when called from the parallel-combined translator.
   NOTE(review): garbled extraction -- interleaved lines are missing.  */
6704 gfc_trans_omp_master_masked_taskloop (gfc_code
*code
, gfc_exec_op op
)
6706 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
/* A plain 'master taskloop' uses code->ext.omp_clauses directly (see
   line 6725); all other variants need the clauses split first.  */
6710 if (op
!= EXEC_OMP_MASTER_TASKLOOP_SIMD
6711 && code
->op
!= EXEC_OMP_MASTER_TASKLOOP
)
6712 gfc_split_omp_clauses (code
, clausesa
);
/* SIMD variants delegate to the taskloop-simd translator.  */
6715 if (op
== EXEC_OMP_MASKED_TASKLOOP_SIMD
6716 || op
== EXEC_OMP_MASTER_TASKLOOP_SIMD
)
6717 stmt
= gfc_trans_omp_taskloop (code
, EXEC_OMP_TASKLOOP_SIMD
);
/* Non-SIMD variants translate the taskloop directly.  */
6720 gcc_assert (op
== EXEC_OMP_MASKED_TASKLOOP
6721 || op
== EXEC_OMP_MASTER_TASKLOOP
)
6722 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_TASKLOOP
, NULL
,
6723 code
->op
!= EXEC_OMP_MASTER_TASKLOOP
6724 ? &clausesa
[GFC_OMP_SPLIT_TASKLOOP
]
6725 : code
->ext
.omp_clauses
, NULL
);
6727 if (TREE_CODE (stmt
) != BIND_EXPR
)
6728 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6731 gfc_start_block (&block
);
/* Wrap the taskloop in either an OMP_MASKED (with its filter clause,
   marked combined) or an OMP_MASTER node.  */
6732 if (op
== EXEC_OMP_MASKED_TASKLOOP
|| op
== EXEC_OMP_MASKED_TASKLOOP_SIMD
)
6734 tree clauses
= gfc_trans_omp_clauses (&block
,
6735 &clausesa
[GFC_OMP_SPLIT_MASKED
],
6737 tree msk
= make_node (OMP_MASKED
);
6738 TREE_TYPE (msk
) = void_type_node
;
6739 OMP_MASKED_BODY (msk
) = stmt
;
6740 OMP_MASKED_CLAUSES (msk
) = clauses
;
6741 OMP_MASKED_COMBINED (msk
) = 1;
6742 gfc_add_expr_to_block (&block
, msk
);
6746 gcc_assert (op
== EXEC_OMP_MASTER_TASKLOOP
6747 || op
== EXEC_OMP_MASTER_TASKLOOP_SIMD
);
6748 stmt
= build1_v (OMP_MASTER
, stmt
);
6749 gfc_add_expr_to_block (&block
, stmt
);
/* Free the split clauses iff we created them above (same condition
   as at line 6710).  */
6751 if (op
!= EXEC_OMP_MASTER_TASKLOOP_SIMD
6752 && code
->op
!= EXEC_OMP_MASTER_TASKLOOP
)
6753 gfc_free_split_omp_clauses (code
, clausesa
);
6754 return gfc_finish_block (&block
);
/* Translate PARALLEL MASTER / PARALLEL MASKED (optionally combined with
   TASKLOOP [SIMD]) constructs into an OMP_PARALLEL wrapping the inner
   master/masked translation.
   NOTE(review): garbled extraction -- interleaved lines are missing.  */
6758 gfc_trans_omp_parallel_master_masked (gfc_code
*code
)
6761 tree stmt
, omp_clauses
;
6762 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
6763 bool parallel_combined
= false;
/* Plain 'parallel master' keeps code->ext.omp_clauses; every other
   form needs the combined clauses split.  */
6765 if (code
->op
!= EXEC_OMP_PARALLEL_MASTER
)
6766 gfc_split_omp_clauses (code
, clausesa
);
6768 gfc_start_block (&block
);
6769 omp_clauses
= gfc_trans_omp_clauses (&block
,
6770 code
->op
== EXEC_OMP_PARALLEL_MASTER
6771 ? code
->ext
.omp_clauses
6772 : &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
/* Translate the inner construct.  */
6775 if (code
->op
== EXEC_OMP_PARALLEL_MASTER
)
6776 stmt
= gfc_trans_omp_master (code
);
6777 else if (code
->op
== EXEC_OMP_PARALLEL_MASKED
)
6778 stmt
= gfc_trans_omp_masked (code
, &clausesa
[GFC_OMP_SPLIT_MASKED
]);
/* Combined taskloop forms: map the parallel-combined opcode to the
   corresponding master/masked taskloop opcode.  */
6784 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP
:
6785 op
= EXEC_OMP_MASKED_TASKLOOP
;
6787 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP_SIMD
:
6788 op
= EXEC_OMP_MASKED_TASKLOOP_SIMD
;
6790 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP
:
6791 op
= EXEC_OMP_MASTER_TASKLOOP
;
6793 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP_SIMD
:
6794 op
= EXEC_OMP_MASTER_TASKLOOP_SIMD
;
6799 stmt
= gfc_trans_omp_master_masked_taskloop (code
, op
);
6800 parallel_combined
= true;
6802 if (TREE_CODE (stmt
) != BIND_EXPR
)
6803 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
/* Build the enclosing OMP_PARALLEL node.  */
6806 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
6807 void_type_node
, stmt
, omp_clauses
);
6808 /* masked does have just filter clause, but during gimplification
6809 isn't represented by a gimplification omp context, so for
6810 !$omp parallel masked don't set OMP_PARALLEL_COMBINED,
6812 !$omp parallel masked
6813 !$omp taskloop simd lastprivate (x)
6815 !$omp parallel masked taskloop simd lastprivate (x) */
6816 if (parallel_combined
)
6817 OMP_PARALLEL_COMBINED (stmt
) = 1;
6818 gfc_add_expr_to_block (&block
, stmt
);
/* Free the split clauses iff we created them (mirrors line 6765).  */
6819 if (code
->op
!= EXEC_OMP_PARALLEL_MASTER
)
6820 gfc_free_split_omp_clauses (code
, clausesa
);
6821 return gfc_finish_block (&block
);
/* Translate an OpenMP TARGET DATA construct: translate its clauses and
   body, then wrap the body in an OMP_TARGET_DATA node.
   NOTE(review): garbled extraction -- some lines are missing.  */
6825 gfc_trans_omp_target_data (gfc_code
*code
)
6828 tree stmt
, omp_clauses
;
6830 gfc_start_block (&block
);
6831 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6833 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
6834 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TARGET_DATA
,
6835 void_type_node
, stmt
, omp_clauses
);
6836 gfc_add_expr_to_block (&block
, stmt
);
6837 return gfc_finish_block (&block
);
/* Translate a standalone OpenMP TARGET ENTER DATA directive into an
   OMP_TARGET_ENTER_DATA node carrying its translated clauses.
   NOTE(review): garbled extraction -- some lines are missing.  */
6841 gfc_trans_omp_target_enter_data (gfc_code
*code
)
6844 tree stmt
, omp_clauses
;
6846 gfc_start_block (&block
);
6847 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6849 stmt
= build1_loc (input_location
, OMP_TARGET_ENTER_DATA
, void_type_node
,
6851 gfc_add_expr_to_block (&block
, stmt
);
6852 return gfc_finish_block (&block
);
/* Translate a standalone OpenMP TARGET EXIT DATA directive into an
   OMP_TARGET_EXIT_DATA node carrying its translated clauses.
   NOTE(review): garbled extraction -- some lines are missing.  */
6856 gfc_trans_omp_target_exit_data (gfc_code
*code
)
6859 tree stmt
, omp_clauses
;
6861 gfc_start_block (&block
);
6862 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6864 stmt
= build1_loc (input_location
, OMP_TARGET_EXIT_DATA
, void_type_node
,
6866 gfc_add_expr_to_block (&block
, stmt
);
6867 return gfc_finish_block (&block
);
/* Translate a standalone OpenMP TARGET UPDATE directive into an
   OMP_TARGET_UPDATE node carrying its translated clauses.
   NOTE(review): garbled extraction -- some lines are missing.  */
6871 gfc_trans_omp_target_update (gfc_code
*code
)
6874 tree stmt
, omp_clauses
;
6876 gfc_start_block (&block
);
6877 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6879 stmt
= build1_loc (input_location
, OMP_TARGET_UPDATE
, void_type_node
,
6881 gfc_add_expr_to_block (&block
, stmt
);
6882 return gfc_finish_block (&block
);
/* Translate an OpenMP WORKSHARE construct.  Statements that can be
   workshared are emitted directly; consecutive statements that cannot
   are clustered into OMP_SINGLE regions.
   NOTE(review): garbled extraction -- interleaved lines are missing
   (e.g. the switch header and some case labels).  */
6886 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
6888 tree res
, tmp
, stmt
;
6889 stmtblock_t block
, *pblock
= NULL
;
6890 stmtblock_t singleblock
;
6891 int saved_ompws_flags
;
6892 bool singleblock_in_progress
= false;
6893 /* True if previous gfc_code in workshare construct is not workshared. */
6894 bool prev_singleunit
;
6895 location_t loc
= gfc_get_location (&code
->loc
);
/* Skip to the first statement inside the construct.  */
6897 code
= code
->block
->next
;
6901 gfc_start_block (&block
);
/* Enter workshare mode for the scalarizer.  */
6904 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
6905 prev_singleunit
= false;
6907 /* Translate statements one by one to trees until we reach
6908 the end of the workshare construct. Adjacent gfc_codes that
6909 are a single unit of work are clustered and encapsulated in a
6910 single OMP_SINGLE construct. */
6911 for (; code
; code
= code
->next
)
6913 if (code
->here
!= 0)
6915 res
= gfc_trans_label_here (code
);
6916 gfc_add_expr_to_block (pblock
, res
);
6919 /* No dependence analysis, use for clauses with wait.
6920 If this is the last gfc_code, use default omp_clauses. */
6921 if (code
->next
== NULL
&& clauses
->nowait
)
6922 ompws_flags
|= OMPWS_NOWAIT
;
6924 /* By default, every gfc_code is a single unit of work. */
6925 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
6926 ompws_flags
&= ~(OMPWS_SCALARIZER_WS
| OMPWS_SCALARIZER_BODY
);
/* Translate the statement; the enclosing switch header was lost in
   extraction.  */
6935 res
= gfc_trans_assign (code
);
6938 case EXEC_POINTER_ASSIGN
:
6939 res
= gfc_trans_pointer_assign (code
);
6942 case EXEC_INIT_ASSIGN
:
6943 res
= gfc_trans_init_assign (code
);
6947 res
= gfc_trans_forall (code
);
6951 res
= gfc_trans_where (code
);
6954 case EXEC_OMP_ATOMIC
:
6955 res
= gfc_trans_omp_directive (code
);
/* Nested OpenMP directives: translate with workshare flags saved and
   restored around the call.  */
6958 case EXEC_OMP_PARALLEL
:
6959 case EXEC_OMP_PARALLEL_DO
:
6960 case EXEC_OMP_PARALLEL_MASTER
:
6961 case EXEC_OMP_PARALLEL_SECTIONS
:
6962 case EXEC_OMP_PARALLEL_WORKSHARE
:
6963 case EXEC_OMP_CRITICAL
:
6964 saved_ompws_flags
= ompws_flags
;
6966 res
= gfc_trans_omp_directive (code
);
6967 ompws_flags
= saved_ompws_flags
;
6971 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
6974 gfc_set_backend_locus (&code
->loc
);
/* Cluster consecutive non-workshared statements into OMP_SINGLE.  */
6976 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
6978 if (prev_singleunit
)
6980 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
6981 /* Add current gfc_code to single block. */
6982 gfc_add_expr_to_block (&singleblock
, res
);
6985 /* Finish single block and add it to pblock. */
6986 tmp
= gfc_finish_block (&singleblock
);
6987 tmp
= build2_loc (loc
, OMP_SINGLE
,
6988 void_type_node
, tmp
, NULL_TREE
);
6989 gfc_add_expr_to_block (pblock
, tmp
);
6990 /* Add current gfc_code to pblock. */
6991 gfc_add_expr_to_block (pblock
, res
);
6992 singleblock_in_progress
= false;
6997 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
6999 /* Start single block. */
7000 gfc_init_block (&singleblock
);
7001 gfc_add_expr_to_block (&singleblock
, res
);
7002 singleblock_in_progress
= true;
7003 loc
= gfc_get_location (&code
->loc
);
7006 /* Add the new statement to the block. */
7007 gfc_add_expr_to_block (pblock
, res
);
7009 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
7013 /* Finish remaining SINGLE block, if we were in the middle of one. */
7014 if (singleblock_in_progress
)
7016 /* Finish single block and add it to pblock. */
7017 tmp
= gfc_finish_block (&singleblock
);
7018 tmp
= build2_loc (loc
, OMP_SINGLE
, void_type_node
, tmp
,
7020 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
7022 gfc_add_expr_to_block (pblock
, tmp
);
/* Wrap the whole construct body in a BIND_EXPR when needed.  */
7025 stmt
= gfc_finish_block (pblock
);
7026 if (TREE_CODE (stmt
) != BIND_EXPR
)
7028 if (!IS_EMPTY_STMT (stmt
))
7030 tree bindblock
= poplevel (1, 0);
7031 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
/* An empty workshare without nowait still needs the implied barrier.  */
7039 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
7040 stmt
= gfc_trans_omp_barrier ();
/* Translate an OpenACC DECLARE directive: its clauses plus the block's
   body wrapped in an OACC_DATA construct.
   NOTE(review): garbled extraction -- some lines are missing.  */
7047 gfc_trans_oacc_declare (gfc_code
*code
)
7050 tree stmt
, oacc_clauses
;
7051 enum tree_code construct_code
;
7053 construct_code
= OACC_DATA
;
7055 gfc_start_block (&block
);
/* The final 'true' selects OpenACC semantics in the clause translator;
   'false' is presumably a declare/openacc flag -- confirm in the full
   source.  */
7057 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.oacc_declare
->clauses
,
7058 code
->loc
, false, true);
7059 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7060 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
7062 gfc_add_expr_to_block (&block
, stmt
);
7064 return gfc_finish_block (&block
);
/* Central dispatcher: translate any OpenACC executable directive by
   forwarding CODE to the matching specialized translator.
   NOTE(review): garbled extraction -- the switch header and default
   case were lost; only the case labels below are visible.  */
7068 gfc_trans_oacc_directive (gfc_code
*code
)
/* Combined loop constructs.  */
7072 case EXEC_OACC_PARALLEL_LOOP
:
7073 case EXEC_OACC_KERNELS_LOOP
:
7074 case EXEC_OACC_SERIAL_LOOP
:
7075 return gfc_trans_oacc_combined_directive (code
);
/* Structured compute/data constructs.  */
7076 case EXEC_OACC_PARALLEL
:
7077 case EXEC_OACC_KERNELS
:
7078 case EXEC_OACC_SERIAL
:
7079 case EXEC_OACC_DATA
:
7080 case EXEC_OACC_HOST_DATA
:
7081 return gfc_trans_oacc_construct (code
);
7082 case EXEC_OACC_LOOP
:
7083 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
/* Standalone executable directives.  */
7085 case EXEC_OACC_UPDATE
:
7086 case EXEC_OACC_CACHE
:
7087 case EXEC_OACC_ENTER_DATA
:
7088 case EXEC_OACC_EXIT_DATA
:
7089 return gfc_trans_oacc_executable_directive (code
);
7090 case EXEC_OACC_WAIT
:
7091 return gfc_trans_oacc_wait_directive (code
);
7092 case EXEC_OACC_ATOMIC
:
7093 return gfc_trans_omp_atomic (code
);
7094 case EXEC_OACC_DECLARE
:
7095 return gfc_trans_oacc_declare (code
);
/* Central dispatcher: translate any OpenMP executable directive by
   forwarding CODE to the matching specialized translator.
   NOTE(review): garbled extraction -- the switch header, some case
   labels and the default case were lost; only the visible labels are
   documented.  */
7102 gfc_trans_omp_directive (gfc_code
*code
)
7106 case EXEC_OMP_ATOMIC
:
7107 return gfc_trans_omp_atomic (code
);
7108 case EXEC_OMP_BARRIER
:
7109 return gfc_trans_omp_barrier ();
7110 case EXEC_OMP_CANCEL
:
7111 return gfc_trans_omp_cancel (code
);
7112 case EXEC_OMP_CANCELLATION_POINT
:
7113 return gfc_trans_omp_cancellation_point (code
);
7114 case EXEC_OMP_CRITICAL
:
7115 return gfc_trans_omp_critical (code
);
7116 case EXEC_OMP_DEPOBJ
:
7117 return gfc_trans_omp_depobj (code
);
/* Simple loop constructs share the generic do-translator.  */
7118 case EXEC_OMP_DISTRIBUTE
:
7122 case EXEC_OMP_TASKLOOP
:
7123 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
7125 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
7126 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
7127 case EXEC_OMP_DISTRIBUTE_SIMD
:
7128 return gfc_trans_omp_distribute (code
, NULL
);
7129 case EXEC_OMP_DO_SIMD
:
7130 return gfc_trans_omp_do_simd (code
, NULL
, NULL
, NULL_TREE
);
7131 case EXEC_OMP_ERROR
:
7132 return gfc_trans_omp_error (code
);
7133 case EXEC_OMP_FLUSH
:
7134 return gfc_trans_omp_flush (code
);
7135 case EXEC_OMP_MASKED
:
7136 return gfc_trans_omp_masked (code
, NULL
);
7137 case EXEC_OMP_MASTER
:
7138 return gfc_trans_omp_master (code
);
7139 case EXEC_OMP_MASKED_TASKLOOP
:
7140 case EXEC_OMP_MASKED_TASKLOOP_SIMD
:
7141 case EXEC_OMP_MASTER_TASKLOOP
:
7142 case EXEC_OMP_MASTER_TASKLOOP_SIMD
:
7143 return gfc_trans_omp_master_masked_taskloop (code
, code
->op
);
7144 case EXEC_OMP_ORDERED
:
7145 return gfc_trans_omp_ordered (code
);
7146 case EXEC_OMP_PARALLEL
:
7147 return gfc_trans_omp_parallel (code
);
7148 case EXEC_OMP_PARALLEL_DO
:
7149 return gfc_trans_omp_parallel_do (code
, false, NULL
, NULL
);
7150 case EXEC_OMP_PARALLEL_LOOP
:
7151 return gfc_trans_omp_parallel_do (code
, true, NULL
, NULL
);
7152 case EXEC_OMP_PARALLEL_DO_SIMD
:
7153 return gfc_trans_omp_parallel_do_simd (code
, NULL
, NULL
);
7154 case EXEC_OMP_PARALLEL_MASKED
:
7155 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP
:
7156 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP_SIMD
:
7157 case EXEC_OMP_PARALLEL_MASTER
:
7158 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP
:
7159 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP_SIMD
:
7160 return gfc_trans_omp_parallel_master_masked (code
);
7161 case EXEC_OMP_PARALLEL_SECTIONS
:
7162 return gfc_trans_omp_parallel_sections (code
);
7163 case EXEC_OMP_PARALLEL_WORKSHARE
:
7164 return gfc_trans_omp_parallel_workshare (code
);
7165 case EXEC_OMP_SCOPE
:
7166 return gfc_trans_omp_scope (code
);
7167 case EXEC_OMP_SECTIONS
:
7168 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
7169 case EXEC_OMP_SINGLE
:
7170 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
/* All target and combined target forms funnel into one translator.  */
7171 case EXEC_OMP_TARGET
:
7172 case EXEC_OMP_TARGET_PARALLEL
:
7173 case EXEC_OMP_TARGET_PARALLEL_DO
:
7174 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
7175 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
7176 case EXEC_OMP_TARGET_SIMD
:
7177 case EXEC_OMP_TARGET_TEAMS
:
7178 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
7179 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
7180 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
7181 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
7182 case EXEC_OMP_TARGET_TEAMS_LOOP
:
7183 return gfc_trans_omp_target (code
);
7184 case EXEC_OMP_TARGET_DATA
:
7185 return gfc_trans_omp_target_data (code
);
7186 case EXEC_OMP_TARGET_ENTER_DATA
:
7187 return gfc_trans_omp_target_enter_data (code
);
7188 case EXEC_OMP_TARGET_EXIT_DATA
:
7189 return gfc_trans_omp_target_exit_data (code
);
7190 case EXEC_OMP_TARGET_UPDATE
:
7191 return gfc_trans_omp_target_update (code
);
7193 return gfc_trans_omp_task (code
);
7194 case EXEC_OMP_TASKGROUP
:
7195 return gfc_trans_omp_taskgroup (code
);
7196 case EXEC_OMP_TASKLOOP_SIMD
:
7197 return gfc_trans_omp_taskloop (code
, code
->op
);
7198 case EXEC_OMP_TASKWAIT
:
7199 return gfc_trans_omp_taskwait (code
);
7200 case EXEC_OMP_TASKYIELD
:
7201 return gfc_trans_omp_taskyield ();
/* All teams and combined teams forms funnel into one translator.  */
7202 case EXEC_OMP_TEAMS
:
7203 case EXEC_OMP_TEAMS_DISTRIBUTE
:
7204 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
7205 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
7206 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
7207 case EXEC_OMP_TEAMS_LOOP
:
7208 return gfc_trans_omp_teams (code
, NULL
, NULL_TREE
);
7209 case EXEC_OMP_WORKSHARE
:
7210 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);
7217 gfc_trans_omp_declare_simd (gfc_namespace
*ns
)
7222 gfc_omp_declare_simd
*ods
;
7223 for (ods
= ns
->omp_declare_simd
; ods
; ods
= ods
->next
)
7225 tree c
= gfc_trans_omp_clauses (NULL
, ods
->clauses
, ods
->where
, true);
7226 tree fndecl
= ns
->proc_name
->backend_decl
;
7228 c
= tree_cons (NULL_TREE
, c
, NULL_TREE
);
7229 c
= build_tree_list (get_identifier ("omp declare simd"), c
);
7230 TREE_CHAIN (c
) = DECL_ATTRIBUTES (fndecl
);
7231 DECL_ATTRIBUTES (fndecl
) = c
;