1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2024 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "gimple-expr.h"
30 #include "stringpool.h"
31 #include "fold-const.h"
32 #include "gimplify.h" /* For create_tmp_var_raw. */
33 #include "trans-stmt.h"
34 #include "trans-types.h"
35 #include "trans-array.h"
36 #include "trans-const.h"
38 #include "constructor.h"
39 #include "gomp-constants.h"
40 #include "omp-general.h"
42 #include "memmodel.h" /* For MEMMODEL_ enums. */
43 #include "dependency.h"
46 #define GCC_DIAG_STYLE __gcc_tdiag__
47 #include "diagnostic-core.h"
49 #define GCC_DIAG_STYLE __gcc_gfc__
55 /* True if OpenMP should regard this DECL as being a scalar which has Fortran's
56 allocatable or pointer attribute. */
59 gfc_omp_is_allocatable_or_ptr (const_tree decl
)
62 && (GFC_DECL_GET_SCALAR_POINTER (decl
)
63 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)));
66 /* True if the argument is an optional argument; except that false is also
67 returned for arguments with the value attribute (nonpointers) and for
68 assumed-shape variables (decl is a local variable containing arg->data).
69 Note that for 'procedure(), optional' the value false is used as that's
70 always a pointer and no additional indirection is used.
71 Note that pvoid_type_node is for 'type(c_ptr), value' (and c_funloc). */
74 gfc_omp_is_optional_argument (const_tree decl
)
76 /* Note: VAR_DECL can occur with BIND(C) and array descriptors. */
77 return ((TREE_CODE (decl
) == PARM_DECL
|| VAR_P (decl
))
78 && DECL_LANG_SPECIFIC (decl
)
79 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
80 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
)))
81 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) != FUNCTION_TYPE
82 && GFC_DECL_OPTIONAL_ARGUMENT (decl
));
85 /* Check whether this DECL belongs to a Fortran optional argument.
86 With 'for_present_check' set to false, decls which are optional parameters
87 themselves are returned as tree - or a NULL_TREE otherwise. Those decls are
88 always pointers. With 'for_present_check' set to true, the decl for checking
89 whether an argument is present is returned; for arguments with value
90 attribute this is the hidden argument and of BOOLEAN_TYPE. If the decl is
91 unrelated to optional arguments, NULL_TREE is returned. */
94 gfc_omp_check_optional_argument (tree decl
, bool for_present_check
)
96 if (!for_present_check
)
97 return gfc_omp_is_optional_argument (decl
) ? decl
: NULL_TREE
;
99 if (!DECL_LANG_SPECIFIC (decl
))
102 tree orig_decl
= decl
;
104 /* For assumed-shape arrays, a local decl with arg->data is used. */
105 if (TREE_CODE (decl
) != PARM_DECL
106 && (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
))
107 || GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))))
108 decl
= GFC_DECL_SAVED_DESCRIPTOR (decl
);
110 /* Note: With BIND(C), array descriptors are converted to a VAR_DECL. */
111 if (decl
== NULL_TREE
112 || (TREE_CODE (decl
) != PARM_DECL
&& TREE_CODE (decl
) != VAR_DECL
)
113 || !DECL_LANG_SPECIFIC (decl
)
114 || !GFC_DECL_OPTIONAL_ARGUMENT (decl
))
117 /* Scalars with VALUE attribute which are passed by value use a hidden
118 argument to denote the present status. They are passed as nonpointer type
119 with one exception: 'type(c_ptr), value' as 'void*'. */
120 /* Cf. trans-expr.cc's gfc_conv_expr_present. */
121 if (TREE_CODE (TREE_TYPE (decl
)) != POINTER_TYPE
122 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
124 char name
[GFC_MAX_SYMBOL_LEN
+ 2];
128 strcpy (&name
[1], IDENTIFIER_POINTER (DECL_NAME (decl
)));
129 tree_name
= get_identifier (name
);
131 /* Walk function argument list to find the hidden arg. */
132 decl
= DECL_ARGUMENTS (DECL_CONTEXT (decl
));
133 for ( ; decl
!= NULL_TREE
; decl
= TREE_CHAIN (decl
))
134 if (DECL_NAME (decl
) == tree_name
135 && DECL_ARTIFICIAL (decl
))
142 return fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
143 orig_decl
, null_pointer_node
);
147 /* Returns tree with NULL if it is not an array descriptor and with the tree to
148 access the 'data' component otherwise. With type_only = true, it returns the
149 TREE_TYPE without creating a new tree. */
152 gfc_omp_array_data (tree decl
, bool type_only
)
154 tree type
= TREE_TYPE (decl
);
156 if (POINTER_TYPE_P (type
))
157 type
= TREE_TYPE (type
);
159 if (!GFC_DESCRIPTOR_TYPE_P (type
))
163 return GFC_TYPE_ARRAY_DATAPTR_TYPE (type
);
165 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
166 decl
= build_fold_indirect_ref (decl
);
168 decl
= gfc_conv_descriptor_data_get (decl
);
173 /* Return the byte-size of the passed array descriptor. */
176 gfc_omp_array_size (tree decl
, gimple_seq
*pre_p
)
179 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
180 decl
= build_fold_indirect_ref (decl
);
181 tree type
= TREE_TYPE (decl
);
182 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type
));
183 bool allocatable
= (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
184 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
185 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
);
186 gfc_init_block (&block
);
187 tree size
= gfc_full_array_size (&block
, decl
,
188 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)));
189 size
= fold_convert (size_type_node
, size
);
190 tree elemsz
= gfc_get_element_type (TREE_TYPE (decl
));
191 if (TREE_CODE (elemsz
) == ARRAY_TYPE
&& TYPE_STRING_FLAG (elemsz
))
192 elemsz
= gfc_conv_descriptor_elem_len (decl
);
194 elemsz
= TYPE_SIZE_UNIT (elemsz
);
195 size
= fold_build2 (MULT_EXPR
, size_type_node
, size
, elemsz
);
197 gimplify_and_add (gfc_finish_block (&block
), pre_p
);
200 tree var
= create_tmp_var (size_type_node
);
201 gfc_add_expr_to_block (&block
, build2 (MODIFY_EXPR
, sizetype
, var
, size
));
202 tree tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
203 gfc_conv_descriptor_data_get (decl
),
205 tmp
= build3_loc (input_location
, COND_EXPR
, void_type_node
, tmp
,
206 gfc_finish_block (&block
),
207 build2 (MODIFY_EXPR
, sizetype
, var
, size_zero_node
));
208 gimplify_and_add (tmp
, pre_p
);
215 /* True if OpenMP should privatize what this DECL points to rather
216 than the DECL itself. */
219 gfc_omp_privatize_by_reference (const_tree decl
)
221 tree type
= TREE_TYPE (decl
);
223 if (TREE_CODE (type
) == REFERENCE_TYPE
224 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
227 if (TREE_CODE (type
) == POINTER_TYPE
228 && gfc_omp_is_optional_argument (decl
))
231 if (TREE_CODE (type
) == POINTER_TYPE
)
233 while (TREE_CODE (decl
) == COMPONENT_REF
)
234 decl
= TREE_OPERAND (decl
, 1);
236 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
237 that have POINTER_TYPE type and aren't scalar pointers, scalar
238 allocatables, Cray pointees or C pointers are supposed to be
239 privatized by reference. */
240 if (GFC_DECL_GET_SCALAR_POINTER (decl
)
241 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
242 || GFC_DECL_CRAY_POINTEE (decl
)
243 || GFC_DECL_ASSOCIATE_VAR_P (decl
)
244 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
247 if (!DECL_ARTIFICIAL (decl
)
248 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
251 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
253 if (DECL_LANG_SPECIFIC (decl
)
254 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
261 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
262 of DECL is predetermined. */
264 enum omp_clause_default_kind
265 gfc_omp_predetermined_sharing (tree decl
)
267 /* Associate names preserve the association established during ASSOCIATE.
268 As they are implemented either as pointers to the selector or array
269 descriptor and shouldn't really change in the ASSOCIATE region,
270 this decl can be either shared or firstprivate. If it is a pointer,
271 use firstprivate, as it is cheaper that way, otherwise make it shared. */
272 if (GFC_DECL_ASSOCIATE_VAR_P (decl
))
274 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
275 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
277 return OMP_CLAUSE_DEFAULT_SHARED
;
280 if (DECL_ARTIFICIAL (decl
)
281 && ! GFC_DECL_RESULT (decl
)
282 && ! (DECL_LANG_SPECIFIC (decl
)
283 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
284 return OMP_CLAUSE_DEFAULT_SHARED
;
286 /* Cray pointees shouldn't be listed in any clauses and should be
287 gimplified to dereference of the corresponding Cray pointer.
288 Make them all private, so that they are emitted in the debug
290 if (GFC_DECL_CRAY_POINTEE (decl
))
291 return OMP_CLAUSE_DEFAULT_PRIVATE
;
293 /* Assumed-size arrays are predetermined shared. */
294 if (TREE_CODE (decl
) == PARM_DECL
295 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
296 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
297 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
298 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
300 return OMP_CLAUSE_DEFAULT_SHARED
;
302 /* Dummy procedures aren't considered variables by OpenMP, thus are
303 disallowed in OpenMP clauses. They are represented as PARM_DECLs
304 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
305 to avoid complaining about their uses with default(none). */
306 if (TREE_CODE (decl
) == PARM_DECL
307 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
308 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
309 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
311 /* COMMON and EQUIVALENCE decls are shared. They
312 are only referenced through DECL_VALUE_EXPR of the variables
313 contained in them. If those are privatized, they will not be
314 gimplified to the COMMON or EQUIVALENCE decls. */
315 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
316 return OMP_CLAUSE_DEFAULT_SHARED
;
318 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
319 return OMP_CLAUSE_DEFAULT_SHARED
;
321 /* These are either array or derived parameters, or vtables.
322 In the former cases, the OpenMP standard doesn't consider them to be
323 variables at all (they can't be redefined), but they can nevertheless appear
324 in parallel/task regions and for default(none) purposes treat them as shared.
325 For vtables likely the same handling is desirable. */
326 if (VAR_P (decl
) && TREE_READONLY (decl
)
327 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
328 return OMP_CLAUSE_DEFAULT_SHARED
;
330 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
334 /* OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED unless OpenMP mapping attribute
335 of DECL is predetermined. */
337 enum omp_clause_defaultmap_kind
338 gfc_omp_predetermined_mapping (tree decl
)
340 if (DECL_ARTIFICIAL (decl
)
341 && ! GFC_DECL_RESULT (decl
)
342 && ! (DECL_LANG_SPECIFIC (decl
)
343 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
344 return OMP_CLAUSE_DEFAULTMAP_TO
;
346 /* These are either array or derived parameters, or vtables. */
347 if (VAR_P (decl
) && TREE_READONLY (decl
)
348 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
349 return OMP_CLAUSE_DEFAULTMAP_TO
;
351 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
;
355 /* Return decl that should be used when reporting DEFAULT(NONE)
359 gfc_omp_report_decl (tree decl
)
361 if (DECL_ARTIFICIAL (decl
)
362 && DECL_LANG_SPECIFIC (decl
)
363 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
364 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
369 /* Return true if TYPE has any allocatable components. */
372 gfc_has_alloc_comps (tree type
, tree decl
)
376 if (POINTER_TYPE_P (type
))
378 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
379 type
= TREE_TYPE (type
);
380 else if (GFC_DECL_GET_SCALAR_POINTER (decl
))
384 if (GFC_DESCRIPTOR_TYPE_P (type
)
385 && (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
386 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
))
389 if (GFC_DESCRIPTOR_TYPE_P (type
) || GFC_ARRAY_TYPE_P (type
))
390 type
= gfc_get_element_type (type
);
392 if (TREE_CODE (type
) != RECORD_TYPE
)
395 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
397 ftype
= TREE_TYPE (field
);
398 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
400 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
401 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
403 if (gfc_has_alloc_comps (ftype
, field
))
409 /* Return true if TYPE is polymorphic but not with pointer attribute. */
412 gfc_is_polymorphic_nonptr (tree type
)
414 if (POINTER_TYPE_P (type
))
415 type
= TREE_TYPE (type
);
416 return GFC_CLASS_TYPE_P (type
);
419 /* Return true if TYPE is unlimited polymorphic but not with pointer attribute;
420 unlimited means also intrinsic types are handled and _len is used. */
423 gfc_is_unlimited_polymorphic_nonptr (tree type
)
425 if (POINTER_TYPE_P (type
))
426 type
= TREE_TYPE (type
);
427 if (!GFC_CLASS_TYPE_P (type
))
430 tree field
= TYPE_FIELDS (type
); /* _data */
432 field
= DECL_CHAIN (field
); /* _vptr */
434 field
= DECL_CHAIN (field
);
437 gcc_assert (strcmp ("_len", IDENTIFIER_POINTER (DECL_NAME (field
))) == 0);
441 /* Return true if the DECL is for an allocatable array or scalar. */
444 gfc_omp_allocatable_p (tree decl
)
449 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
452 tree type
= TREE_TYPE (decl
);
453 if (gfc_omp_privatize_by_reference (decl
))
454 type
= TREE_TYPE (type
);
456 if (GFC_DESCRIPTOR_TYPE_P (type
)
457 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
464 /* Return true if DECL in private clause needs
465 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
467 gfc_omp_private_outer_ref (tree decl
)
469 tree type
= TREE_TYPE (decl
);
471 if (gfc_omp_privatize_by_reference (decl
))
472 type
= TREE_TYPE (type
);
474 if (GFC_DESCRIPTOR_TYPE_P (type
)
475 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
478 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
481 if (gfc_has_alloc_comps (type
, decl
))
487 /* Callback for gfc_omp_unshare_expr. */
490 gfc_omp_unshare_expr_r (tree
*tp
, int *walk_subtrees
, void *)
493 enum tree_code code
= TREE_CODE (t
);
495 /* Stop at types, decls, constants like copy_tree_r. */
496 if (TREE_CODE_CLASS (code
) == tcc_type
497 || TREE_CODE_CLASS (code
) == tcc_declaration
498 || TREE_CODE_CLASS (code
) == tcc_constant
501 else if (handled_component_p (t
)
502 || TREE_CODE (t
) == MEM_REF
)
504 *tp
= unshare_expr (t
);
511 /* Unshare in expr anything that the FE which normally doesn't
512 care much about tree sharing (because during gimplification
513 everything is unshared) could cause problems with tree sharing
514 at omp-low.cc time. */
517 gfc_omp_unshare_expr (tree expr
)
519 walk_tree (&expr
, gfc_omp_unshare_expr_r
, NULL
, NULL
);
/* Kinds of per-allocatable-component processing done by
   gfc_walk_alloc_comps.  */
enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,
  WALK_ALLOC_COMPS_DEFAULT_CTOR,
  WALK_ALLOC_COMPS_COPY_CTOR
};
530 /* Handle allocatable components in OpenMP clauses. */
533 gfc_walk_alloc_comps (tree decl
, tree dest
, tree var
,
534 enum walk_alloc_comps kind
)
536 stmtblock_t block
, tmpblock
;
537 tree type
= TREE_TYPE (decl
), then_b
, tem
, field
;
538 gfc_init_block (&block
);
540 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
542 if (GFC_DESCRIPTOR_TYPE_P (type
))
544 gfc_init_block (&tmpblock
);
545 tem
= gfc_full_array_size (&tmpblock
, decl
,
546 GFC_TYPE_ARRAY_RANK (type
));
547 then_b
= gfc_finish_block (&tmpblock
);
548 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (then_b
));
549 tem
= gfc_omp_unshare_expr (tem
);
550 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
551 gfc_array_index_type
, tem
,
556 bool compute_nelts
= false;
557 if (!TYPE_DOMAIN (type
)
558 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
559 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
560 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
561 compute_nelts
= true;
562 else if (VAR_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))))
564 tree a
= DECL_ATTRIBUTES (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)));
565 if (lookup_attribute ("omp dummy var", a
))
566 compute_nelts
= true;
570 tem
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
571 TYPE_SIZE_UNIT (type
),
572 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
573 tem
= size_binop (MINUS_EXPR
, tem
, size_one_node
);
576 tem
= array_type_nelts (type
);
577 tem
= fold_convert (gfc_array_index_type
, tem
);
580 tree nelems
= gfc_evaluate_now (tem
, &block
);
581 tree index
= gfc_create_var (gfc_array_index_type
, "S");
583 gfc_init_block (&tmpblock
);
584 tem
= gfc_conv_array_data (decl
);
585 tree declvar
= build_fold_indirect_ref_loc (input_location
, tem
);
586 tree declvref
= gfc_build_array_ref (declvar
, index
, NULL
);
587 tree destvar
, destvref
= NULL_TREE
;
590 tem
= gfc_conv_array_data (dest
);
591 destvar
= build_fold_indirect_ref_loc (input_location
, tem
);
592 destvref
= gfc_build_array_ref (destvar
, index
, NULL
);
594 gfc_add_expr_to_block (&tmpblock
,
595 gfc_walk_alloc_comps (declvref
, destvref
,
599 gfc_init_loopinfo (&loop
);
601 loop
.from
[0] = gfc_index_zero_node
;
602 loop
.loopvar
[0] = index
;
604 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
605 gfc_add_block_to_block (&block
, &loop
.pre
);
606 return gfc_finish_block (&block
);
608 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var
))
610 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
612 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
613 type
= TREE_TYPE (decl
);
616 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
);
617 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
619 tree ftype
= TREE_TYPE (field
);
620 tree declf
, destf
= NULL_TREE
;
621 bool has_alloc_comps
= gfc_has_alloc_comps (ftype
, field
);
622 if ((!GFC_DESCRIPTOR_TYPE_P (ftype
)
623 || GFC_TYPE_ARRAY_AKIND (ftype
) != GFC_ARRAY_ALLOCATABLE
)
624 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field
)
627 declf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
628 decl
, field
, NULL_TREE
);
630 destf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
631 dest
, field
, NULL_TREE
);
636 case WALK_ALLOC_COMPS_DTOR
:
638 case WALK_ALLOC_COMPS_DEFAULT_CTOR
:
639 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
640 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
642 gfc_add_modify (&block
, unshare_expr (destf
),
643 unshare_expr (declf
));
644 tem
= gfc_duplicate_allocatable_nocopy
645 (destf
, declf
, ftype
,
646 GFC_TYPE_ARRAY_RANK (ftype
));
648 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
649 tem
= gfc_duplicate_allocatable_nocopy (destf
, declf
, ftype
, 0);
651 case WALK_ALLOC_COMPS_COPY_CTOR
:
652 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
653 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
654 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
,
655 GFC_TYPE_ARRAY_RANK (ftype
),
657 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
658 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
, 0,
663 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
666 gfc_init_block (&tmpblock
);
667 gfc_add_expr_to_block (&tmpblock
,
668 gfc_walk_alloc_comps (declf
, destf
,
670 then_b
= gfc_finish_block (&tmpblock
);
671 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
672 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
673 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
674 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
675 tem
= unshare_expr (declf
);
680 tem
= fold_convert (pvoid_type_node
, tem
);
681 tem
= fold_build2_loc (input_location
, NE_EXPR
,
682 logical_type_node
, tem
,
684 then_b
= build3_loc (input_location
, COND_EXPR
, void_type_node
,
686 build_empty_stmt (input_location
));
688 gfc_add_expr_to_block (&block
, then_b
);
690 if (kind
== WALK_ALLOC_COMPS_DTOR
)
692 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
693 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
695 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
696 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
,
697 NULL_TREE
, NULL_TREE
, true,
699 GFC_CAF_COARRAY_NOCOARRAY
);
700 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
702 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
704 tem
= gfc_call_free (unshare_expr (declf
));
705 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
710 return gfc_finish_block (&block
);
713 /* Return code to initialize DECL with its default constructor, or
714 NULL if there's nothing to do. */
717 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
719 tree type
= TREE_TYPE (decl
), size
, ptr
, cond
, then_b
, else_b
;
720 stmtblock_t block
, cond_block
;
722 switch (OMP_CLAUSE_CODE (clause
))
724 case OMP_CLAUSE__LOOPTEMP_
:
725 case OMP_CLAUSE__REDUCTEMP_
:
726 case OMP_CLAUSE__CONDTEMP_
:
727 case OMP_CLAUSE__SCANTEMP_
:
729 case OMP_CLAUSE_PRIVATE
:
730 case OMP_CLAUSE_LASTPRIVATE
:
731 case OMP_CLAUSE_LINEAR
:
732 case OMP_CLAUSE_REDUCTION
:
733 case OMP_CLAUSE_IN_REDUCTION
:
734 case OMP_CLAUSE_TASK_REDUCTION
:
740 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
741 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
742 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
743 || !POINTER_TYPE_P (type
)))
745 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
748 gfc_start_block (&block
);
749 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
750 OMP_CLAUSE_DECL (clause
),
751 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
752 gfc_add_expr_to_block (&block
, tem
);
753 return gfc_finish_block (&block
);
758 gcc_assert (outer
!= NULL_TREE
);
760 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
761 "not currently allocated" allocation status if outer
762 array is "not currently allocated", otherwise should be allocated. */
763 gfc_start_block (&block
);
765 gfc_init_block (&cond_block
);
767 if (GFC_DESCRIPTOR_TYPE_P (type
))
769 gfc_add_modify (&cond_block
, decl
, outer
);
770 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
771 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
772 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
774 gfc_conv_descriptor_lbound_get (decl
, rank
));
775 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
776 size
, gfc_index_one_node
);
777 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
778 size
= fold_build2_loc (input_location
, MULT_EXPR
,
779 gfc_array_index_type
, size
,
780 gfc_conv_descriptor_stride_get (decl
, rank
));
781 tree esize
= fold_convert (gfc_array_index_type
,
782 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
783 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
785 size
= unshare_expr (size
);
786 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
790 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
791 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
792 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
793 if (GFC_DESCRIPTOR_TYPE_P (type
))
794 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
), ptr
);
796 gfc_add_modify (&cond_block
, unshare_expr (decl
),
797 fold_convert (TREE_TYPE (decl
), ptr
));
798 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
800 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
801 OMP_CLAUSE_DECL (clause
),
802 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
803 gfc_add_expr_to_block (&cond_block
, tem
);
805 then_b
= gfc_finish_block (&cond_block
);
807 /* Reduction clause requires allocated ALLOCATABLE. */
808 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_REDUCTION
809 && OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_IN_REDUCTION
810 && OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_TASK_REDUCTION
)
812 gfc_init_block (&cond_block
);
813 if (GFC_DESCRIPTOR_TYPE_P (type
))
814 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
),
817 gfc_add_modify (&cond_block
, unshare_expr (decl
),
818 build_zero_cst (TREE_TYPE (decl
)));
819 else_b
= gfc_finish_block (&cond_block
);
821 tree tem
= fold_convert (pvoid_type_node
,
822 GFC_DESCRIPTOR_TYPE_P (type
)
823 ? gfc_conv_descriptor_data_get (outer
) : outer
);
824 tem
= unshare_expr (tem
);
825 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
826 tem
, null_pointer_node
);
827 gfc_add_expr_to_block (&block
,
828 build3_loc (input_location
, COND_EXPR
,
829 void_type_node
, cond
, then_b
,
831 /* Avoid -W*uninitialized warnings. */
833 suppress_warning (decl
, OPT_Wuninitialized
);
836 gfc_add_expr_to_block (&block
, then_b
);
838 return gfc_finish_block (&block
);
841 /* Build and return code for a copy constructor from SRC to DEST. */
844 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
846 tree type
= TREE_TYPE (dest
), ptr
, size
, call
;
847 tree decl_type
= TREE_TYPE (OMP_CLAUSE_DECL (clause
));
848 tree cond
, then_b
, else_b
;
849 stmtblock_t block
, cond_block
;
851 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
852 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
854 /* Privatize pointer, only; cf. gfc_omp_predetermined_sharing. */
855 if (DECL_P (OMP_CLAUSE_DECL (clause
))
856 && GFC_DECL_ASSOCIATE_VAR_P (OMP_CLAUSE_DECL (clause
)))
857 return build2 (MODIFY_EXPR
, TREE_TYPE (dest
), dest
, src
);
859 if (DECL_ARTIFICIAL (OMP_CLAUSE_DECL (clause
))
860 && DECL_LANG_SPECIFIC (OMP_CLAUSE_DECL (clause
))
861 && GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)))
863 = TREE_TYPE (GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)));
865 if (gfc_is_polymorphic_nonptr (decl_type
))
867 if (POINTER_TYPE_P (decl_type
))
868 decl_type
= TREE_TYPE (decl_type
);
869 decl_type
= TREE_TYPE (TYPE_FIELDS (decl_type
));
870 if (GFC_DESCRIPTOR_TYPE_P (decl_type
) || GFC_ARRAY_TYPE_P (decl_type
))
871 fatal_error (input_location
,
872 "Sorry, polymorphic arrays not yet supported for "
875 tree nelems
= build_int_cst (size_type_node
, 1); /* Scalar. */
876 tree src_data
= gfc_class_data_get (unshare_expr (src
));
877 tree dest_data
= gfc_class_data_get (unshare_expr (dest
));
878 bool unlimited
= gfc_is_unlimited_polymorphic_nonptr (type
);
880 gfc_start_block (&block
);
881 gfc_add_modify (&block
, gfc_class_vptr_get (dest
),
882 gfc_class_vptr_get (src
));
883 gfc_init_block (&cond_block
);
887 src_len
= gfc_class_len_get (src
);
888 gfc_add_modify (&cond_block
, gfc_class_len_get (unshare_expr (dest
)), src_len
);
891 /* Use: size = class._vtab._size * (class._len > 0 ? class._len : 1). */
892 size
= fold_convert (size_type_node
, gfc_class_vtab_size_get (src
));
895 cond
= fold_build2_loc (input_location
, GT_EXPR
, boolean_type_node
,
896 unshare_expr (src_len
),
897 build_zero_cst (TREE_TYPE (src_len
)));
898 cond
= build3_loc (input_location
, COND_EXPR
, size_type_node
, cond
,
899 fold_convert (size_type_node
,
900 unshare_expr (src_len
)),
901 build_int_cst (size_type_node
, 1));
902 size
= fold_build2_loc (input_location
, MULT_EXPR
, size_type_node
,
906 /* Malloc memory + call class->_vpt->_copy. */
907 call
= builtin_decl_explicit (BUILT_IN_MALLOC
);
908 call
= build_call_expr_loc (input_location
, call
, 1, size
);
909 gfc_add_modify (&cond_block
, dest_data
,
910 fold_convert (TREE_TYPE (dest_data
), call
));
911 gfc_add_expr_to_block (&cond_block
,
912 gfc_copy_class_to_class (src
, dest
, nelems
,
915 gcc_assert (TREE_CODE (dest_data
) == COMPONENT_REF
);
916 if (!GFC_DECL_GET_SCALAR_ALLOCATABLE (TREE_OPERAND (dest_data
, 1)))
918 gfc_add_block_to_block (&block
, &cond_block
);
922 /* Create: if (class._data != 0) <cond_block> else class._data = NULL; */
923 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
924 src_data
, null_pointer_node
);
925 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
926 void_type_node
, cond
,
927 gfc_finish_block (&cond_block
),
928 fold_build2_loc (input_location
, MODIFY_EXPR
, void_type_node
,
929 unshare_expr (dest_data
), null_pointer_node
)));
931 return gfc_finish_block (&block
);
934 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
935 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
936 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
937 || !POINTER_TYPE_P (type
)))
939 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
941 gfc_start_block (&block
);
942 gfc_add_modify (&block
, dest
, src
);
943 tree tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
944 WALK_ALLOC_COMPS_COPY_CTOR
);
945 gfc_add_expr_to_block (&block
, tem
);
946 return gfc_finish_block (&block
);
949 return build2_v (MODIFY_EXPR
, dest
, src
);
952 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
953 and copied from SRC. */
954 gfc_start_block (&block
);
956 gfc_init_block (&cond_block
);
958 gfc_add_modify (&cond_block
, dest
, fold_convert (TREE_TYPE (dest
), src
));
959 if (GFC_DESCRIPTOR_TYPE_P (type
))
961 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
962 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
963 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
965 gfc_conv_descriptor_lbound_get (dest
, rank
));
966 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
967 size
, gfc_index_one_node
);
968 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
969 size
= fold_build2_loc (input_location
, MULT_EXPR
,
970 gfc_array_index_type
, size
,
971 gfc_conv_descriptor_stride_get (dest
, rank
));
972 tree esize
= fold_convert (gfc_array_index_type
,
973 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
974 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
976 size
= unshare_expr (size
);
977 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
981 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
982 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
983 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
984 if (GFC_DESCRIPTOR_TYPE_P (type
))
985 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
), ptr
);
987 gfc_add_modify (&cond_block
, unshare_expr (dest
),
988 fold_convert (TREE_TYPE (dest
), ptr
));
990 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
991 ? gfc_conv_descriptor_data_get (src
) : src
;
992 srcptr
= unshare_expr (srcptr
);
993 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
994 call
= build_call_expr_loc (input_location
,
995 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
997 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
998 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1000 tree tem
= gfc_walk_alloc_comps (src
, dest
,
1001 OMP_CLAUSE_DECL (clause
),
1002 WALK_ALLOC_COMPS_COPY_CTOR
);
1003 gfc_add_expr_to_block (&cond_block
, tem
);
1005 then_b
= gfc_finish_block (&cond_block
);
1007 gfc_init_block (&cond_block
);
1008 if (GFC_DESCRIPTOR_TYPE_P (type
))
1009 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
),
1012 gfc_add_modify (&cond_block
, unshare_expr (dest
),
1013 build_zero_cst (TREE_TYPE (dest
)));
1014 else_b
= gfc_finish_block (&cond_block
);
1016 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1017 unshare_expr (srcptr
), null_pointer_node
);
1018 gfc_add_expr_to_block (&block
,
1019 build3_loc (input_location
, COND_EXPR
,
1020 void_type_node
, cond
, then_b
, else_b
));
1021 /* Avoid -W*uninitialized warnings. */
1023 suppress_warning (dest
, OPT_Wuninitialized
);
1025 return gfc_finish_block (&block
);
1028 /* Similarly, except use an intrinsic or pointer assignment operator
1032 gfc_omp_clause_assign_op (tree clause
, tree dest
, tree src
)
1034 tree type
= TREE_TYPE (dest
), ptr
, size
, call
, nonalloc
;
1035 tree cond
, then_b
, else_b
;
1036 stmtblock_t block
, cond_block
, cond_block2
, inner_block
;
1038 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
1039 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
1040 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
1041 || !POINTER_TYPE_P (type
)))
1043 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1045 gfc_start_block (&block
);
1046 /* First dealloc any allocatable components in DEST. */
1047 tree tem
= gfc_walk_alloc_comps (dest
, NULL_TREE
,
1048 OMP_CLAUSE_DECL (clause
),
1049 WALK_ALLOC_COMPS_DTOR
);
1050 gfc_add_expr_to_block (&block
, tem
);
1051 /* Then copy over toplevel data. */
1052 gfc_add_modify (&block
, dest
, src
);
1053 /* Finally allocate any allocatable components and copy. */
1054 tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
1055 WALK_ALLOC_COMPS_COPY_CTOR
);
1056 gfc_add_expr_to_block (&block
, tem
);
1057 return gfc_finish_block (&block
);
1060 return build2_v (MODIFY_EXPR
, dest
, src
);
1063 gfc_start_block (&block
);
1065 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1067 then_b
= gfc_walk_alloc_comps (dest
, NULL_TREE
, OMP_CLAUSE_DECL (clause
),
1068 WALK_ALLOC_COMPS_DTOR
);
1069 tree tem
= fold_convert (pvoid_type_node
,
1070 GFC_DESCRIPTOR_TYPE_P (type
)
1071 ? gfc_conv_descriptor_data_get (dest
) : dest
);
1072 tem
= unshare_expr (tem
);
1073 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1074 tem
, null_pointer_node
);
1075 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1076 then_b
, build_empty_stmt (input_location
));
1077 gfc_add_expr_to_block (&block
, tem
);
1080 gfc_init_block (&cond_block
);
1082 if (GFC_DESCRIPTOR_TYPE_P (type
))
1084 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
1085 size
= gfc_conv_descriptor_ubound_get (src
, rank
);
1086 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
1088 gfc_conv_descriptor_lbound_get (src
, rank
));
1089 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
1090 size
, gfc_index_one_node
);
1091 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
1092 size
= fold_build2_loc (input_location
, MULT_EXPR
,
1093 gfc_array_index_type
, size
,
1094 gfc_conv_descriptor_stride_get (src
, rank
));
1095 tree esize
= fold_convert (gfc_array_index_type
,
1096 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
1097 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
1099 size
= unshare_expr (size
);
1100 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
1104 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
1105 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
1107 tree destptr
= GFC_DESCRIPTOR_TYPE_P (type
)
1108 ? gfc_conv_descriptor_data_get (dest
) : dest
;
1109 destptr
= unshare_expr (destptr
);
1110 destptr
= fold_convert (pvoid_type_node
, destptr
);
1111 gfc_add_modify (&cond_block
, ptr
, destptr
);
1113 nonalloc
= fold_build2_loc (input_location
, EQ_EXPR
, logical_type_node
,
1114 destptr
, null_pointer_node
);
1116 if (GFC_DESCRIPTOR_TYPE_P (type
))
1119 for (i
= 0; i
< GFC_TYPE_ARRAY_RANK (type
); i
++)
1121 tree rank
= gfc_rank_cst
[i
];
1122 tree tem
= gfc_conv_descriptor_ubound_get (src
, rank
);
1123 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
1124 gfc_array_index_type
, tem
,
1125 gfc_conv_descriptor_lbound_get (src
, rank
));
1126 tem
= fold_build2_loc (input_location
, PLUS_EXPR
,
1127 gfc_array_index_type
, tem
,
1128 gfc_conv_descriptor_lbound_get (dest
, rank
));
1129 tem
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1130 tem
, gfc_conv_descriptor_ubound_get (dest
,
1132 cond
= fold_build2_loc (input_location
, TRUTH_ORIF_EXPR
,
1133 logical_type_node
, cond
, tem
);
1137 gfc_init_block (&cond_block2
);
1139 if (GFC_DESCRIPTOR_TYPE_P (type
))
1141 gfc_init_block (&inner_block
);
1142 gfc_allocate_using_malloc (&inner_block
, ptr
, size
, NULL_TREE
);
1143 then_b
= gfc_finish_block (&inner_block
);
1145 gfc_init_block (&inner_block
);
1146 gfc_add_modify (&inner_block
, ptr
,
1147 gfc_call_realloc (&inner_block
, ptr
, size
));
1148 else_b
= gfc_finish_block (&inner_block
);
1150 gfc_add_expr_to_block (&cond_block2
,
1151 build3_loc (input_location
, COND_EXPR
,
1153 unshare_expr (nonalloc
),
1155 gfc_add_modify (&cond_block2
, dest
, src
);
1156 gfc_conv_descriptor_data_set (&cond_block2
, unshare_expr (dest
), ptr
);
1160 gfc_allocate_using_malloc (&cond_block2
, ptr
, size
, NULL_TREE
);
1161 gfc_add_modify (&cond_block2
, unshare_expr (dest
),
1162 fold_convert (type
, ptr
));
1164 then_b
= gfc_finish_block (&cond_block2
);
1165 else_b
= build_empty_stmt (input_location
);
1167 gfc_add_expr_to_block (&cond_block
,
1168 build3_loc (input_location
, COND_EXPR
,
1169 void_type_node
, unshare_expr (cond
),
1172 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
1173 ? gfc_conv_descriptor_data_get (src
) : src
;
1174 srcptr
= unshare_expr (srcptr
);
1175 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
1176 call
= build_call_expr_loc (input_location
,
1177 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
1179 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
1180 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1182 tree tem
= gfc_walk_alloc_comps (src
, dest
,
1183 OMP_CLAUSE_DECL (clause
),
1184 WALK_ALLOC_COMPS_COPY_CTOR
);
1185 gfc_add_expr_to_block (&cond_block
, tem
);
1187 then_b
= gfc_finish_block (&cond_block
);
1189 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_COPYIN
)
1191 gfc_init_block (&cond_block
);
1192 if (GFC_DESCRIPTOR_TYPE_P (type
))
1194 tree tmp
= gfc_conv_descriptor_data_get (unshare_expr (dest
));
1195 tmp
= gfc_deallocate_with_status (tmp
, NULL_TREE
, NULL_TREE
,
1196 NULL_TREE
, NULL_TREE
, true, NULL
,
1197 GFC_CAF_COARRAY_NOCOARRAY
);
1198 gfc_add_expr_to_block (&cond_block
, tmp
);
1202 destptr
= gfc_evaluate_now (destptr
, &cond_block
);
1203 gfc_add_expr_to_block (&cond_block
, gfc_call_free (destptr
));
1204 gfc_add_modify (&cond_block
, unshare_expr (dest
),
1205 build_zero_cst (TREE_TYPE (dest
)));
1207 else_b
= gfc_finish_block (&cond_block
);
1209 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1210 unshare_expr (srcptr
), null_pointer_node
);
1211 gfc_add_expr_to_block (&block
,
1212 build3_loc (input_location
, COND_EXPR
,
1213 void_type_node
, cond
,
1217 gfc_add_expr_to_block (&block
, then_b
);
1219 return gfc_finish_block (&block
);
1223 gfc_omp_linear_clause_add_loop (stmtblock_t
*block
, tree dest
, tree src
,
1224 tree add
, tree nelems
)
1226 stmtblock_t tmpblock
;
1227 tree desta
, srca
, index
= gfc_create_var (gfc_array_index_type
, "S");
1228 nelems
= gfc_evaluate_now (nelems
, block
);
1230 gfc_init_block (&tmpblock
);
1231 if (TREE_CODE (TREE_TYPE (dest
)) == ARRAY_TYPE
)
1233 desta
= gfc_build_array_ref (dest
, index
, NULL
);
1234 srca
= gfc_build_array_ref (src
, index
, NULL
);
1238 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest
)));
1239 tree idx
= fold_build2 (MULT_EXPR
, sizetype
,
1240 fold_convert (sizetype
, index
),
1241 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest
))));
1242 desta
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
1243 TREE_TYPE (dest
), dest
,
1245 srca
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
1246 TREE_TYPE (src
), src
,
1249 gfc_add_modify (&tmpblock
, desta
,
1250 fold_build2 (PLUS_EXPR
, TREE_TYPE (desta
),
1254 gfc_init_loopinfo (&loop
);
1256 loop
.from
[0] = gfc_index_zero_node
;
1257 loop
.loopvar
[0] = index
;
1258 loop
.to
[0] = nelems
;
1259 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
1260 gfc_add_block_to_block (block
, &loop
.pre
);
1263 /* Build and return code for a constructor of DEST that initializes
1264 it to SRC plus ADD (ADD is scalar integer). */
1267 gfc_omp_clause_linear_ctor (tree clause
, tree dest
, tree src
, tree add
)
1269 tree type
= TREE_TYPE (dest
), ptr
, size
, nelems
= NULL_TREE
;
1272 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
1274 gfc_start_block (&block
);
1275 add
= gfc_evaluate_now (add
, &block
);
1277 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
1278 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
1279 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
1280 || !POINTER_TYPE_P (type
)))
1282 bool compute_nelts
= false;
1283 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
1284 if (!TYPE_DOMAIN (type
)
1285 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
1286 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
1287 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
1288 compute_nelts
= true;
1289 else if (VAR_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))))
1291 tree a
= DECL_ATTRIBUTES (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)));
1292 if (lookup_attribute ("omp dummy var", a
))
1293 compute_nelts
= true;
1297 nelems
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
1298 TYPE_SIZE_UNIT (type
),
1299 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
1300 nelems
= size_binop (MINUS_EXPR
, nelems
, size_one_node
);
1303 nelems
= array_type_nelts (type
);
1304 nelems
= fold_convert (gfc_array_index_type
, nelems
);
1306 gfc_omp_linear_clause_add_loop (&block
, dest
, src
, add
, nelems
);
1307 return gfc_finish_block (&block
);
1310 /* Allocatable arrays in LINEAR clauses need to be allocated
1311 and copied from SRC. */
1312 gfc_add_modify (&block
, dest
, src
);
1313 if (GFC_DESCRIPTOR_TYPE_P (type
))
1315 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
1316 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
1317 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
1319 gfc_conv_descriptor_lbound_get (dest
, rank
));
1320 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
1321 size
, gfc_index_one_node
);
1322 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
1323 size
= fold_build2_loc (input_location
, MULT_EXPR
,
1324 gfc_array_index_type
, size
,
1325 gfc_conv_descriptor_stride_get (dest
, rank
));
1326 tree esize
= fold_convert (gfc_array_index_type
,
1327 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
1328 nelems
= gfc_evaluate_now (unshare_expr (size
), &block
);
1329 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
1330 nelems
, unshare_expr (esize
));
1331 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
1333 nelems
= fold_build2_loc (input_location
, MINUS_EXPR
,
1334 gfc_array_index_type
, nelems
,
1335 gfc_index_one_node
);
1338 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
1339 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
1340 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
1341 if (GFC_DESCRIPTOR_TYPE_P (type
))
1343 gfc_conv_descriptor_data_set (&block
, unshare_expr (dest
), ptr
);
1344 tree etype
= gfc_get_element_type (type
);
1345 ptr
= fold_convert (build_pointer_type (etype
), ptr
);
1346 tree srcptr
= gfc_conv_descriptor_data_get (unshare_expr (src
));
1347 srcptr
= fold_convert (build_pointer_type (etype
), srcptr
);
1348 gfc_omp_linear_clause_add_loop (&block
, ptr
, srcptr
, add
, nelems
);
1352 gfc_add_modify (&block
, unshare_expr (dest
),
1353 fold_convert (TREE_TYPE (dest
), ptr
));
1354 ptr
= fold_convert (TREE_TYPE (dest
), ptr
);
1355 tree dstm
= build_fold_indirect_ref (ptr
);
1356 tree srcm
= build_fold_indirect_ref (unshare_expr (src
));
1357 gfc_add_modify (&block
, dstm
,
1358 fold_build2 (PLUS_EXPR
, TREE_TYPE (add
), srcm
, add
));
1360 return gfc_finish_block (&block
);
1363 /* Build and return code destructing DECL. Return NULL if nothing
1367 gfc_omp_clause_dtor (tree clause
, tree decl
)
1369 tree type
= TREE_TYPE (decl
), tem
;
1370 tree decl_type
= TREE_TYPE (OMP_CLAUSE_DECL (clause
));
1372 /* Only pointer was privatized; cf. gfc_omp_clause_copy_ctor. */
1373 if (DECL_P (OMP_CLAUSE_DECL (clause
))
1374 && GFC_DECL_ASSOCIATE_VAR_P (OMP_CLAUSE_DECL (clause
)))
1377 if (DECL_ARTIFICIAL (OMP_CLAUSE_DECL (clause
))
1378 && DECL_LANG_SPECIFIC (OMP_CLAUSE_DECL (clause
))
1379 && GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)))
1381 = TREE_TYPE (GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)));
1382 if (gfc_is_polymorphic_nonptr (decl_type
))
1384 if (POINTER_TYPE_P (decl_type
))
1385 decl_type
= TREE_TYPE (decl_type
);
1386 decl_type
= TREE_TYPE (TYPE_FIELDS (decl_type
));
1387 if (GFC_DESCRIPTOR_TYPE_P (decl_type
) || GFC_ARRAY_TYPE_P (decl_type
))
1388 fatal_error (input_location
,
1389 "Sorry, polymorphic arrays not yet supported for "
1391 stmtblock_t block
, cond_block
;
1392 gfc_start_block (&block
);
1393 gfc_init_block (&cond_block
);
1394 tree final
= gfc_class_vtab_final_get (decl
);
1395 tree size
= fold_convert (size_type_node
, gfc_class_vtab_size_get (decl
));
1397 gfc_init_se (&se
, NULL
);
1398 symbol_attribute attr
= {};
1399 tree data
= gfc_class_data_get (decl
);
1400 tree desc
= gfc_conv_scalar_to_descriptor (&se
, data
, attr
);
1402 /* Call class->_vpt->_finalize + free. */
1403 tree call
= build_fold_indirect_ref (final
);
1404 call
= build_call_expr_loc (input_location
, call
, 3,
1405 gfc_build_addr_expr (NULL
, desc
),
1406 size
, boolean_false_node
);
1407 gfc_add_block_to_block (&cond_block
, &se
.pre
);
1408 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
1409 gfc_add_block_to_block (&cond_block
, &se
.post
);
1410 /* Create: if (_vtab && _final) <cond_block> */
1411 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1412 gfc_class_vptr_get (decl
),
1414 tree cond2
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1415 final
, null_pointer_node
);
1416 cond
= fold_build2_loc (input_location
, TRUTH_ANDIF_EXPR
,
1417 boolean_type_node
, cond
, cond2
);
1418 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1419 void_type_node
, cond
,
1420 gfc_finish_block (&cond_block
), NULL_TREE
));
1421 call
= builtin_decl_explicit (BUILT_IN_FREE
);
1422 call
= build_call_expr_loc (input_location
, call
, 1, data
);
1423 gfc_add_expr_to_block (&block
, fold_convert (void_type_node
, call
));
1424 return gfc_finish_block (&block
);
1427 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
1428 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
1429 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
1430 || !POINTER_TYPE_P (type
)))
1432 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1433 return gfc_walk_alloc_comps (decl
, NULL_TREE
,
1434 OMP_CLAUSE_DECL (clause
),
1435 WALK_ALLOC_COMPS_DTOR
);
1439 if (GFC_DESCRIPTOR_TYPE_P (type
))
1441 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
1442 to be deallocated if they were allocated. */
1443 tem
= gfc_conv_descriptor_data_get (decl
);
1444 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
, NULL_TREE
,
1445 NULL_TREE
, true, NULL
,
1446 GFC_CAF_COARRAY_NOCOARRAY
);
1449 tem
= gfc_call_free (decl
);
1450 tem
= gfc_omp_unshare_expr (tem
);
1452 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1457 gfc_init_block (&block
);
1458 gfc_add_expr_to_block (&block
,
1459 gfc_walk_alloc_comps (decl
, NULL_TREE
,
1460 OMP_CLAUSE_DECL (clause
),
1461 WALK_ALLOC_COMPS_DTOR
));
1462 gfc_add_expr_to_block (&block
, tem
);
1463 then_b
= gfc_finish_block (&block
);
1465 tem
= fold_convert (pvoid_type_node
,
1466 GFC_DESCRIPTOR_TYPE_P (type
)
1467 ? gfc_conv_descriptor_data_get (decl
) : decl
);
1468 tem
= unshare_expr (tem
);
1469 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1470 tem
, null_pointer_node
);
1471 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1472 then_b
, build_empty_stmt (input_location
));
1477 /* Build a conditional expression in BLOCK. If COND_VAL is not
1478 null, then the block THEN_B is executed, otherwise ELSE_VAL
1479 is assigned to VAL. */
1482 gfc_build_cond_assign (stmtblock_t
*block
, tree val
, tree cond_val
,
1483 tree then_b
, tree else_val
)
1485 stmtblock_t cond_block
;
1486 tree else_b
= NULL_TREE
;
1487 tree val_ty
= TREE_TYPE (val
);
1491 gfc_init_block (&cond_block
);
1492 gfc_add_modify (&cond_block
, val
, fold_convert (val_ty
, else_val
));
1493 else_b
= gfc_finish_block (&cond_block
);
1495 gfc_add_expr_to_block (block
,
1496 build3_loc (input_location
, COND_EXPR
, void_type_node
,
1497 cond_val
, then_b
, else_b
));
1500 /* Build a conditional expression in BLOCK, returning a temporary
1501 variable containing the result. If COND_VAL is not null, then
1502 THEN_VAL will be assigned to the variable, otherwise ELSE_VAL
1507 gfc_build_cond_assign_expr (stmtblock_t
*block
, tree cond_val
,
1508 tree then_val
, tree else_val
)
1511 tree val_ty
= TREE_TYPE (then_val
);
1512 stmtblock_t cond_block
;
1514 val
= create_tmp_var (val_ty
);
1516 gfc_init_block (&cond_block
);
1517 gfc_add_modify (&cond_block
, val
, then_val
);
1518 tree then_b
= gfc_finish_block (&cond_block
);
1520 gfc_build_cond_assign (block
, val
, cond_val
, then_b
, else_val
);
1526 gfc_omp_finish_clause (tree c
, gimple_seq
*pre_p
, bool openacc
)
1528 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
1531 tree decl
= OMP_CLAUSE_DECL (c
);
1533 /* Assumed-size arrays can't be mapped implicitly, they have to be
1534 mapped explicitly using array sections. */
1535 if (TREE_CODE (decl
) == PARM_DECL
1536 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
1537 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
1538 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
1539 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
1542 error_at (OMP_CLAUSE_LOCATION (c
),
1543 "implicit mapping of assumed size array %qD", decl
);
1547 tree c2
= NULL_TREE
, c3
= NULL_TREE
, c4
= NULL_TREE
;
1548 tree present
= gfc_omp_check_optional_argument (decl
, true);
1549 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
1551 if (!gfc_omp_privatize_by_reference (decl
)
1552 && !GFC_DECL_GET_SCALAR_POINTER (decl
)
1553 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1554 && !GFC_DECL_CRAY_POINTEE (decl
)
1555 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
1557 tree orig_decl
= decl
;
1559 c4
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1560 OMP_CLAUSE_SET_MAP_KIND (c4
, GOMP_MAP_POINTER
);
1561 OMP_CLAUSE_DECL (c4
) = decl
;
1562 OMP_CLAUSE_SIZE (c4
) = size_int (0);
1563 decl
= build_fold_indirect_ref (decl
);
1565 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1566 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1568 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1569 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_POINTER
);
1570 OMP_CLAUSE_DECL (c2
) = decl
;
1571 OMP_CLAUSE_SIZE (c2
) = size_int (0);
1574 gfc_start_block (&block
);
1576 ptr
= gfc_build_cond_assign_expr (&block
, present
, decl
,
1578 gimplify_and_add (gfc_finish_block (&block
), pre_p
);
1579 ptr
= build_fold_indirect_ref (ptr
);
1580 OMP_CLAUSE_DECL (c
) = ptr
;
1581 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
1585 OMP_CLAUSE_DECL (c
) = decl
;
1586 OMP_CLAUSE_SIZE (c
) = NULL_TREE
;
1588 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1589 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1590 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1592 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1593 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1594 OMP_CLAUSE_DECL (c3
) = unshare_expr (decl
);
1595 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1596 decl
= build_fold_indirect_ref (decl
);
1597 OMP_CLAUSE_DECL (c
) = decl
;
1600 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1603 gfc_start_block (&block
);
1604 tree type
= TREE_TYPE (decl
);
1605 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1607 /* OpenMP: automatically map pointer targets with the pointer;
1608 hence, always update the descriptor/pointer itself.
1609 NOTE: This also remaps the pointer for allocatable arrays with
1610 'target' attribute which also don't have the 'restrict' qualifier. */
1611 bool always_modifier
= false;
1614 && !(TYPE_QUALS (TREE_TYPE (ptr
)) & TYPE_QUAL_RESTRICT
))
1615 always_modifier
= true;
1618 ptr
= gfc_build_cond_assign_expr (&block
, present
, ptr
,
1620 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr
)));
1621 ptr
= build_fold_indirect_ref (ptr
);
1622 OMP_CLAUSE_DECL (c
) = ptr
;
1623 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1624 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_TO_PSET
);
1627 ptr
= create_tmp_var (TREE_TYPE (TREE_OPERAND (decl
, 0)));
1628 gfc_add_modify (&block
, ptr
, TREE_OPERAND (decl
, 0));
1630 OMP_CLAUSE_DECL (c2
) = build_fold_indirect_ref (ptr
);
1633 OMP_CLAUSE_DECL (c2
) = decl
;
1634 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (type
);
1635 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1636 OMP_CLAUSE_SET_MAP_KIND (c3
, always_modifier
? GOMP_MAP_ALWAYS_POINTER
1637 : GOMP_MAP_POINTER
);
1640 ptr
= gfc_conv_descriptor_data_get (decl
);
1641 ptr
= gfc_build_addr_expr (NULL
, ptr
);
1642 ptr
= gfc_build_cond_assign_expr (&block
, present
,
1643 ptr
, null_pointer_node
);
1644 ptr
= build_fold_indirect_ref (ptr
);
1645 OMP_CLAUSE_DECL (c3
) = ptr
;
1648 OMP_CLAUSE_DECL (c3
) = gfc_conv_descriptor_data_get (decl
);
1649 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1650 tree size
= create_tmp_var (gfc_array_index_type
);
1651 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
1652 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
1653 if (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
1654 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
1655 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
)
1657 stmtblock_t cond_block
;
1658 tree tem
, then_b
, else_b
, zero
, cond
;
1660 gfc_init_block (&cond_block
);
1661 tem
= gfc_full_array_size (&cond_block
, decl
,
1662 GFC_TYPE_ARRAY_RANK (type
));
1663 gfc_add_modify (&cond_block
, size
, tem
);
1664 gfc_add_modify (&cond_block
, size
,
1665 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1667 then_b
= gfc_finish_block (&cond_block
);
1668 gfc_init_block (&cond_block
);
1669 zero
= build_int_cst (gfc_array_index_type
, 0);
1670 gfc_add_modify (&cond_block
, size
, zero
);
1671 else_b
= gfc_finish_block (&cond_block
);
1672 tem
= gfc_conv_descriptor_data_get (decl
);
1673 tem
= fold_convert (pvoid_type_node
, tem
);
1674 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1675 boolean_type_node
, tem
, null_pointer_node
);
1678 cond
= fold_build2_loc (input_location
, TRUTH_ANDIF_EXPR
,
1679 boolean_type_node
, present
, cond
);
1681 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1682 void_type_node
, cond
,
1687 stmtblock_t cond_block
;
1690 gfc_init_block (&cond_block
);
1691 gfc_add_modify (&cond_block
, size
,
1692 gfc_full_array_size (&cond_block
, decl
,
1693 GFC_TYPE_ARRAY_RANK (type
)));
1694 gfc_add_modify (&cond_block
, size
,
1695 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1697 then_b
= gfc_finish_block (&cond_block
);
1699 gfc_build_cond_assign (&block
, size
, present
, then_b
,
1700 build_int_cst (gfc_array_index_type
, 0));
1704 gfc_add_modify (&block
, size
,
1705 gfc_full_array_size (&block
, decl
,
1706 GFC_TYPE_ARRAY_RANK (type
)));
1707 gfc_add_modify (&block
, size
,
1708 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1711 OMP_CLAUSE_SIZE (c
) = size
;
1712 tree stmt
= gfc_finish_block (&block
);
1713 gimplify_and_add (stmt
, pre_p
);
1716 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
1718 = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
1719 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1720 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
1721 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
1722 OMP_CLAUSE_SIZE (c
) = size_int (0);
1725 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (last
);
1726 OMP_CLAUSE_CHAIN (last
) = c2
;
1731 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (last
);
1732 OMP_CLAUSE_CHAIN (last
) = c3
;
1737 OMP_CLAUSE_CHAIN (c4
) = OMP_CLAUSE_CHAIN (last
);
1738 OMP_CLAUSE_CHAIN (last
) = c4
;
1743 /* Return true if DECL is a scalar variable (for the purpose of
1744 implicit firstprivatization/mapping). Only if 'ptr_alloc_ok.'
1745 is true, allocatables and pointers are permitted. */
1748 gfc_omp_scalar_p (tree decl
, bool ptr_alloc_ok
)
1750 tree type
= TREE_TYPE (decl
);
1751 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1752 type
= TREE_TYPE (type
);
1753 if (TREE_CODE (type
) == POINTER_TYPE
)
1755 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1756 || GFC_DECL_GET_SCALAR_POINTER (decl
))
1760 type
= TREE_TYPE (type
);
1762 if (GFC_ARRAY_TYPE_P (type
)
1763 || GFC_CLASS_TYPE_P (type
))
1766 if ((TREE_CODE (type
) == ARRAY_TYPE
|| TREE_CODE (type
) == INTEGER_TYPE
)
1767 && TYPE_STRING_FLAG (type
))
1769 if (INTEGRAL_TYPE_P (type
)
1770 || SCALAR_FLOAT_TYPE_P (type
)
1771 || COMPLEX_FLOAT_TYPE_P (type
))
1777 /* Return true if DECL is a scalar with target attribute but does not have the
1778 allocatable (or pointer) attribute (for the purpose of implicit mapping). */
1781 gfc_omp_scalar_target_p (tree decl
)
1783 return (DECL_P (decl
) && GFC_DECL_GET_SCALAR_TARGET (decl
)
1784 && gfc_omp_scalar_p (decl
, false));
1788 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1789 disregarded in OpenMP construct, because it is going to be
1790 remapped during OpenMP lowering. SHARED is true if DECL
1791 is going to be shared, false if it is going to be privatized. */
1794 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
1796 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1797 && DECL_HAS_VALUE_EXPR_P (decl
))
1799 tree value
= DECL_VALUE_EXPR (decl
);
1801 if (TREE_CODE (value
) == COMPONENT_REF
1802 && VAR_P (TREE_OPERAND (value
, 0))
1803 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1805 /* If variable in COMMON or EQUIVALENCE is privatized, return
1806 true, as just that variable is supposed to be privatized,
1807 not the whole COMMON or whole EQUIVALENCE.
1808 For shared variables in COMMON or EQUIVALENCE, let them be
1809 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1810 from the same COMMON or EQUIVALENCE just one sharing of the
1811 whole COMMON or EQUIVALENCE is enough. */
1816 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
1822 /* Return true if DECL that is shared iff SHARED is true should
1823 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1827 gfc_omp_private_debug_clause (tree decl
, bool shared
)
1829 if (GFC_DECL_CRAY_POINTEE (decl
))
1832 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1833 && DECL_HAS_VALUE_EXPR_P (decl
))
1835 tree value
= DECL_VALUE_EXPR (decl
);
1837 if (TREE_CODE (value
) == COMPONENT_REF
1838 && VAR_P (TREE_OPERAND (value
, 0))
1839 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1846 /* Register language specific type size variables as potentially OpenMP
1847 firstprivate variables. */
1850 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
1852 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
1856 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
1857 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
1859 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
1860 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
1861 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
1863 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
1864 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
1870 gfc_trans_add_clause (tree node
, tree tail
)
1872 OMP_CLAUSE_CHAIN (node
) = tail
;
1877 gfc_trans_omp_variable (gfc_symbol
*sym
, bool declare_simd
)
1882 gfc_symbol
*proc_sym
;
1883 gfc_formal_arglist
*f
;
1885 gcc_assert (sym
->attr
.dummy
);
1886 proc_sym
= sym
->ns
->proc_name
;
1887 if (proc_sym
->attr
.entry_master
)
1889 if (gfc_return_by_reference (proc_sym
))
1892 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1895 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1901 return build_int_cst (integer_type_node
, cnt
);
1904 tree t
= gfc_get_symbol_decl (sym
);
1908 bool alternate_entry
;
1911 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1912 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1913 && sym
->result
== sym
;
1914 entry_master
= sym
->attr
.result
1915 && sym
->ns
->proc_name
->attr
.entry_master
1916 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1917 parent_decl
= current_function_decl
1918 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1920 if ((t
== parent_decl
&& return_value
)
1921 || (sym
->ns
&& sym
->ns
->proc_name
1922 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1923 && (alternate_entry
|| entry_master
)))
1928 /* Special case for assigning the return value of a function.
1929 Self recursive functions must have an explicit return value. */
1930 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1931 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1933 /* Similarly for alternate entry points. */
1934 else if (alternate_entry
1935 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1938 gfc_entry_list
*el
= NULL
;
1940 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1943 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1948 else if (entry_master
1949 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1951 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1957 gfc_trans_omp_variable_list (enum omp_clause_code code
,
1958 gfc_omp_namelist
*namelist
, tree list
,
1961 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1962 if (namelist
->sym
->attr
.referenced
|| declare_simd
)
1964 tree t
= gfc_trans_omp_variable (namelist
->sym
, declare_simd
);
1965 if (t
!= error_mark_node
)
1968 node
= build_omp_clause (input_location
, code
);
1969 OMP_CLAUSE_DECL (node
) = t
;
1970 list
= gfc_trans_add_clause (node
, list
);
1972 if (code
== OMP_CLAUSE_LASTPRIVATE
1973 && namelist
->u
.lastprivate_conditional
)
1974 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (node
) = 1;
1980 struct omp_udr_find_orig_data
1982 gfc_omp_udr
*omp_udr
;
1987 omp_udr_find_orig (gfc_expr
**e
, int *walk_subtrees ATTRIBUTE_UNUSED
,
1990 struct omp_udr_find_orig_data
*cd
= (struct omp_udr_find_orig_data
*) data
;
1991 if ((*e
)->expr_type
== EXPR_VARIABLE
1992 && (*e
)->symtree
->n
.sym
== cd
->omp_udr
->omp_orig
)
1993 cd
->omp_orig_seen
= true;
1999 gfc_trans_omp_array_reduction_or_udr (tree c
, gfc_omp_namelist
*n
, locus where
)
2001 gfc_symbol
*sym
= n
->sym
;
2002 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
2003 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
2004 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
2005 gfc_symbol omp_var_copy
[4];
2006 gfc_expr
*e1
, *e2
, *e3
, *e4
;
2008 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
2009 locus old_loc
= gfc_current_locus
;
2012 gfc_omp_udr
*udr
= n
->u2
.udr
? n
->u2
.udr
->udr
: NULL
;
2014 decl
= OMP_CLAUSE_DECL (c
);
2015 gfc_current_locus
= where
;
2016 type
= TREE_TYPE (decl
);
2017 outer_decl
= create_tmp_var_raw (type
);
2018 if (TREE_CODE (decl
) == PARM_DECL
2019 && TREE_CODE (type
) == REFERENCE_TYPE
2020 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
2021 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
2023 decl
= build_fold_indirect_ref (decl
);
2024 type
= TREE_TYPE (type
);
2027 /* Create a fake symbol for init value. */
2028 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
2029 init_val_sym
.ns
= sym
->ns
;
2030 init_val_sym
.name
= sym
->name
;
2031 init_val_sym
.ts
= sym
->ts
;
2032 init_val_sym
.attr
.referenced
= 1;
2033 init_val_sym
.declared_at
= where
;
2034 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
2035 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
2036 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
2037 else if (udr
->initializer_ns
)
2038 backend_decl
= NULL
;
2040 switch (sym
->ts
.type
)
2046 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
2049 backend_decl
= NULL_TREE
;
2052 init_val_sym
.backend_decl
= backend_decl
;
2054 /* Create a fake symbol for the outer array reference. */
2057 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
2058 outer_sym
.attr
.dummy
= 0;
2059 outer_sym
.attr
.result
= 0;
2060 outer_sym
.attr
.flavor
= FL_VARIABLE
;
2061 outer_sym
.backend_decl
= outer_decl
;
2062 if (decl
!= OMP_CLAUSE_DECL (c
))
2063 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
2065 /* Create fake symtrees for it. */
2066 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
2067 symtree1
->n
.sym
= sym
;
2068 gcc_assert (symtree1
== root1
);
2070 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
2071 symtree2
->n
.sym
= &init_val_sym
;
2072 gcc_assert (symtree2
== root2
);
2074 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
2075 symtree3
->n
.sym
= &outer_sym
;
2076 gcc_assert (symtree3
== root3
);
2078 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
2081 omp_var_copy
[0] = *udr
->omp_out
;
2082 omp_var_copy
[1] = *udr
->omp_in
;
2083 *udr
->omp_out
= outer_sym
;
2084 *udr
->omp_in
= *sym
;
2085 if (udr
->initializer_ns
)
2087 omp_var_copy
[2] = *udr
->omp_priv
;
2088 omp_var_copy
[3] = *udr
->omp_orig
;
2089 *udr
->omp_priv
= *sym
;
2090 *udr
->omp_orig
= outer_sym
;
2094 /* Create expressions. */
2095 e1
= gfc_get_expr ();
2096 e1
->expr_type
= EXPR_VARIABLE
;
2098 e1
->symtree
= symtree1
;
2100 if (sym
->attr
.dimension
)
2102 e1
->ref
= ref
= gfc_get_ref ();
2103 ref
->type
= REF_ARRAY
;
2104 ref
->u
.ar
.where
= where
;
2105 ref
->u
.ar
.as
= sym
->as
;
2106 ref
->u
.ar
.type
= AR_FULL
;
2107 ref
->u
.ar
.dimen
= 0;
2109 t
= gfc_resolve_expr (e1
);
2113 if (backend_decl
!= NULL_TREE
)
2115 e2
= gfc_get_expr ();
2116 e2
->expr_type
= EXPR_VARIABLE
;
2118 e2
->symtree
= symtree2
;
2120 t
= gfc_resolve_expr (e2
);
2123 else if (udr
->initializer_ns
== NULL
)
2125 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
2126 e2
= gfc_default_initializer (&sym
->ts
);
2128 t
= gfc_resolve_expr (e2
);
2131 else if (n
->u2
.udr
->initializer
->op
== EXEC_ASSIGN
)
2133 e2
= gfc_copy_expr (n
->u2
.udr
->initializer
->expr2
);
2134 t
= gfc_resolve_expr (e2
);
2137 if (udr
&& udr
->initializer_ns
)
2139 struct omp_udr_find_orig_data cd
;
2141 cd
.omp_orig_seen
= false;
2142 gfc_code_walker (&n
->u2
.udr
->initializer
,
2143 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
2144 if (cd
.omp_orig_seen
)
2145 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
2148 e3
= gfc_copy_expr (e1
);
2149 e3
->symtree
= symtree3
;
2150 t
= gfc_resolve_expr (e3
);
2155 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
2159 e4
= gfc_add (e3
, e1
);
2162 e4
= gfc_multiply (e3
, e1
);
2164 case TRUTH_ANDIF_EXPR
:
2165 e4
= gfc_and (e3
, e1
);
2167 case TRUTH_ORIF_EXPR
:
2168 e4
= gfc_or (e3
, e1
);
2171 e4
= gfc_eqv (e3
, e1
);
2174 e4
= gfc_neqv (e3
, e1
);
2192 if (n
->u2
.udr
->combiner
->op
== EXEC_ASSIGN
)
2195 e3
= gfc_copy_expr (n
->u2
.udr
->combiner
->expr1
);
2196 e4
= gfc_copy_expr (n
->u2
.udr
->combiner
->expr2
);
2197 t
= gfc_resolve_expr (e3
);
2199 t
= gfc_resolve_expr (e4
);
2208 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
2209 intrinsic_sym
.ns
= sym
->ns
;
2210 intrinsic_sym
.name
= iname
;
2211 intrinsic_sym
.ts
= sym
->ts
;
2212 intrinsic_sym
.attr
.referenced
= 1;
2213 intrinsic_sym
.attr
.intrinsic
= 1;
2214 intrinsic_sym
.attr
.function
= 1;
2215 intrinsic_sym
.attr
.implicit_type
= 1;
2216 intrinsic_sym
.result
= &intrinsic_sym
;
2217 intrinsic_sym
.declared_at
= where
;
2219 symtree4
= gfc_new_symtree (&root4
, iname
);
2220 symtree4
->n
.sym
= &intrinsic_sym
;
2221 gcc_assert (symtree4
== root4
);
2223 e4
= gfc_get_expr ();
2224 e4
->expr_type
= EXPR_FUNCTION
;
2226 e4
->symtree
= symtree4
;
2227 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
2228 e4
->value
.function
.actual
->expr
= e3
;
2229 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
2230 e4
->value
.function
.actual
->next
->expr
= e1
;
2232 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
2234 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
2235 e1
= gfc_copy_expr (e1
);
2236 e3
= gfc_copy_expr (e3
);
2237 t
= gfc_resolve_expr (e4
);
2241 /* Create the init statement list. */
2244 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
2246 stmt
= gfc_trans_call (n
->u2
.udr
->initializer
, false,
2247 NULL_TREE
, NULL_TREE
, false);
2248 if (TREE_CODE (stmt
) != BIND_EXPR
)
2249 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
2252 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
2254 /* Create the merge statement list. */
2257 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
2259 stmt
= gfc_trans_call (n
->u2
.udr
->combiner
, false,
2260 NULL_TREE
, NULL_TREE
, false);
2261 if (TREE_CODE (stmt
) != BIND_EXPR
)
2262 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
2265 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
2267 /* And stick the placeholder VAR_DECL into the clause as well. */
2268 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
2270 gfc_current_locus
= old_loc
;
2283 gfc_free_array_spec (outer_sym
.as
);
2287 *udr
->omp_out
= omp_var_copy
[0];
2288 *udr
->omp_in
= omp_var_copy
[1];
2289 if (udr
->initializer_ns
)
2291 *udr
->omp_priv
= omp_var_copy
[2];
2292 *udr
->omp_orig
= omp_var_copy
[3];
2298 gfc_trans_omp_reduction_list (int kind
, gfc_omp_namelist
*namelist
, tree list
,
2299 locus where
, bool mark_addressable
)
2301 omp_clause_code clause
= OMP_CLAUSE_REDUCTION
;
2304 case OMP_LIST_REDUCTION
:
2305 case OMP_LIST_REDUCTION_INSCAN
:
2306 case OMP_LIST_REDUCTION_TASK
:
2308 case OMP_LIST_IN_REDUCTION
:
2309 clause
= OMP_CLAUSE_IN_REDUCTION
;
2311 case OMP_LIST_TASK_REDUCTION
:
2312 clause
= OMP_CLAUSE_TASK_REDUCTION
;
2317 for (; namelist
!= NULL
; namelist
= namelist
->next
)
2318 if (namelist
->sym
->attr
.referenced
)
2320 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
2321 if (t
!= error_mark_node
)
2323 tree node
= build_omp_clause (gfc_get_location (&namelist
->where
),
2325 OMP_CLAUSE_DECL (node
) = t
;
2326 if (mark_addressable
)
2327 TREE_ADDRESSABLE (t
) = 1;
2328 if (kind
== OMP_LIST_REDUCTION_INSCAN
)
2329 OMP_CLAUSE_REDUCTION_INSCAN (node
) = 1;
2330 if (kind
== OMP_LIST_REDUCTION_TASK
)
2331 OMP_CLAUSE_REDUCTION_TASK (node
) = 1;
2332 switch (namelist
->u
.reduction_op
)
2334 case OMP_REDUCTION_PLUS
:
2335 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
2337 case OMP_REDUCTION_MINUS
:
2338 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
2340 case OMP_REDUCTION_TIMES
:
2341 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
2343 case OMP_REDUCTION_AND
:
2344 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
2346 case OMP_REDUCTION_OR
:
2347 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
2349 case OMP_REDUCTION_EQV
:
2350 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
2352 case OMP_REDUCTION_NEQV
:
2353 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
2355 case OMP_REDUCTION_MAX
:
2356 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
2358 case OMP_REDUCTION_MIN
:
2359 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
2361 case OMP_REDUCTION_IAND
:
2362 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
2364 case OMP_REDUCTION_IOR
:
2365 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
2367 case OMP_REDUCTION_IEOR
:
2368 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
2370 case OMP_REDUCTION_USER
:
2371 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
2376 if (namelist
->sym
->attr
.dimension
2377 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
2378 || namelist
->sym
->attr
.allocatable
)
2379 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
2380 list
= gfc_trans_add_clause (node
, list
);
2387 gfc_convert_expr_to_tree (stmtblock_t
*block
, gfc_expr
*expr
)
2392 gfc_init_se (&se
, NULL
);
2393 gfc_conv_expr (&se
, expr
);
2394 gfc_add_block_to_block (block
, &se
.pre
);
2395 result
= gfc_evaluate_now (se
.expr
, block
);
2396 gfc_add_block_to_block (block
, &se
.post
);
2401 static vec
<tree
, va_heap
, vl_embed
> *doacross_steps
;
2404 /* Translate an array section or array element. */
2407 gfc_trans_omp_array_section (stmtblock_t
*block
, gfc_exec_op op
,
2408 gfc_omp_namelist
*n
, tree decl
, bool element
,
2409 bool openmp
, gomp_map_kind ptr_kind
, tree
&node
,
2410 tree
&node2
, tree
&node3
, tree
&node4
)
2414 tree elemsz
= NULL_TREE
;
2416 gfc_init_se (&se
, NULL
);
2419 gfc_conv_expr_reference (&se
, n
->expr
);
2420 gfc_add_block_to_block (block
, &se
.pre
);
2425 gfc_conv_expr_descriptor (&se
, n
->expr
);
2426 ptr
= gfc_conv_array_data (se
.expr
);
2428 if (n
->expr
->ts
.type
== BT_CHARACTER
&& n
->expr
->ts
.deferred
)
2430 gcc_assert (se
.string_length
);
2431 tree len
= gfc_evaluate_now (se
.string_length
, block
);
2432 elemsz
= gfc_get_char_type (n
->expr
->ts
.kind
);
2433 elemsz
= TYPE_SIZE_UNIT (elemsz
);
2434 elemsz
= fold_build2 (MULT_EXPR
, size_type_node
,
2435 fold_convert (size_type_node
, len
), elemsz
);
2440 elemsz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ptr
)));
2441 OMP_CLAUSE_SIZE (node
) = elemsz
;
2445 tree type
= TREE_TYPE (se
.expr
);
2446 gfc_add_block_to_block (block
, &se
.pre
);
2447 OMP_CLAUSE_SIZE (node
) = gfc_full_array_size (block
, se
.expr
,
2448 GFC_TYPE_ARRAY_RANK (type
));
2450 elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2451 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2452 OMP_CLAUSE_SIZE (node
) = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2453 OMP_CLAUSE_SIZE (node
), elemsz
);
2455 gcc_assert (se
.post
.head
== NULL_TREE
);
2456 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr
)));
2457 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2458 ptr
= fold_convert (ptrdiff_type_node
, ptr
);
2460 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2461 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
)))
2462 && ptr_kind
== GOMP_MAP_POINTER
2463 && op
!= EXEC_OMP_TARGET_EXIT_DATA
2464 && OMP_CLAUSE_MAP_KIND (node
) != GOMP_MAP_RELEASE
2465 && OMP_CLAUSE_MAP_KIND (node
) != GOMP_MAP_DELETE
)
2468 node4
= build_omp_clause (input_location
,
2470 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2471 OMP_CLAUSE_DECL (node4
) = decl
;
2472 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2473 decl
= build_fold_indirect_ref (decl
);
2475 else if (ptr_kind
== GOMP_MAP_ALWAYS_POINTER
2476 && n
->expr
->ts
.type
== BT_CHARACTER
2477 && n
->expr
->ts
.deferred
)
2479 gomp_map_kind map_kind
;
2480 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_DELETE
)
2481 map_kind
= OMP_CLAUSE_MAP_KIND (node
);
2482 else if (op
== EXEC_OMP_TARGET_EXIT_DATA
2483 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_RELEASE
)
2484 map_kind
= GOMP_MAP_RELEASE
;
2486 map_kind
= GOMP_MAP_TO
;
2487 gcc_assert (se
.string_length
);
2488 node4
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2489 OMP_CLAUSE_SET_MAP_KIND (node4
, map_kind
);
2490 OMP_CLAUSE_DECL (node4
) = se
.string_length
;
2491 OMP_CLAUSE_SIZE (node4
) = TYPE_SIZE_UNIT (gfc_charlen_type_node
);
2493 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2495 tree type
= TREE_TYPE (decl
);
2496 ptr2
= gfc_conv_descriptor_data_get (decl
);
2497 node2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2498 OMP_CLAUSE_DECL (node2
) = decl
;
2499 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2500 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_DELETE
2501 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_RELEASE
2502 || op
== EXEC_OMP_TARGET_EXIT_DATA
2503 || op
== EXEC_OACC_EXIT_DATA
)
2505 gomp_map_kind map_kind
2506 = OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_DELETE
? GOMP_MAP_DELETE
2508 OMP_CLAUSE_SET_MAP_KIND (node2
, map_kind
);
2509 OMP_CLAUSE_RELEASE_DESCRIPTOR (node2
) = 1;
2512 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2513 node3
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2514 OMP_CLAUSE_SET_MAP_KIND (node3
, ptr_kind
);
2515 OMP_CLAUSE_DECL (node3
) = gfc_conv_descriptor_data_get (decl
);
2516 /* This purposely does not include GOMP_MAP_ALWAYS_POINTER. The extra
2517 cast prevents gimplify.cc from recognising it as being part of the
2518 struct - and adding an 'alloc: for the 'desc.data' pointer, which
2519 would break as the 'desc' (the descriptor) is also mapped
2520 (see node4 above). */
2521 if (ptr_kind
== GOMP_MAP_ATTACH_DETACH
&& !openmp
)
2522 STRIP_NOPS (OMP_CLAUSE_DECL (node3
));
2526 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2529 ptr2
= build_fold_addr_expr (decl
);
2530 offset
= fold_build2 (MINUS_EXPR
, ptrdiff_type_node
, ptr
,
2531 fold_convert (ptrdiff_type_node
, ptr2
));
2532 offset
= build2 (TRUNC_DIV_EXPR
, ptrdiff_type_node
,
2533 offset
, fold_convert (ptrdiff_type_node
, elemsz
));
2534 offset
= build4_loc (input_location
, ARRAY_REF
,
2535 TREE_TYPE (TREE_TYPE (decl
)),
2536 decl
, offset
, NULL_TREE
, NULL_TREE
);
2537 OMP_CLAUSE_DECL (node
) = offset
;
2539 if (ptr_kind
== GOMP_MAP_ATTACH_DETACH
&& openmp
)
2544 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2547 node3
= build_omp_clause (input_location
,
2549 OMP_CLAUSE_SET_MAP_KIND (node3
, ptr_kind
);
2550 OMP_CLAUSE_DECL (node3
) = decl
;
2552 ptr2
= fold_convert (ptrdiff_type_node
, ptr2
);
2553 OMP_CLAUSE_SIZE (node3
) = fold_build2 (MINUS_EXPR
, ptrdiff_type_node
,
2558 handle_iterator (gfc_namespace
*ns
, stmtblock_t
*iter_block
, tree block
)
2560 tree list
= NULL_TREE
;
2561 for (gfc_symbol
*sym
= ns
->omp_affinity_iterators
; sym
; sym
= sym
->tlink
)
2566 tree last
= make_tree_vec (6);
2567 tree iter_var
= gfc_get_symbol_decl (sym
);
2568 tree type
= TREE_TYPE (iter_var
);
2569 TREE_VEC_ELT (last
, 0) = iter_var
;
2570 DECL_CHAIN (iter_var
) = BLOCK_VARS (block
);
2571 BLOCK_VARS (block
) = iter_var
;
2574 c
= gfc_constructor_first (sym
->value
->value
.constructor
);
2575 gfc_init_se (&se
, NULL
);
2576 gfc_conv_expr (&se
, c
->expr
);
2577 gfc_add_block_to_block (iter_block
, &se
.pre
);
2578 gfc_add_block_to_block (iter_block
, &se
.post
);
2579 TREE_VEC_ELT (last
, 1) = fold_convert (type
,
2580 gfc_evaluate_now (se
.expr
,
2583 c
= gfc_constructor_next (c
);
2584 gfc_init_se (&se
, NULL
);
2585 gfc_conv_expr (&se
, c
->expr
);
2586 gfc_add_block_to_block (iter_block
, &se
.pre
);
2587 gfc_add_block_to_block (iter_block
, &se
.post
);
2588 TREE_VEC_ELT (last
, 2) = fold_convert (type
,
2589 gfc_evaluate_now (se
.expr
,
2592 c
= gfc_constructor_next (c
);
2596 gfc_init_se (&se
, NULL
);
2597 gfc_conv_expr (&se
, c
->expr
);
2598 gfc_add_block_to_block (iter_block
, &se
.pre
);
2599 gfc_add_block_to_block (iter_block
, &se
.post
);
2600 gfc_conv_expr (&se
, c
->expr
);
2601 step
= fold_convert (type
,
2602 gfc_evaluate_now (se
.expr
,
2606 step
= build_int_cst (type
, 1);
2607 TREE_VEC_ELT (last
, 3) = step
;
2609 TREE_VEC_ELT (last
, 4) = save_expr (step
);
2610 TREE_CHAIN (last
) = list
;
2616 /* To alleviate quadratic behaviour in checking each entry of a
2617 gfc_omp_namelist against every other entry, we build a hashtable indexed by
2618 gfc_symbol pointer, which we can use in the usual case that a map
2619 expression has a symbol as its root term. Return a namelist based on the
2620 root symbol used by N, building a new table in SYM_ROOTED_NL using the
2621 gfc_omp_namelist N2 (all clauses) if we haven't done so already. */
2623 static gfc_omp_namelist
*
2624 get_symbol_rooted_namelist (hash_map
<gfc_symbol
*,
2625 gfc_omp_namelist
*> *&sym_rooted_nl
,
2626 gfc_omp_namelist
*n
,
2627 gfc_omp_namelist
*n2
, bool *sym_based
)
2629 /* Early-out if we have a NULL clause list (e.g. for OpenACC). */
2633 gfc_symbol
*use_sym
= NULL
;
2635 /* We're only interested in cases where we have an expression, e.g. a
2636 component access. */
2637 if (n
->expr
&& n
->expr
->expr_type
== EXPR_VARIABLE
&& n
->expr
->symtree
)
2638 use_sym
= n
->expr
->symtree
->n
.sym
;
2647 sym_rooted_nl
= new hash_map
<gfc_symbol
*, gfc_omp_namelist
*> ();
2649 for (; n2
!= NULL
; n2
= n2
->next
)
2652 || n2
->expr
->expr_type
!= EXPR_VARIABLE
2653 || !n2
->expr
->symtree
)
2656 gfc_omp_namelist
*nl_copy
= gfc_get_omp_namelist ();
2657 memcpy (nl_copy
, n2
, sizeof *nl_copy
);
2658 nl_copy
->u2
.duplicate_of
= n2
;
2659 nl_copy
->next
= NULL
;
2661 gfc_symbol
*idx_sym
= n2
->expr
->symtree
->n
.sym
;
2664 gfc_omp_namelist
*&entry
2665 = sym_rooted_nl
->get_or_insert (idx_sym
, &existed
);
2667 nl_copy
->next
= entry
;
2672 gfc_omp_namelist
**n2_sym
= sym_rooted_nl
->get (use_sym
);
2684 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
2685 locus where
, bool declare_simd
= false,
2686 bool openacc
= false, gfc_exec_op op
= EXEC_NOP
)
2688 tree omp_clauses
= NULL_TREE
, prev_clauses
, chunk_size
, c
;
2689 tree iterator
= NULL_TREE
;
2690 tree tree_block
= NULL_TREE
;
2691 stmtblock_t iter_block
;
2693 enum omp_clause_code clause_code
;
2694 gfc_omp_namelist
*prev
= NULL
;
2697 if (clauses
== NULL
)
2700 hash_map
<gfc_symbol
*, gfc_omp_namelist
*> *sym_rooted_nl
= NULL
;
2702 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
2704 gfc_omp_namelist
*n
= clauses
->lists
[list
];
2710 case OMP_LIST_REDUCTION
:
2711 case OMP_LIST_REDUCTION_INSCAN
:
2712 case OMP_LIST_REDUCTION_TASK
:
2713 case OMP_LIST_IN_REDUCTION
:
2714 case OMP_LIST_TASK_REDUCTION
:
2715 /* An OpenACC async clause indicates the need to set reduction
2716 arguments addressable, to allow asynchronous copy-out. */
2717 omp_clauses
= gfc_trans_omp_reduction_list (list
, n
, omp_clauses
,
2718 where
, clauses
->async
);
2720 case OMP_LIST_PRIVATE
:
2721 clause_code
= OMP_CLAUSE_PRIVATE
;
2723 case OMP_LIST_SHARED
:
2724 clause_code
= OMP_CLAUSE_SHARED
;
2726 case OMP_LIST_FIRSTPRIVATE
:
2727 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
2729 case OMP_LIST_LASTPRIVATE
:
2730 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
2732 case OMP_LIST_COPYIN
:
2733 clause_code
= OMP_CLAUSE_COPYIN
;
2735 case OMP_LIST_COPYPRIVATE
:
2736 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
2738 case OMP_LIST_UNIFORM
:
2739 clause_code
= OMP_CLAUSE_UNIFORM
;
2741 case OMP_LIST_USE_DEVICE
:
2742 case OMP_LIST_USE_DEVICE_PTR
:
2743 clause_code
= OMP_CLAUSE_USE_DEVICE_PTR
;
2745 case OMP_LIST_USE_DEVICE_ADDR
:
2746 clause_code
= OMP_CLAUSE_USE_DEVICE_ADDR
;
2748 case OMP_LIST_IS_DEVICE_PTR
:
2749 clause_code
= OMP_CLAUSE_IS_DEVICE_PTR
;
2751 case OMP_LIST_HAS_DEVICE_ADDR
:
2752 clause_code
= OMP_CLAUSE_HAS_DEVICE_ADDR
;
2754 case OMP_LIST_NONTEMPORAL
:
2755 clause_code
= OMP_CLAUSE_NONTEMPORAL
;
2757 case OMP_LIST_SCAN_IN
:
2758 clause_code
= OMP_CLAUSE_INCLUSIVE
;
2760 case OMP_LIST_SCAN_EX
:
2761 clause_code
= OMP_CLAUSE_EXCLUSIVE
;
2766 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
2769 case OMP_LIST_ALIGNED
:
2770 for (; n
!= NULL
; n
= n
->next
)
2771 if (n
->sym
->attr
.referenced
|| declare_simd
)
2773 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
2774 if (t
!= error_mark_node
)
2776 tree node
= build_omp_clause (input_location
,
2777 OMP_CLAUSE_ALIGNED
);
2778 OMP_CLAUSE_DECL (node
) = t
;
2784 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
2787 gfc_init_se (&se
, NULL
);
2788 gfc_conv_expr (&se
, n
->expr
);
2789 gfc_add_block_to_block (block
, &se
.pre
);
2790 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
2791 gfc_add_block_to_block (block
, &se
.post
);
2793 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
2795 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2799 case OMP_LIST_ALLOCATE
:
2801 tree allocator_
= NULL_TREE
;
2802 gfc_expr
*alloc_expr
= NULL
;
2803 for (; n
!= NULL
; n
= n
->next
)
2804 if (n
->sym
->attr
.referenced
)
2806 tree t
= gfc_trans_omp_variable (n
->sym
, false);
2807 if (t
!= error_mark_node
)
2809 tree node
= build_omp_clause (input_location
,
2810 OMP_CLAUSE_ALLOCATE
);
2811 OMP_CLAUSE_DECL (node
) = t
;
2812 if (n
->u2
.allocator
)
2814 if (alloc_expr
!= n
->u2
.allocator
)
2816 gfc_init_se (&se
, NULL
);
2817 gfc_conv_expr (&se
, n
->u2
.allocator
);
2818 gfc_add_block_to_block (block
, &se
.pre
);
2819 allocator_
= gfc_evaluate_now (se
.expr
, block
);
2820 gfc_add_block_to_block (block
, &se
.post
);
2822 OMP_CLAUSE_ALLOCATE_ALLOCATOR (node
) = allocator_
;
2824 alloc_expr
= n
->u2
.allocator
;
2828 gfc_init_se (&se
, NULL
);
2829 gfc_conv_expr (&se
, n
->u
.align
);
2830 gcc_assert (CONSTANT_CLASS_P (se
.expr
)
2831 && se
.pre
.head
== NULL
2832 && se
.post
.head
== NULL
);
2834 OMP_CLAUSE_ALLOCATE_ALIGN (node
) = align_
;
2836 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2840 alloc_expr
= n
->u2
.allocator
;
2843 case OMP_LIST_LINEAR
:
2845 gfc_expr
*last_step_expr
= NULL
;
2846 tree last_step
= NULL_TREE
;
2847 bool last_step_parm
= false;
2849 for (; n
!= NULL
; n
= n
->next
)
2853 last_step_expr
= n
->expr
;
2854 last_step
= NULL_TREE
;
2855 last_step_parm
= false;
2857 if (n
->sym
->attr
.referenced
|| declare_simd
)
2859 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
2860 if (t
!= error_mark_node
)
2862 tree node
= build_omp_clause (input_location
,
2864 OMP_CLAUSE_DECL (node
) = t
;
2865 omp_clause_linear_kind kind
;
2866 switch (n
->u
.linear
.op
)
2868 case OMP_LINEAR_DEFAULT
:
2869 kind
= OMP_CLAUSE_LINEAR_DEFAULT
;
2871 case OMP_LINEAR_REF
:
2872 kind
= OMP_CLAUSE_LINEAR_REF
;
2874 case OMP_LINEAR_VAL
:
2875 kind
= OMP_CLAUSE_LINEAR_VAL
;
2877 case OMP_LINEAR_UVAL
:
2878 kind
= OMP_CLAUSE_LINEAR_UVAL
;
2883 OMP_CLAUSE_LINEAR_KIND (node
) = kind
;
2884 OMP_CLAUSE_LINEAR_OLD_LINEAR_MODIFIER (node
)
2885 = n
->u
.linear
.old_modifier
;
2886 if (last_step_expr
&& last_step
== NULL_TREE
)
2890 gfc_init_se (&se
, NULL
);
2891 gfc_conv_expr (&se
, last_step_expr
);
2892 gfc_add_block_to_block (block
, &se
.pre
);
2893 last_step
= gfc_evaluate_now (se
.expr
, block
);
2894 gfc_add_block_to_block (block
, &se
.post
);
2896 else if (last_step_expr
->expr_type
== EXPR_VARIABLE
)
2898 gfc_symbol
*s
= last_step_expr
->symtree
->n
.sym
;
2899 last_step
= gfc_trans_omp_variable (s
, true);
2900 last_step_parm
= true;
2904 = gfc_conv_constant_to_tree (last_step_expr
);
2908 OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (node
) = 1;
2909 OMP_CLAUSE_LINEAR_STEP (node
) = last_step
;
2913 if (kind
== OMP_CLAUSE_LINEAR_REF
)
2916 if (n
->sym
->attr
.flavor
== FL_PROCEDURE
)
2918 type
= gfc_get_function_type (n
->sym
);
2919 type
= build_pointer_type (type
);
2922 type
= gfc_sym_type (n
->sym
);
2923 if (POINTER_TYPE_P (type
))
2924 type
= TREE_TYPE (type
);
2925 /* Otherwise to be determined what exactly
2927 tree t
= fold_convert (sizetype
, last_step
);
2928 t
= size_binop (MULT_EXPR
, t
,
2929 TYPE_SIZE_UNIT (type
));
2930 OMP_CLAUSE_LINEAR_STEP (node
) = t
;
2935 = gfc_typenode_for_spec (&n
->sym
->ts
);
2936 OMP_CLAUSE_LINEAR_STEP (node
)
2937 = fold_convert (type
, last_step
);
2940 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
2941 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
2942 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2948 case OMP_LIST_AFFINITY
:
2949 case OMP_LIST_DEPEND
:
2950 iterator
= NULL_TREE
;
2952 prev_clauses
= omp_clauses
;
2953 for (; n
!= NULL
; n
= n
->next
)
2955 if (iterator
&& prev
->u2
.ns
!= n
->u2
.ns
)
2957 BLOCK_SUBBLOCKS (tree_block
) = gfc_finish_block (&iter_block
);
2958 TREE_VEC_ELT (iterator
, 5) = tree_block
;
2959 for (tree c
= omp_clauses
; c
!= prev_clauses
;
2960 c
= OMP_CLAUSE_CHAIN (c
))
2961 OMP_CLAUSE_DECL (c
) = build_tree_list (iterator
,
2962 OMP_CLAUSE_DECL (c
));
2963 prev_clauses
= omp_clauses
;
2964 iterator
= NULL_TREE
;
2966 if (n
->u2
.ns
&& (!prev
|| prev
->u2
.ns
!= n
->u2
.ns
))
2968 gfc_init_block (&iter_block
);
2969 tree_block
= make_node (BLOCK
);
2970 TREE_USED (tree_block
) = 1;
2971 BLOCK_VARS (tree_block
) = NULL_TREE
;
2972 iterator
= handle_iterator (n
->u2
.ns
, block
,
2976 gfc_init_block (&iter_block
);
2978 if (list
== OMP_LIST_DEPEND
2979 && (n
->u
.depend_doacross_op
== OMP_DOACROSS_SINK_FIRST
2980 || n
->u
.depend_doacross_op
== OMP_DEPEND_SINK_FIRST
))
2982 tree vec
= NULL_TREE
;
2985 = n
->u
.depend_doacross_op
== OMP_DEPEND_SINK_FIRST
;
2988 tree addend
= integer_zero_node
, t
;
2990 if (n
->sym
&& n
->expr
)
2992 addend
= gfc_conv_constant_to_tree (n
->expr
);
2993 if (TREE_CODE (addend
) == INTEGER_CST
2994 && tree_int_cst_sgn (addend
) == -1)
2997 addend
= const_unop (NEGATE_EXPR
,
2998 TREE_TYPE (addend
), addend
);
3003 t
= null_pointer_node
; /* "omp_cur_iteration - 1". */
3005 t
= gfc_trans_omp_variable (n
->sym
, false);
3006 if (t
!= error_mark_node
)
3008 if (i
< vec_safe_length (doacross_steps
)
3009 && !integer_zerop (addend
)
3010 && (*doacross_steps
)[i
])
3012 tree step
= (*doacross_steps
)[i
];
3013 addend
= fold_convert (TREE_TYPE (step
), addend
);
3014 addend
= build2 (TRUNC_DIV_EXPR
,
3015 TREE_TYPE (step
), addend
, step
);
3017 vec
= tree_cons (addend
, t
, vec
);
3019 OMP_CLAUSE_DOACROSS_SINK_NEGATIVE (vec
) = 1;
3022 || n
->next
->u
.depend_doacross_op
!= OMP_DOACROSS_SINK
)
3026 if (vec
== NULL_TREE
)
3029 tree node
= build_omp_clause (input_location
,
3030 OMP_CLAUSE_DOACROSS
);
3031 OMP_CLAUSE_DOACROSS_KIND (node
) = OMP_CLAUSE_DOACROSS_SINK
;
3032 OMP_CLAUSE_DOACROSS_DEPEND (node
) = is_depend
;
3033 OMP_CLAUSE_DECL (node
) = nreverse (vec
);
3034 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
3038 if (n
->sym
&& !n
->sym
->attr
.referenced
)
3041 tree node
= build_omp_clause (input_location
,
3042 list
== OMP_LIST_DEPEND
3044 : OMP_CLAUSE_AFFINITY
);
3045 if (n
->sym
== NULL
) /* omp_all_memory */
3046 OMP_CLAUSE_DECL (node
) = null_pointer_node
;
3047 else if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
3049 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
3050 if (gfc_omp_privatize_by_reference (decl
))
3051 decl
= build_fold_indirect_ref (decl
);
3052 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
3054 decl
= gfc_conv_descriptor_data_get (decl
);
3055 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
3056 decl
= build_fold_indirect_ref (decl
);
3058 else if (n
->sym
->attr
.allocatable
|| n
->sym
->attr
.pointer
)
3059 decl
= build_fold_indirect_ref (decl
);
3060 else if (DECL_P (decl
))
3061 TREE_ADDRESSABLE (decl
) = 1;
3062 OMP_CLAUSE_DECL (node
) = decl
;
3067 gfc_init_se (&se
, NULL
);
3068 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
3070 gfc_conv_expr_reference (&se
, n
->expr
);
3075 gfc_conv_expr_descriptor (&se
, n
->expr
);
3076 ptr
= gfc_conv_array_data (se
.expr
);
3078 gfc_add_block_to_block (&iter_block
, &se
.pre
);
3079 gfc_add_block_to_block (&iter_block
, &se
.post
);
3080 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr
)));
3081 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
3083 if (list
== OMP_LIST_DEPEND
)
3084 switch (n
->u
.depend_doacross_op
)
3087 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
3089 case OMP_DEPEND_OUT
:
3090 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
3092 case OMP_DEPEND_INOUT
:
3093 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
3095 case OMP_DEPEND_INOUTSET
:
3096 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUTSET
;
3098 case OMP_DEPEND_MUTEXINOUTSET
:
3099 OMP_CLAUSE_DEPEND_KIND (node
)
3100 = OMP_CLAUSE_DEPEND_MUTEXINOUTSET
;
3102 case OMP_DEPEND_DEPOBJ
:
3103 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_DEPOBJ
;
3109 gfc_add_block_to_block (block
, &iter_block
);
3110 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
3114 BLOCK_SUBBLOCKS (tree_block
) = gfc_finish_block (&iter_block
);
3115 TREE_VEC_ELT (iterator
, 5) = tree_block
;
3116 for (tree c
= omp_clauses
; c
!= prev_clauses
;
3117 c
= OMP_CLAUSE_CHAIN (c
))
3118 OMP_CLAUSE_DECL (c
) = build_tree_list (iterator
,
3119 OMP_CLAUSE_DECL (c
));
3123 for (; n
!= NULL
; n
= n
->next
)
3125 if (!n
->sym
->attr
.referenced
)
3128 bool always_modifier
= false;
3129 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
3130 tree node2
= NULL_TREE
;
3131 tree node3
= NULL_TREE
;
3132 tree node4
= NULL_TREE
;
3133 tree node5
= NULL_TREE
;
3135 /* OpenMP: automatically map pointer targets with the pointer;
3136 hence, always update the descriptor/pointer itself. */
3138 && ((n
->expr
== NULL
&& n
->sym
->attr
.pointer
)
3139 || (n
->expr
&& gfc_expr_attr (n
->expr
).pointer
)))
3140 always_modifier
= true;
3142 switch (n
->u
.map_op
)
3145 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALLOC
);
3147 case OMP_MAP_IF_PRESENT
:
3148 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_IF_PRESENT
);
3150 case OMP_MAP_ATTACH
:
3151 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ATTACH
);
3154 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TO
);
3157 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FROM
);
3159 case OMP_MAP_TOFROM
:
3160 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TOFROM
);
3162 case OMP_MAP_ALWAYS_TO
:
3163 always_modifier
= true;
3164 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TO
);
3166 case OMP_MAP_ALWAYS_FROM
:
3167 always_modifier
= true;
3168 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_FROM
);
3170 case OMP_MAP_ALWAYS_TOFROM
:
3171 always_modifier
= true;
3172 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TOFROM
);
3174 case OMP_MAP_PRESENT_ALLOC
:
3175 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_PRESENT_ALLOC
);
3177 case OMP_MAP_PRESENT_TO
:
3178 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_PRESENT_TO
);
3180 case OMP_MAP_PRESENT_FROM
:
3181 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_PRESENT_FROM
);
3183 case OMP_MAP_PRESENT_TOFROM
:
3184 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_PRESENT_TOFROM
);
3186 case OMP_MAP_ALWAYS_PRESENT_TO
:
3187 always_modifier
= true;
3188 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_PRESENT_TO
);
3190 case OMP_MAP_ALWAYS_PRESENT_FROM
:
3191 always_modifier
= true;
3192 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_PRESENT_FROM
);
3194 case OMP_MAP_ALWAYS_PRESENT_TOFROM
:
3195 always_modifier
= true;
3196 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_PRESENT_TOFROM
);
3198 case OMP_MAP_RELEASE
:
3199 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_RELEASE
);
3201 case OMP_MAP_DELETE
:
3202 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DELETE
);
3204 case OMP_MAP_DETACH
:
3205 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DETACH
);
3207 case OMP_MAP_FORCE_ALLOC
:
3208 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_ALLOC
);
3210 case OMP_MAP_FORCE_TO
:
3211 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TO
);
3213 case OMP_MAP_FORCE_FROM
:
3214 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_FROM
);
3216 case OMP_MAP_FORCE_TOFROM
:
3217 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TOFROM
);
3219 case OMP_MAP_FORCE_PRESENT
:
3220 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_PRESENT
);
3222 case OMP_MAP_FORCE_DEVICEPTR
:
3223 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEVICEPTR
);
3229 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
3231 TREE_ADDRESSABLE (decl
) = 1;
3233 gfc_ref
*lastref
= NULL
;
3236 for (gfc_ref
*ref
= n
->expr
->ref
; ref
; ref
= ref
->next
)
3237 if (ref
->type
== REF_COMPONENT
|| ref
->type
== REF_ARRAY
)
3240 bool allocatable
= false, pointer
= false;
3242 if (lastref
&& lastref
->type
== REF_COMPONENT
)
3244 gfc_component
*c
= lastref
->u
.c
.component
;
3246 if (c
->ts
.type
== BT_CLASS
)
3248 pointer
= CLASS_DATA (c
)->attr
.class_pointer
;
3249 allocatable
= CLASS_DATA (c
)->attr
.allocatable
;
3253 pointer
= c
->attr
.pointer
;
3254 allocatable
= c
->attr
.allocatable
;
3259 || (n
->expr
->ref
->type
== REF_ARRAY
3260 && n
->expr
->ref
->u
.ar
.type
== AR_FULL
))
3262 gomp_map_kind map_kind
;
3263 tree type
= TREE_TYPE (decl
);
3264 if (n
->sym
->ts
.type
== BT_CHARACTER
3265 && n
->sym
->ts
.deferred
3266 && n
->sym
->attr
.omp_declare_target
3267 && (always_modifier
|| n
->sym
->attr
.pointer
)
3268 && op
!= EXEC_OMP_TARGET_EXIT_DATA
3269 && n
->u
.map_op
!= OMP_MAP_DELETE
3270 && n
->u
.map_op
!= OMP_MAP_RELEASE
)
3272 gcc_assert (n
->sym
->ts
.u
.cl
->backend_decl
);
3273 node5
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
3274 OMP_CLAUSE_SET_MAP_KIND (node5
, GOMP_MAP_ALWAYS_TO
);
3275 OMP_CLAUSE_DECL (node5
) = n
->sym
->ts
.u
.cl
->backend_decl
;
3276 OMP_CLAUSE_SIZE (node5
)
3277 = TYPE_SIZE_UNIT (gfc_charlen_type_node
);
3280 tree present
= gfc_omp_check_optional_argument (decl
, true);
3281 if (openacc
&& n
->sym
->ts
.type
== BT_CLASS
)
3283 if (n
->sym
->attr
.optional
)
3284 sorry ("optional class parameter");
3285 tree ptr
= gfc_class_data_get (decl
);
3286 ptr
= build_fold_indirect_ref (ptr
);
3287 OMP_CLAUSE_DECL (node
) = ptr
;
3288 OMP_CLAUSE_SIZE (node
) = gfc_class_vtab_size_get (decl
);
3289 node2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
3290 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_ATTACH_DETACH
);
3291 OMP_CLAUSE_DECL (node2
) = gfc_class_data_get (decl
);
3292 OMP_CLAUSE_SIZE (node2
) = size_int (0);
3293 goto finalize_map_clause
;
3295 else if (POINTER_TYPE_P (type
)
3296 && (gfc_omp_privatize_by_reference (decl
)
3297 || GFC_DECL_GET_SCALAR_POINTER (decl
)
3298 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
3299 || GFC_DECL_CRAY_POINTEE (decl
)
3300 || GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
3301 || (n
->sym
->ts
.type
== BT_DERIVED
3302 && (n
->sym
->ts
.u
.derived
->ts
.f90_type
3305 tree orig_decl
= decl
;
3307 /* For nonallocatable, nonpointer arrays, a temporary
3308 variable is generated, but this one is only defined if
3309 the variable is present; hence, we now set it to NULL
3310 to avoid accessing undefined variables. We cannot use
3311 a temporary variable here as otherwise the replacement
3312 of the variables in omp-low.cc will not work. */
3313 if (present
&& GFC_ARRAY_TYPE_P (type
))
3315 tree tmp
= fold_build2_loc (input_location
,
3317 void_type_node
, decl
,
3319 tree cond
= fold_build1_loc (input_location
,
3323 gfc_add_expr_to_block (block
,
3324 build3_loc (input_location
,
3330 /* For descriptor types, the unmapping happens below. */
3331 if (op
!= EXEC_OMP_TARGET_EXIT_DATA
3332 || !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
3334 enum gomp_map_kind gmk
= GOMP_MAP_POINTER
;
3335 if (op
== EXEC_OMP_TARGET_EXIT_DATA
3336 && n
->u
.map_op
== OMP_MAP_DELETE
)
3337 gmk
= GOMP_MAP_DELETE
;
3338 else if (op
== EXEC_OMP_TARGET_EXIT_DATA
)
3339 gmk
= GOMP_MAP_RELEASE
;
3341 if (gmk
== GOMP_MAP_RELEASE
|| gmk
== GOMP_MAP_DELETE
)
3342 size
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
3344 size
= size_int (0);
3345 node4
= build_omp_clause (input_location
,
3347 OMP_CLAUSE_SET_MAP_KIND (node4
, gmk
);
3348 OMP_CLAUSE_DECL (node4
) = decl
;
3349 OMP_CLAUSE_SIZE (node4
) = size
;
3351 decl
= build_fold_indirect_ref (decl
);
3352 if ((TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
3353 || gfc_omp_is_optional_argument (orig_decl
))
3354 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
3355 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
3357 enum gomp_map_kind gmk
;
3358 if (op
== EXEC_OMP_TARGET_EXIT_DATA
3359 && n
->u
.map_op
== OMP_MAP_DELETE
)
3360 gmk
= GOMP_MAP_DELETE
;
3361 else if (op
== EXEC_OMP_TARGET_EXIT_DATA
)
3362 gmk
= GOMP_MAP_RELEASE
;
3364 gmk
= GOMP_MAP_POINTER
;
3366 if (gmk
== GOMP_MAP_RELEASE
|| gmk
== GOMP_MAP_DELETE
)
3367 size
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
3369 size
= size_int (0);
3370 node3
= build_omp_clause (input_location
,
3372 OMP_CLAUSE_SET_MAP_KIND (node3
, gmk
);
3373 OMP_CLAUSE_DECL (node3
) = decl
;
3374 OMP_CLAUSE_SIZE (node3
) = size
;
3375 decl
= build_fold_indirect_ref (decl
);
3378 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
3380 tree type
= TREE_TYPE (decl
);
3381 tree ptr
= gfc_conv_descriptor_data_get (decl
);
3383 ptr
= gfc_build_cond_assign_expr (block
, present
, ptr
,
3385 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr
)));
3386 ptr
= build_fold_indirect_ref (ptr
);
3387 OMP_CLAUSE_DECL (node
) = ptr
;
3388 node2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
3389 OMP_CLAUSE_DECL (node2
) = decl
;
3390 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
3391 if (n
->u
.map_op
== OMP_MAP_DELETE
)
3392 map_kind
= GOMP_MAP_DELETE
;
3393 else if (op
== EXEC_OMP_TARGET_EXIT_DATA
3394 || n
->u
.map_op
== OMP_MAP_RELEASE
)
3395 map_kind
= GOMP_MAP_RELEASE
;
3397 map_kind
= GOMP_MAP_TO_PSET
;
3398 OMP_CLAUSE_SET_MAP_KIND (node2
, map_kind
);
3400 if (op
!= EXEC_OMP_TARGET_EXIT_DATA
3401 && n
->u
.map_op
!= OMP_MAP_DELETE
3402 && n
->u
.map_op
!= OMP_MAP_RELEASE
)
3404 node3
= build_omp_clause (input_location
,
3408 ptr
= gfc_conv_descriptor_data_get (decl
);
3409 ptr
= gfc_build_addr_expr (NULL
, ptr
);
3410 ptr
= gfc_build_cond_assign_expr (
3411 block
, present
, ptr
, null_pointer_node
);
3412 ptr
= build_fold_indirect_ref (ptr
);
3413 OMP_CLAUSE_DECL (node3
) = ptr
;
3416 OMP_CLAUSE_DECL (node3
)
3417 = gfc_conv_descriptor_data_get (decl
);
3418 OMP_CLAUSE_SIZE (node3
) = size_int (0);
3420 if (n
->u
.map_op
== OMP_MAP_ATTACH
)
3422 /* Standalone attach clauses used with arrays with
3423 descriptors must copy the descriptor to the
3424 target, else they won't have anything to
3425 perform the attachment onto (see OpenACC 2.6,
3426 "2.6.3. Data Structures with Pointers"). */
3427 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_ATTACH
);
3428 /* We don't want to map PTR at all in this case,
3429 so delete its node and shuffle the others
3434 goto finalize_map_clause
;
3436 else if (n
->u
.map_op
== OMP_MAP_DETACH
)
3438 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_DETACH
);
3439 /* Similarly to above, we don't want to unmap PTR
3444 goto finalize_map_clause
;
3447 OMP_CLAUSE_SET_MAP_KIND (node3
,
3449 ? GOMP_MAP_ALWAYS_POINTER
3450 : GOMP_MAP_POINTER
);
3453 /* We have to check for n->sym->attr.dimension because
3454 of scalar coarrays. */
3455 if ((n
->sym
->attr
.pointer
|| n
->sym
->attr
.allocatable
)
3456 && n
->sym
->attr
.dimension
)
3458 stmtblock_t cond_block
;
3460 = gfc_create_var (gfc_array_index_type
, NULL
);
3461 tree tem
, then_b
, else_b
, zero
, cond
;
3463 gfc_init_block (&cond_block
);
3465 = gfc_full_array_size (&cond_block
, decl
,
3466 GFC_TYPE_ARRAY_RANK (type
));
3468 if (n
->sym
->ts
.type
== BT_CHARACTER
3469 && n
->sym
->ts
.deferred
)
3471 tree len
= n
->sym
->ts
.u
.cl
->backend_decl
;
3472 len
= fold_convert (size_type_node
, len
);
3473 elemsz
= gfc_get_char_type (n
->sym
->ts
.kind
);
3474 elemsz
= TYPE_SIZE_UNIT (elemsz
);
3475 elemsz
= fold_build2 (MULT_EXPR
, size_type_node
,
3480 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3481 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3482 tem
= fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3484 gfc_add_modify (&cond_block
, size
, tem
);
3485 then_b
= gfc_finish_block (&cond_block
);
3486 gfc_init_block (&cond_block
);
3487 zero
= build_int_cst (gfc_array_index_type
, 0);
3488 gfc_add_modify (&cond_block
, size
, zero
);
3489 else_b
= gfc_finish_block (&cond_block
);
3490 tem
= gfc_conv_descriptor_data_get (decl
);
3491 tem
= fold_convert (pvoid_type_node
, tem
);
3492 cond
= fold_build2_loc (input_location
, NE_EXPR
,
3494 tem
, null_pointer_node
);
3496 cond
= fold_build2_loc (input_location
,
3500 gfc_add_expr_to_block (block
,
3501 build3_loc (input_location
,
3506 OMP_CLAUSE_SIZE (node
) = size
;
3508 else if (n
->sym
->attr
.dimension
)
3510 stmtblock_t cond_block
;
3511 gfc_init_block (&cond_block
);
3512 tree size
= gfc_full_array_size (&cond_block
, decl
,
3513 GFC_TYPE_ARRAY_RANK (type
));
3515 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3516 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3517 size
= fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3519 size
= gfc_evaluate_now (size
, &cond_block
);
3522 tree var
= gfc_create_var (gfc_array_index_type
,
3524 gfc_add_modify (&cond_block
, var
, size
);
3525 tree cond_body
= gfc_finish_block (&cond_block
);
3526 tree cond
= build3_loc (input_location
, COND_EXPR
,
3527 void_type_node
, present
,
3528 cond_body
, NULL_TREE
);
3529 gfc_add_expr_to_block (block
, cond
);
3530 OMP_CLAUSE_SIZE (node
) = var
;
3534 gfc_add_block_to_block (block
, &cond_block
);
3535 OMP_CLAUSE_SIZE (node
) = size
;
3540 && INDIRECT_REF_P (decl
)
3541 && INDIRECT_REF_P (TREE_OPERAND (decl
, 0)))
3543 /* A single indirectref is handled by the middle end. */
3544 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
3545 decl
= TREE_OPERAND (decl
, 0);
3546 decl
= gfc_build_cond_assign_expr (block
, present
, decl
,
3548 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (decl
);
3551 OMP_CLAUSE_DECL (node
) = decl
;
3553 if (!n
->sym
->attr
.dimension
3554 && n
->sym
->ts
.type
== BT_CHARACTER
3555 && n
->sym
->ts
.deferred
)
3559 gcc_assert (TREE_CODE (decl
) == INDIRECT_REF
);
3560 decl
= TREE_OPERAND (decl
, 0);
3562 tree cond
= fold_build2_loc (input_location
, NE_EXPR
,
3564 decl
, null_pointer_node
);
3566 cond
= fold_build2_loc (input_location
,
3570 tree len
= n
->sym
->ts
.u
.cl
->backend_decl
;
3571 len
= fold_convert (size_type_node
, len
);
3572 tree size
= gfc_get_char_type (n
->sym
->ts
.kind
);
3573 size
= TYPE_SIZE_UNIT (size
);
3574 size
= fold_build2 (MULT_EXPR
, size_type_node
, len
, size
);
3575 size
= build3_loc (input_location
,
3580 size
= gfc_evaluate_now (size
, block
);
3581 OMP_CLAUSE_SIZE (node
) = size
;
3585 && n
->expr
->expr_type
== EXPR_VARIABLE
3586 && n
->expr
->ref
->type
== REF_ARRAY
3587 && !n
->expr
->ref
->next
)
3589 /* An array element or array section which is not part of a
3590 derived type, etc. */
3591 bool element
= n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
;
3592 tree type
= TREE_TYPE (decl
);
3593 gomp_map_kind k
= GOMP_MAP_POINTER
;
3595 && !GFC_DESCRIPTOR_TYPE_P (type
)
3596 && !(POINTER_TYPE_P (type
)
3597 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))))
3598 k
= GOMP_MAP_FIRSTPRIVATE_POINTER
;
3599 gfc_trans_omp_array_section (block
, op
, n
, decl
, element
,
3600 !openacc
, k
, node
, node2
,
3604 && n
->expr
->expr_type
== EXPR_VARIABLE
3605 && (n
->expr
->ref
->type
== REF_COMPONENT
3606 || n
->expr
->ref
->type
== REF_ARRAY
)
3608 && lastref
->type
== REF_COMPONENT
3609 && lastref
->u
.c
.component
->ts
.type
!= BT_CLASS
3610 && lastref
->u
.c
.component
->ts
.type
!= BT_DERIVED
3611 && !lastref
->u
.c
.component
->attr
.dimension
)
3613 /* Derived type access with last component being a scalar. */
3614 gfc_init_se (&se
, NULL
);
3616 gfc_conv_expr (&se
, n
->expr
);
3617 gfc_add_block_to_block (block
, &se
.pre
);
3618 /* For BT_CHARACTER a pointer is returned. */
3619 OMP_CLAUSE_DECL (node
)
3620 = POINTER_TYPE_P (TREE_TYPE (se
.expr
))
3621 ? build_fold_indirect_ref (se
.expr
) : se
.expr
;
3622 gfc_add_block_to_block (block
, &se
.post
);
3623 if (pointer
|| allocatable
)
3625 /* If it's a bare attach/detach clause, we just want
3626 to perform a single attach/detach operation, of the
3627 pointer itself, not of the pointed-to object. */
3629 && (n
->u
.map_op
== OMP_MAP_ATTACH
3630 || n
->u
.map_op
== OMP_MAP_DETACH
))
3632 OMP_CLAUSE_DECL (node
)
3633 = build_fold_addr_expr (OMP_CLAUSE_DECL (node
));
3634 OMP_CLAUSE_SIZE (node
) = size_zero_node
;
3635 goto finalize_map_clause
;
3638 node2
= build_omp_clause (input_location
,
3640 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_ATTACH_DETACH
);
3641 OMP_CLAUSE_DECL (node2
)
3642 = POINTER_TYPE_P (TREE_TYPE (se
.expr
))
3644 : gfc_build_addr_expr (NULL
, se
.expr
);
3645 OMP_CLAUSE_SIZE (node2
) = size_int (0);
3647 && n
->expr
->ts
.type
== BT_CHARACTER
3648 && n
->expr
->ts
.deferred
)
3650 gcc_assert (se
.string_length
);
3652 = gfc_get_char_type (n
->expr
->ts
.kind
);
3653 OMP_CLAUSE_SIZE (node
)
3654 = fold_build2 (MULT_EXPR
, size_type_node
,
3655 fold_convert (size_type_node
,
3657 TYPE_SIZE_UNIT (tmp
));
3659 if (n
->u
.map_op
== OMP_MAP_DELETE
)
3660 kind
= GOMP_MAP_DELETE
;
3661 else if (op
== EXEC_OMP_TARGET_EXIT_DATA
)
3662 kind
= GOMP_MAP_RELEASE
;
3665 node3
= build_omp_clause (input_location
,
3667 OMP_CLAUSE_SET_MAP_KIND (node3
, kind
);
3668 OMP_CLAUSE_DECL (node3
) = se
.string_length
;
3669 OMP_CLAUSE_SIZE (node3
)
3670 = TYPE_SIZE_UNIT (gfc_charlen_type_node
);
3675 && n
->expr
->expr_type
== EXPR_VARIABLE
3676 && (n
->expr
->ref
->type
== REF_COMPONENT
3677 || n
->expr
->ref
->type
== REF_ARRAY
))
3679 gfc_init_se (&se
, NULL
);
3680 se
.expr
= gfc_maybe_dereference_var (n
->sym
, decl
);
3682 for (gfc_ref
*ref
= n
->expr
->ref
; ref
; ref
= ref
->next
)
3684 if (ref
->type
== REF_COMPONENT
)
3686 if (ref
->u
.c
.sym
->attr
.extension
)
3687 conv_parent_component_references (&se
, ref
);
3689 gfc_conv_component_ref (&se
, ref
);
3691 else if (ref
->type
== REF_ARRAY
)
3693 if (ref
->u
.ar
.type
== AR_ELEMENT
&& ref
->next
)
3694 gfc_conv_array_ref (&se
, &ref
->u
.ar
, n
->expr
,
3697 gcc_assert (!ref
->next
);
3700 sorry ("unhandled expression type");
3703 tree inner
= se
.expr
;
3705 /* Last component is a derived type or class pointer. */
3706 if (lastref
->type
== REF_COMPONENT
3707 && (lastref
->u
.c
.component
->ts
.type
== BT_DERIVED
3708 || lastref
->u
.c
.component
->ts
.type
== BT_CLASS
))
3710 if (pointer
|| (openacc
&& allocatable
))
3712 /* If it's a bare attach/detach clause, we just want
3713 to perform a single attach/detach operation, of the
3714 pointer itself, not of the pointed-to object. */
3716 && (n
->u
.map_op
== OMP_MAP_ATTACH
3717 || n
->u
.map_op
== OMP_MAP_DETACH
))
3719 OMP_CLAUSE_DECL (node
)
3720 = build_fold_addr_expr (inner
);
3721 OMP_CLAUSE_SIZE (node
) = size_zero_node
;
3722 goto finalize_map_clause
;
3725 gfc_omp_namelist
*n2
3726 = openacc
? NULL
: clauses
->lists
[OMP_LIST_MAP
];
3729 n2
= get_symbol_rooted_namelist (sym_rooted_nl
, n
,
3732 /* If the last reference is a pointer to a derived
3733 type ("foo%dt_ptr"), check if any subcomponents
3734 of the same derived type member are being mapped
3735 elsewhere in the clause list ("foo%dt_ptr%x",
3736 etc.). If we have such subcomponent mappings,
3737 we only create an ALLOC node for the pointer
3738 itself, and inhibit mapping the whole derived
3741 for (; n2
!= NULL
; n2
= n2
->next
)
3743 if ((!sym_based
&& n
== n2
)
3744 || (sym_based
&& n
== n2
->u2
.duplicate_of
)
3748 if (!gfc_omp_expr_prefix_same (n
->expr
,
3752 gfc_ref
*ref1
= n
->expr
->ref
;
3753 gfc_ref
*ref2
= n2
->expr
->ref
;
3755 while (ref1
->next
&& ref2
->next
)
3763 inner
= build_fold_addr_expr (inner
);
3764 OMP_CLAUSE_SET_MAP_KIND (node
,
3766 OMP_CLAUSE_DECL (node
) = inner
;
3767 OMP_CLAUSE_SIZE (node
)
3768 = TYPE_SIZE_UNIT (TREE_TYPE (inner
));
3769 goto finalize_map_clause
;
3775 if (lastref
->u
.c
.component
->ts
.type
== BT_CLASS
)
3777 data
= gfc_class_data_get (inner
);
3778 gcc_assert (POINTER_TYPE_P (TREE_TYPE (data
)));
3779 data
= build_fold_indirect_ref (data
);
3780 size
= gfc_class_vtab_size_get (inner
);
3782 else /* BT_DERIVED. */
3785 size
= TYPE_SIZE_UNIT (TREE_TYPE (inner
));
3788 OMP_CLAUSE_DECL (node
) = data
;
3789 OMP_CLAUSE_SIZE (node
) = size
;
3790 node2
= build_omp_clause (input_location
,
3792 OMP_CLAUSE_SET_MAP_KIND (node2
,
3793 GOMP_MAP_ATTACH_DETACH
);
3794 OMP_CLAUSE_DECL (node2
) = build_fold_addr_expr (data
);
3795 OMP_CLAUSE_SIZE (node2
) = size_int (0);
3799 OMP_CLAUSE_DECL (node
) = inner
;
3800 OMP_CLAUSE_SIZE (node
)
3801 = TYPE_SIZE_UNIT (TREE_TYPE (inner
));
3804 else if (lastref
->type
== REF_ARRAY
3805 && lastref
->u
.ar
.type
== AR_FULL
)
3807 /* Bare attach and detach clauses don't want any
3808 additional nodes. */
3809 if ((n
->u
.map_op
== OMP_MAP_ATTACH
3810 || n
->u
.map_op
== OMP_MAP_DETACH
)
3811 && (POINTER_TYPE_P (TREE_TYPE (inner
))
3812 || GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (inner
))))
3814 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (inner
)))
3816 tree ptr
= gfc_conv_descriptor_data_get (inner
);
3817 OMP_CLAUSE_DECL (node
) = ptr
;
3820 OMP_CLAUSE_DECL (node
) = inner
;
3821 OMP_CLAUSE_SIZE (node
) = size_zero_node
;
3822 goto finalize_map_clause
;
3825 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (inner
)))
3827 gomp_map_kind map_kind
;
3828 tree type
= TREE_TYPE (inner
);
3829 tree ptr
= gfc_conv_descriptor_data_get (inner
);
3830 ptr
= build_fold_indirect_ref (ptr
);
3831 OMP_CLAUSE_DECL (node
) = ptr
;
3832 int rank
= GFC_TYPE_ARRAY_RANK (type
);
3833 OMP_CLAUSE_SIZE (node
)
3834 = gfc_full_array_size (block
, inner
, rank
);
3836 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3837 map_kind
= OMP_CLAUSE_MAP_KIND (node
);
3838 if (GOMP_MAP_COPY_TO_P (map_kind
)
3839 || map_kind
== GOMP_MAP_ALLOC
)
3840 map_kind
= ((GOMP_MAP_ALWAYS_P (map_kind
)
3841 || gfc_expr_attr (n
->expr
).pointer
)
3842 ? GOMP_MAP_ALWAYS_TO
: GOMP_MAP_TO
);
3843 else if (n
->u
.map_op
== OMP_MAP_RELEASE
3844 || n
->u
.map_op
== OMP_MAP_DELETE
)
3846 else if (op
== EXEC_OMP_TARGET_EXIT_DATA
3847 || op
== EXEC_OACC_EXIT_DATA
)
3848 map_kind
= GOMP_MAP_RELEASE
;
3850 map_kind
= GOMP_MAP_ALLOC
;
3852 && n
->expr
->ts
.type
== BT_CHARACTER
3853 && n
->expr
->ts
.deferred
)
3855 gcc_assert (se
.string_length
);
3856 tree len
= fold_convert (size_type_node
,
3858 elemsz
= gfc_get_char_type (n
->expr
->ts
.kind
);
3859 elemsz
= TYPE_SIZE_UNIT (elemsz
);
3860 elemsz
= fold_build2 (MULT_EXPR
, size_type_node
,
3862 node4
= build_omp_clause (input_location
,
3864 OMP_CLAUSE_SET_MAP_KIND (node4
, map_kind
);
3865 OMP_CLAUSE_DECL (node4
) = se
.string_length
;
3866 OMP_CLAUSE_SIZE (node4
)
3867 = TYPE_SIZE_UNIT (gfc_charlen_type_node
);
3869 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3870 OMP_CLAUSE_SIZE (node
)
3871 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3872 OMP_CLAUSE_SIZE (node
), elemsz
);
3873 node2
= build_omp_clause (input_location
,
3875 if (map_kind
== GOMP_MAP_RELEASE
3876 || map_kind
== GOMP_MAP_DELETE
)
3878 OMP_CLAUSE_SET_MAP_KIND (node2
, map_kind
);
3879 OMP_CLAUSE_RELEASE_DESCRIPTOR (node2
) = 1;
3882 OMP_CLAUSE_SET_MAP_KIND (node2
,
3884 OMP_CLAUSE_DECL (node2
) = inner
;
3885 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
3888 gfc_omp_namelist
*n2
3889 = clauses
->lists
[OMP_LIST_MAP
];
3891 /* If we don't have a mapping of a smaller part
3892 of the array -- or we can't prove that we do
3893 statically -- set this flag. If there is a
3894 mapping of a smaller part of the array after
3895 all, this will turn into a no-op at
3897 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (node
) = 1;
3900 n2
= get_symbol_rooted_namelist (sym_rooted_nl
,
3904 bool drop_mapping
= false;
3906 for (; n2
!= NULL
; n2
= n2
->next
)
3908 if ((!sym_based
&& n
== n2
)
3909 || (sym_based
&& n
== n2
->u2
.duplicate_of
)
3913 if (!gfc_omp_expr_prefix_same (n
->expr
,
3917 gfc_ref
*ref1
= n
->expr
->ref
;
3918 gfc_ref
*ref2
= n2
->expr
->ref
;
3920 /* We know ref1 and ref2 overlap. We're
3921 interested in whether ref2 describes a
3922 smaller part of the array than ref1, which
3923 we already know refers to the full
3926 while (ref1
->next
&& ref2
->next
)
3933 || (ref2
->type
== REF_ARRAY
3934 && (ref2
->u
.ar
.type
== AR_ELEMENT
3938 drop_mapping
= true;
3945 node3
= build_omp_clause (input_location
,
3947 OMP_CLAUSE_SET_MAP_KIND (node3
,
3948 GOMP_MAP_ATTACH_DETACH
);
3949 OMP_CLAUSE_DECL (node3
)
3950 = gfc_conv_descriptor_data_get (inner
);
3951 /* Similar to gfc_trans_omp_array_section (details
3952 there), we add/keep the cast for OpenMP to prevent
3953 that an 'alloc:' gets added for node3 ('desc.data')
3954 as that is part of the whole descriptor (node3).
3955 TODO: Remove once the ME handles this properly. */
3957 OMP_CLAUSE_DECL (node3
)
3958 = fold_convert (TREE_TYPE (TREE_OPERAND(ptr
, 0)),
3959 OMP_CLAUSE_DECL (node3
));
3961 STRIP_NOPS (OMP_CLAUSE_DECL (node3
));
3962 OMP_CLAUSE_SIZE (node3
) = size_int (0);
3965 OMP_CLAUSE_DECL (node
) = inner
;
3967 else if (lastref
->type
== REF_ARRAY
)
3969 /* An array element or section. */
3970 bool element
= lastref
->u
.ar
.type
== AR_ELEMENT
;
3971 gomp_map_kind kind
= GOMP_MAP_ATTACH_DETACH
;
3972 gfc_trans_omp_array_section (block
, op
, n
, inner
, element
,
3973 !openacc
, kind
, node
, node2
,
3980 sorry ("unhandled expression");
3982 finalize_map_clause
:
3984 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
3986 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
3988 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
3990 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
3992 omp_clauses
= gfc_trans_add_clause (node5
, omp_clauses
);
3997 case OMP_LIST_CACHE
:
3998 for (; n
!= NULL
; n
= n
->next
)
4000 if (!n
->sym
->attr
.referenced
)
4006 clause_code
= OMP_CLAUSE_TO
;
4009 clause_code
= OMP_CLAUSE_FROM
;
4011 case OMP_LIST_CACHE
:
4012 clause_code
= OMP_CLAUSE__CACHE_
;
4017 tree node
= build_omp_clause (input_location
, clause_code
);
4019 || (n
->expr
->ref
->type
== REF_ARRAY
4020 && n
->expr
->ref
->u
.ar
.type
== AR_FULL
4021 && n
->expr
->ref
->next
== NULL
))
4023 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
4024 if (gfc_omp_privatize_by_reference (decl
))
4026 if (gfc_omp_is_allocatable_or_ptr (decl
))
4027 decl
= build_fold_indirect_ref (decl
);
4028 decl
= build_fold_indirect_ref (decl
);
4030 else if (DECL_P (decl
))
4031 TREE_ADDRESSABLE (decl
) = 1;
4032 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
4034 tree type
= TREE_TYPE (decl
);
4035 tree ptr
= gfc_conv_descriptor_data_get (decl
);
4036 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr
)));
4037 ptr
= build_fold_indirect_ref (ptr
);
4038 OMP_CLAUSE_DECL (node
) = ptr
;
4039 OMP_CLAUSE_SIZE (node
)
4040 = gfc_full_array_size (block
, decl
,
4041 GFC_TYPE_ARRAY_RANK (type
));
4043 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
4044 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
4045 OMP_CLAUSE_SIZE (node
)
4046 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
4047 OMP_CLAUSE_SIZE (node
), elemsz
);
4051 OMP_CLAUSE_DECL (node
) = decl
;
4052 if (gfc_omp_is_allocatable_or_ptr (decl
))
4053 OMP_CLAUSE_SIZE (node
)
4054 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
4060 gfc_init_se (&se
, NULL
);
4061 if (n
->expr
->rank
== 0)
4063 gfc_conv_expr_reference (&se
, n
->expr
);
4065 gfc_add_block_to_block (block
, &se
.pre
);
4066 OMP_CLAUSE_SIZE (node
)
4067 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ptr
)));
4071 gfc_conv_expr_descriptor (&se
, n
->expr
);
4072 ptr
= gfc_conv_array_data (se
.expr
);
4073 tree type
= TREE_TYPE (se
.expr
);
4074 gfc_add_block_to_block (block
, &se
.pre
);
4075 OMP_CLAUSE_SIZE (node
)
4076 = gfc_full_array_size (block
, se
.expr
,
4077 GFC_TYPE_ARRAY_RANK (type
));
4079 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
4080 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
4081 OMP_CLAUSE_SIZE (node
)
4082 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
4083 OMP_CLAUSE_SIZE (node
), elemsz
);
4085 gfc_add_block_to_block (block
, &se
.post
);
4086 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr
)));
4087 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
4089 if (n
->u
.present_modifier
)
4090 OMP_CLAUSE_MOTION_PRESENT (node
) = 1;
4091 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
4094 case OMP_LIST_USES_ALLOCATORS
:
4095 /* Ignore pre-defined allocators as no special treatment is needed. */
4096 for (; n
!= NULL
; n
= n
->next
)
4097 if (n
->sym
->attr
.flavor
== FL_VARIABLE
)
4100 sorry_at (input_location
, "%<uses_allocators%> clause with traits "
4101 "and memory spaces");
4108 /* Free hashmap if we built it. */
4111 typedef hash_map
<gfc_symbol
*, gfc_omp_namelist
*>::iterator hti
;
4112 for (hti it
= sym_rooted_nl
->begin (); it
!= sym_rooted_nl
->end (); ++it
)
4114 gfc_omp_namelist
*&nl
= (*it
).second
;
4117 gfc_omp_namelist
*next
= nl
->next
;
4122 delete sym_rooted_nl
;
4125 if (clauses
->if_expr
)
4129 gfc_init_se (&se
, NULL
);
4130 gfc_conv_expr (&se
, clauses
->if_expr
);
4131 gfc_add_block_to_block (block
, &se
.pre
);
4132 if_var
= gfc_evaluate_now (se
.expr
, block
);
4133 gfc_add_block_to_block (block
, &se
.post
);
4135 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF
);
4136 OMP_CLAUSE_IF_MODIFIER (c
) = ERROR_MARK
;
4137 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
4138 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4141 for (ifc
= 0; ifc
< OMP_IF_LAST
; ifc
++)
4142 if (clauses
->if_exprs
[ifc
])
4146 gfc_init_se (&se
, NULL
);
4147 gfc_conv_expr (&se
, clauses
->if_exprs
[ifc
]);
4148 gfc_add_block_to_block (block
, &se
.pre
);
4149 if_var
= gfc_evaluate_now (se
.expr
, block
);
4150 gfc_add_block_to_block (block
, &se
.post
);
4152 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF
);
4156 OMP_CLAUSE_IF_MODIFIER (c
) = VOID_CST
;
4158 case OMP_IF_PARALLEL
:
4159 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_PARALLEL
;
4162 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_SIMD
;
4165 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASK
;
4167 case OMP_IF_TASKLOOP
:
4168 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASKLOOP
;
4171 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET
;
4173 case OMP_IF_TARGET_DATA
:
4174 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_DATA
;
4176 case OMP_IF_TARGET_UPDATE
:
4177 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_UPDATE
;
4179 case OMP_IF_TARGET_ENTER_DATA
:
4180 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_ENTER_DATA
;
4182 case OMP_IF_TARGET_EXIT_DATA
:
4183 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_EXIT_DATA
;
4188 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
4189 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4192 if (clauses
->self_expr
)
4196 gfc_init_se (&se
, NULL
);
4197 gfc_conv_expr (&se
, clauses
->self_expr
);
4198 gfc_add_block_to_block (block
, &se
.pre
);
4199 self_var
= gfc_evaluate_now (se
.expr
, block
);
4200 gfc_add_block_to_block (block
, &se
.post
);
4202 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SELF
);
4203 OMP_CLAUSE_SELF_EXPR (c
) = self_var
;
4204 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4207 if (clauses
->final_expr
)
4211 gfc_init_se (&se
, NULL
);
4212 gfc_conv_expr (&se
, clauses
->final_expr
);
4213 gfc_add_block_to_block (block
, &se
.pre
);
4214 final_var
= gfc_evaluate_now (se
.expr
, block
);
4215 gfc_add_block_to_block (block
, &se
.post
);
4217 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FINAL
);
4218 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
4219 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4222 if (clauses
->num_threads
)
4226 gfc_init_se (&se
, NULL
);
4227 gfc_conv_expr (&se
, clauses
->num_threads
);
4228 gfc_add_block_to_block (block
, &se
.pre
);
4229 num_threads
= gfc_evaluate_now (se
.expr
, block
);
4230 gfc_add_block_to_block (block
, &se
.post
);
4232 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_THREADS
);
4233 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
4234 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4237 chunk_size
= NULL_TREE
;
4238 if (clauses
->chunk_size
)
4240 gfc_init_se (&se
, NULL
);
4241 gfc_conv_expr (&se
, clauses
->chunk_size
);
4242 gfc_add_block_to_block (block
, &se
.pre
);
4243 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
4244 gfc_add_block_to_block (block
, &se
.post
);
4247 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
4249 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SCHEDULE
);
4250 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
4251 switch (clauses
->sched_kind
)
4253 case OMP_SCHED_STATIC
:
4254 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
4256 case OMP_SCHED_DYNAMIC
:
4257 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
4259 case OMP_SCHED_GUIDED
:
4260 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
4262 case OMP_SCHED_RUNTIME
:
4263 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
4265 case OMP_SCHED_AUTO
:
4266 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
4271 if (clauses
->sched_monotonic
)
4272 OMP_CLAUSE_SCHEDULE_KIND (c
)
4273 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
4274 | OMP_CLAUSE_SCHEDULE_MONOTONIC
);
4275 else if (clauses
->sched_nonmonotonic
)
4276 OMP_CLAUSE_SCHEDULE_KIND (c
)
4277 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
4278 | OMP_CLAUSE_SCHEDULE_NONMONOTONIC
);
4279 if (clauses
->sched_simd
)
4280 OMP_CLAUSE_SCHEDULE_SIMD (c
) = 1;
4281 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4284 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
4286 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEFAULT
);
4287 switch (clauses
->default_sharing
)
4289 case OMP_DEFAULT_NONE
:
4290 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
4292 case OMP_DEFAULT_SHARED
:
4293 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
4295 case OMP_DEFAULT_PRIVATE
:
4296 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
4298 case OMP_DEFAULT_FIRSTPRIVATE
:
4299 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
4301 case OMP_DEFAULT_PRESENT
:
4302 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRESENT
;
4307 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4310 if (clauses
->nowait
)
4312 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOWAIT
);
4313 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4316 if (clauses
->ordered
)
4318 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_ORDERED
);
4319 OMP_CLAUSE_ORDERED_EXPR (c
)
4320 = clauses
->orderedc
? build_int_cst (integer_type_node
,
4321 clauses
->orderedc
) : NULL_TREE
;
4322 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4325 if (clauses
->order_concurrent
)
4327 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_ORDER
);
4328 OMP_CLAUSE_ORDER_UNCONSTRAINED (c
) = clauses
->order_unconstrained
;
4329 OMP_CLAUSE_ORDER_REPRODUCIBLE (c
) = clauses
->order_reproducible
;
4330 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4333 if (clauses
->untied
)
4335 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_UNTIED
);
4336 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4339 if (clauses
->mergeable
)
4341 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_MERGEABLE
);
4342 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4345 if (clauses
->collapse
)
4347 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_COLLAPSE
);
4348 OMP_CLAUSE_COLLAPSE_EXPR (c
)
4349 = build_int_cst (integer_type_node
, clauses
->collapse
);
4350 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4353 if (clauses
->inbranch
)
4355 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_INBRANCH
);
4356 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4359 if (clauses
->notinbranch
)
4361 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOTINBRANCH
);
4362 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4365 switch (clauses
->cancel
)
4367 case OMP_CANCEL_UNKNOWN
:
4369 case OMP_CANCEL_PARALLEL
:
4370 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PARALLEL
);
4371 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4373 case OMP_CANCEL_SECTIONS
:
4374 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SECTIONS
);
4375 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4378 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FOR
);
4379 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4381 case OMP_CANCEL_TASKGROUP
:
4382 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_TASKGROUP
);
4383 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4387 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
4389 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PROC_BIND
);
4390 switch (clauses
->proc_bind
)
4392 case OMP_PROC_BIND_PRIMARY
:
4393 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_PRIMARY
;
4395 case OMP_PROC_BIND_MASTER
:
4396 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
4398 case OMP_PROC_BIND_SPREAD
:
4399 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
4401 case OMP_PROC_BIND_CLOSE
:
4402 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
4407 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4410 if (clauses
->safelen_expr
)
4414 gfc_init_se (&se
, NULL
);
4415 gfc_conv_expr (&se
, clauses
->safelen_expr
);
4416 gfc_add_block_to_block (block
, &se
.pre
);
4417 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
4418 gfc_add_block_to_block (block
, &se
.post
);
4420 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SAFELEN
);
4421 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
4422 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4425 if (clauses
->simdlen_expr
)
4429 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMDLEN
);
4430 OMP_CLAUSE_SIMDLEN_EXPR (c
)
4431 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
4432 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4438 gfc_init_se (&se
, NULL
);
4439 gfc_conv_expr (&se
, clauses
->simdlen_expr
);
4440 gfc_add_block_to_block (block
, &se
.pre
);
4441 simdlen_var
= gfc_evaluate_now (se
.expr
, block
);
4442 gfc_add_block_to_block (block
, &se
.post
);
4444 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMDLEN
);
4445 OMP_CLAUSE_SIMDLEN_EXPR (c
) = simdlen_var
;
4446 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4450 if (clauses
->num_teams_upper
)
4452 tree num_teams_lower
= NULL_TREE
, num_teams_upper
;
4454 gfc_init_se (&se
, NULL
);
4455 gfc_conv_expr (&se
, clauses
->num_teams_upper
);
4456 gfc_add_block_to_block (block
, &se
.pre
);
4457 num_teams_upper
= gfc_evaluate_now (se
.expr
, block
);
4458 gfc_add_block_to_block (block
, &se
.post
);
4460 if (clauses
->num_teams_lower
)
4462 gfc_init_se (&se
, NULL
);
4463 gfc_conv_expr (&se
, clauses
->num_teams_lower
);
4464 gfc_add_block_to_block (block
, &se
.pre
);
4465 num_teams_lower
= gfc_evaluate_now (se
.expr
, block
);
4466 gfc_add_block_to_block (block
, &se
.post
);
4468 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_TEAMS
);
4469 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
) = num_teams_lower
;
4470 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c
) = num_teams_upper
;
4471 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4474 if (clauses
->device
)
4478 gfc_init_se (&se
, NULL
);
4479 gfc_conv_expr (&se
, clauses
->device
);
4480 gfc_add_block_to_block (block
, &se
.pre
);
4481 device
= gfc_evaluate_now (se
.expr
, block
);
4482 gfc_add_block_to_block (block
, &se
.post
);
4484 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEVICE
);
4485 OMP_CLAUSE_DEVICE_ID (c
) = device
;
4487 if (clauses
->ancestor
)
4488 OMP_CLAUSE_DEVICE_ANCESTOR (c
) = 1;
4490 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4493 if (clauses
->thread_limit
)
4497 gfc_init_se (&se
, NULL
);
4498 gfc_conv_expr (&se
, clauses
->thread_limit
);
4499 gfc_add_block_to_block (block
, &se
.pre
);
4500 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
4501 gfc_add_block_to_block (block
, &se
.post
);
4503 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_THREAD_LIMIT
);
4504 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
4505 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4508 chunk_size
= NULL_TREE
;
4509 if (clauses
->dist_chunk_size
)
4511 gfc_init_se (&se
, NULL
);
4512 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
4513 gfc_add_block_to_block (block
, &se
.pre
);
4514 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
4515 gfc_add_block_to_block (block
, &se
.post
);
4518 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
4520 c
= build_omp_clause (gfc_get_location (&where
),
4521 OMP_CLAUSE_DIST_SCHEDULE
);
4522 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
4523 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4526 if (clauses
->grainsize
)
4530 gfc_init_se (&se
, NULL
);
4531 gfc_conv_expr (&se
, clauses
->grainsize
);
4532 gfc_add_block_to_block (block
, &se
.pre
);
4533 grainsize
= gfc_evaluate_now (se
.expr
, block
);
4534 gfc_add_block_to_block (block
, &se
.post
);
4536 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_GRAINSIZE
);
4537 OMP_CLAUSE_GRAINSIZE_EXPR (c
) = grainsize
;
4538 if (clauses
->grainsize_strict
)
4539 OMP_CLAUSE_GRAINSIZE_STRICT (c
) = 1;
4540 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4543 if (clauses
->num_tasks
)
4547 gfc_init_se (&se
, NULL
);
4548 gfc_conv_expr (&se
, clauses
->num_tasks
);
4549 gfc_add_block_to_block (block
, &se
.pre
);
4550 num_tasks
= gfc_evaluate_now (se
.expr
, block
);
4551 gfc_add_block_to_block (block
, &se
.post
);
4553 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_TASKS
);
4554 OMP_CLAUSE_NUM_TASKS_EXPR (c
) = num_tasks
;
4555 if (clauses
->num_tasks_strict
)
4556 OMP_CLAUSE_NUM_TASKS_STRICT (c
) = 1;
4557 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4560 if (clauses
->priority
)
4564 gfc_init_se (&se
, NULL
);
4565 gfc_conv_expr (&se
, clauses
->priority
);
4566 gfc_add_block_to_block (block
, &se
.pre
);
4567 priority
= gfc_evaluate_now (se
.expr
, block
);
4568 gfc_add_block_to_block (block
, &se
.post
);
4570 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PRIORITY
);
4571 OMP_CLAUSE_PRIORITY_EXPR (c
) = priority
;
4572 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4575 if (clauses
->detach
)
4579 gfc_init_se (&se
, NULL
);
4580 gfc_conv_expr (&se
, clauses
->detach
);
4581 gfc_add_block_to_block (block
, &se
.pre
);
4583 gfc_add_block_to_block (block
, &se
.post
);
4585 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DETACH
);
4586 TREE_ADDRESSABLE (detach
) = 1;
4587 OMP_CLAUSE_DECL (c
) = detach
;
4588 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4591 if (clauses
->filter
)
4595 gfc_init_se (&se
, NULL
);
4596 gfc_conv_expr (&se
, clauses
->filter
);
4597 gfc_add_block_to_block (block
, &se
.pre
);
4598 filter
= gfc_evaluate_now (se
.expr
, block
);
4599 gfc_add_block_to_block (block
, &se
.post
);
4601 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FILTER
);
4602 OMP_CLAUSE_FILTER_EXPR (c
) = filter
;
4603 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4610 gfc_init_se (&se
, NULL
);
4611 gfc_conv_expr (&se
, clauses
->hint
);
4612 gfc_add_block_to_block (block
, &se
.pre
);
4613 hint
= gfc_evaluate_now (se
.expr
, block
);
4614 gfc_add_block_to_block (block
, &se
.post
);
4616 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_HINT
);
4617 OMP_CLAUSE_HINT_EXPR (c
) = hint
;
4618 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4623 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMD
);
4624 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4626 if (clauses
->threads
)
4628 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_THREADS
);
4629 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4631 if (clauses
->nogroup
)
4633 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOGROUP
);
4634 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4637 for (int i
= 0; i
< OMP_DEFAULTMAP_CAT_NUM
; i
++)
4639 if (clauses
->defaultmap
[i
] == OMP_DEFAULTMAP_UNSET
)
4641 enum omp_clause_defaultmap_kind behavior
, category
;
4642 switch ((gfc_omp_defaultmap_category
) i
)
4644 case OMP_DEFAULTMAP_CAT_UNCATEGORIZED
:
4645 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
;
4647 case OMP_DEFAULTMAP_CAT_ALL
:
4648 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALL
;
4650 case OMP_DEFAULTMAP_CAT_SCALAR
:
4651 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
;
4653 case OMP_DEFAULTMAP_CAT_AGGREGATE
:
4654 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
;
4656 case OMP_DEFAULTMAP_CAT_ALLOCATABLE
:
4657 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
;
4659 case OMP_DEFAULTMAP_CAT_POINTER
:
4660 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
;
4662 default: gcc_unreachable ();
4664 switch (clauses
->defaultmap
[i
])
4666 case OMP_DEFAULTMAP_ALLOC
:
4667 behavior
= OMP_CLAUSE_DEFAULTMAP_ALLOC
;
4669 case OMP_DEFAULTMAP_TO
: behavior
= OMP_CLAUSE_DEFAULTMAP_TO
; break;
4670 case OMP_DEFAULTMAP_FROM
: behavior
= OMP_CLAUSE_DEFAULTMAP_FROM
; break;
4671 case OMP_DEFAULTMAP_TOFROM
:
4672 behavior
= OMP_CLAUSE_DEFAULTMAP_TOFROM
;
4674 case OMP_DEFAULTMAP_FIRSTPRIVATE
:
4675 behavior
= OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
;
4677 case OMP_DEFAULTMAP_PRESENT
:
4678 behavior
= OMP_CLAUSE_DEFAULTMAP_PRESENT
;
4680 case OMP_DEFAULTMAP_NONE
: behavior
= OMP_CLAUSE_DEFAULTMAP_NONE
; break;
4681 case OMP_DEFAULTMAP_DEFAULT
:
4682 behavior
= OMP_CLAUSE_DEFAULTMAP_DEFAULT
;
4684 default: gcc_unreachable ();
4686 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEFAULTMAP
);
4687 OMP_CLAUSE_DEFAULTMAP_SET_KIND (c
, behavior
, category
);
4688 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4691 if (clauses
->doacross_source
)
4693 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DOACROSS
);
4694 OMP_CLAUSE_DOACROSS_KIND (c
) = OMP_CLAUSE_DOACROSS_SOURCE
;
4695 OMP_CLAUSE_DOACROSS_DEPEND (c
) = clauses
->depend_source
;
4696 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4701 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_ASYNC
);
4702 if (clauses
->async_expr
)
4703 OMP_CLAUSE_ASYNC_EXPR (c
)
4704 = gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
4706 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
4707 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4711 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SEQ
);
4712 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4714 if (clauses
->par_auto
)
4716 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_AUTO
);
4717 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4719 if (clauses
->if_present
)
4721 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF_PRESENT
);
4722 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4724 if (clauses
->finalize
)
4726 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FINALIZE
);
4727 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4729 if (clauses
->independent
)
4731 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_INDEPENDENT
);
4732 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4734 if (clauses
->wait_list
)
4738 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
4740 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_WAIT
);
4741 OMP_CLAUSE_DECL (c
) = gfc_convert_expr_to_tree (block
, el
->expr
);
4742 OMP_CLAUSE_CHAIN (c
) = omp_clauses
;
4746 if (clauses
->num_gangs_expr
)
4749 = gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
4750 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_GANGS
);
4751 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
4752 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4754 if (clauses
->num_workers_expr
)
4756 tree num_workers_var
4757 = gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
4758 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_WORKERS
);
4759 OMP_CLAUSE_NUM_WORKERS_EXPR (c
) = num_workers_var
;
4760 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4762 if (clauses
->vector_length_expr
)
4764 tree vector_length_var
4765 = gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
4766 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_VECTOR_LENGTH
);
4767 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
) = vector_length_var
;
4768 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4770 if (clauses
->tile_list
)
4772 vec
<tree
, va_gc
> *tvec
;
4775 vec_alloc (tvec
, 4);
4777 for (el
= clauses
->tile_list
; el
; el
= el
->next
)
4778 vec_safe_push (tvec
, gfc_convert_expr_to_tree (block
, el
->expr
));
4780 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_TILE
);
4781 OMP_CLAUSE_TILE_LIST (c
) = build_tree_list_vec (tvec
);
4782 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4785 if (clauses
->vector
)
4787 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_VECTOR
);
4788 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4790 if (clauses
->vector_expr
)
4793 = gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
4794 OMP_CLAUSE_VECTOR_EXPR (c
) = vector_var
;
4796 /* TODO: We're not capturing location information for individual
4797 clauses. However, if we have an expression attached to the
4798 clause, that one provides better location information. */
4799 OMP_CLAUSE_LOCATION (c
)
4800 = gfc_get_location (&clauses
->vector_expr
->where
);
4803 if (clauses
->worker
)
4805 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_WORKER
);
4806 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4808 if (clauses
->worker_expr
)
4811 = gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
4812 OMP_CLAUSE_WORKER_EXPR (c
) = worker_var
;
4814 /* TODO: We're not capturing location information for individual
4815 clauses. However, if we have an expression attached to the
4816 clause, that one provides better location information. */
4817 OMP_CLAUSE_LOCATION (c
)
4818 = gfc_get_location (&clauses
->worker_expr
->where
);
4824 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_GANG
);
4825 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4827 if (clauses
->gang_num_expr
)
4829 arg
= gfc_convert_expr_to_tree (block
, clauses
->gang_num_expr
);
4830 OMP_CLAUSE_GANG_EXPR (c
) = arg
;
4832 /* TODO: We're not capturing location information for individual
4833 clauses. However, if we have an expression attached to the
4834 clause, that one provides better location information. */
4835 OMP_CLAUSE_LOCATION (c
)
4836 = gfc_get_location (&clauses
->gang_num_expr
->where
);
4839 if (clauses
->gang_static
)
4841 arg
= clauses
->gang_static_expr
4842 ? gfc_convert_expr_to_tree (block
, clauses
->gang_static_expr
)
4843 : integer_minus_one_node
;
4844 OMP_CLAUSE_GANG_STATIC_EXPR (c
) = arg
;
4847 if (clauses
->bind
!= OMP_BIND_UNSET
)
4849 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_BIND
);
4850 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4851 switch (clauses
->bind
)
4853 case OMP_BIND_TEAMS
:
4854 OMP_CLAUSE_BIND_KIND (c
) = OMP_CLAUSE_BIND_TEAMS
;
4856 case OMP_BIND_PARALLEL
:
4857 OMP_CLAUSE_BIND_KIND (c
) = OMP_CLAUSE_BIND_PARALLEL
;
4859 case OMP_BIND_THREAD
:
4860 OMP_CLAUSE_BIND_KIND (c
) = OMP_CLAUSE_BIND_THREAD
;
4866 /* OpenACC 'nohost' clauses cannot appear here. */
4867 gcc_checking_assert (!clauses
->nohost
);
4869 return nreverse (omp_clauses
);
4872 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
4875 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
4880 stmt
= gfc_trans_code (code
);
4881 if (TREE_CODE (stmt
) != BIND_EXPR
)
4883 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
4885 tree block
= poplevel (1, 0);
4886 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
4896 /* Translate OpenACC 'parallel', 'kernels', 'serial', 'data', 'host_data'
4900 gfc_trans_oacc_construct (gfc_code
*code
)
4903 tree stmt
, oacc_clauses
;
4904 enum tree_code construct_code
;
4908 case EXEC_OACC_PARALLEL
:
4909 construct_code
= OACC_PARALLEL
;
4911 case EXEC_OACC_KERNELS
:
4912 construct_code
= OACC_KERNELS
;
4914 case EXEC_OACC_SERIAL
:
4915 construct_code
= OACC_SERIAL
;
4917 case EXEC_OACC_DATA
:
4918 construct_code
= OACC_DATA
;
4920 case EXEC_OACC_HOST_DATA
:
4921 construct_code
= OACC_HOST_DATA
;
4927 gfc_start_block (&block
);
4928 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4929 code
->loc
, false, true);
4931 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4932 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4933 stmt
= build2_loc (gfc_get_location (&code
->loc
), construct_code
,
4934 void_type_node
, stmt
, oacc_clauses
);
4935 gfc_add_expr_to_block (&block
, stmt
);
4936 return gfc_finish_block (&block
);
4939 /* update, enter_data, exit_data, cache. */
4941 gfc_trans_oacc_executable_directive (gfc_code
*code
)
4944 tree stmt
, oacc_clauses
;
4945 enum tree_code construct_code
;
4949 case EXEC_OACC_UPDATE
:
4950 construct_code
= OACC_UPDATE
;
4952 case EXEC_OACC_ENTER_DATA
:
4953 construct_code
= OACC_ENTER_DATA
;
4955 case EXEC_OACC_EXIT_DATA
:
4956 construct_code
= OACC_EXIT_DATA
;
4958 case EXEC_OACC_CACHE
:
4959 construct_code
= OACC_CACHE
;
4965 gfc_start_block (&block
);
4966 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4967 code
->loc
, false, true, code
->op
);
4968 stmt
= build1_loc (input_location
, construct_code
, void_type_node
,
4970 gfc_add_expr_to_block (&block
, stmt
);
4971 return gfc_finish_block (&block
);
4975 gfc_trans_oacc_wait_directive (gfc_code
*code
)
4979 vec
<tree
, va_gc
> *args
;
4982 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
4983 location_t loc
= input_location
;
4985 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
4988 vec_alloc (args
, nparms
+ 2);
4989 stmt
= builtin_decl_explicit (BUILT_IN_GOACC_WAIT
);
4991 gfc_start_block (&block
);
4993 if (clauses
->async_expr
)
4994 t
= gfc_convert_expr_to_tree (&block
, clauses
->async_expr
);
4996 t
= build_int_cst (integer_type_node
, -2);
4998 args
->quick_push (t
);
4999 args
->quick_push (build_int_cst (integer_type_node
, nparms
));
5001 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
5002 args
->quick_push (gfc_convert_expr_to_tree (&block
, el
->expr
));
5004 stmt
= build_call_expr_loc_vec (loc
, stmt
, args
);
5005 gfc_add_expr_to_block (&block
, stmt
);
5009 return gfc_finish_block (&block
);
/* Forward declarations: these are defined later in this file but are
   referenced earlier (e.g. from combined-construct translation).  */
5012 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
5013 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
5016 gfc_trans_omp_allocators (gfc_code
*code
)
5018 static bool warned
= false;
5019 gfc_omp_namelist
*omp_allocate
5020 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALLOCATE
];
5021 if (!flag_openmp_allocators
&& !warned
)
5023 omp_allocate
= NULL
;
5024 gfc_error ("%<!$OMP %s%> at %L requires %<-fopenmp-allocators%>",
5025 code
->op
== EXEC_OMP_ALLOCATE
? "ALLOCATE" : "ALLOCATORS",
5027 warning (0, "All files that might deallocate such a variable must be "
5028 "compiled with %<-fopenmp-allocators%>");
5029 inform (UNKNOWN_LOCATION
,
5030 "This includes explicit DEALLOCATE, reallocation on intrinsic "
5031 "assignment, INTENT(OUT) for allocatable dummy arguments, and "
5032 "reallocation of allocatable components allocated with an "
5033 "OpenMP allocator");
5036 return gfc_trans_allocate (code
->block
->next
, omp_allocate
);
5040 gfc_trans_omp_assume (gfc_code
*code
)
5043 gfc_init_block (&block
);
5044 gfc_omp_assumptions
*assume
= code
->ext
.omp_clauses
->assume
;
5046 for (gfc_expr_list
*el
= assume
->holds
; el
; el
= el
->next
)
5048 location_t loc
= gfc_get_location (&el
->expr
->where
);
5050 gfc_init_se (&se
, NULL
);
5051 gfc_conv_expr (&se
, el
->expr
);
5053 if (se
.pre
.head
== NULL_TREE
&& se
.post
.head
== NULL_TREE
)
5057 tree var
= create_tmp_var_raw (boolean_type_node
);
5058 DECL_CONTEXT (var
) = current_function_decl
;
5060 gfc_init_block (&block2
);
5061 gfc_add_block_to_block (&block2
, &se
.pre
);
5062 gfc_add_modify_loc (loc
, &block2
, var
,
5063 fold_convert_loc (loc
, boolean_type_node
,
5065 gfc_add_block_to_block (&block2
, &se
.post
);
5066 t
= gfc_finish_block (&block2
);
5067 t
= build4 (TARGET_EXPR
, boolean_type_node
, var
, t
, NULL
, NULL
);
5069 t
= build_call_expr_internal_loc (loc
, IFN_ASSUME
,
5070 void_type_node
, 1, t
);
5071 gfc_add_expr_to_block (&block
, t
);
5073 gfc_add_expr_to_block (&block
, gfc_trans_omp_code (code
->block
->next
, true));
5074 return gfc_finish_block (&block
);
5078 gfc_trans_omp_atomic (gfc_code
*code
)
5080 gfc_code
*atomic_code
= code
->block
;
5084 gfc_expr
*expr1
, *expr2
, *e
, *capture_expr1
= NULL
, *capture_expr2
= NULL
;
5087 tree lhsaddr
, type
, rhs
, x
, compare
= NULL_TREE
, comp_tgt
= NULL_TREE
;
5088 enum tree_code op
= ERROR_MARK
;
5089 enum tree_code aop
= OMP_ATOMIC
;
5090 bool var_on_left
= false, else_branch
= false;
5091 enum omp_memory_order mo
, fail_mo
;
5092 switch (atomic_code
->ext
.omp_clauses
->memorder
)
5094 case OMP_MEMORDER_UNSET
: mo
= OMP_MEMORY_ORDER_UNSPECIFIED
; break;
5095 case OMP_MEMORDER_ACQ_REL
: mo
= OMP_MEMORY_ORDER_ACQ_REL
; break;
5096 case OMP_MEMORDER_ACQUIRE
: mo
= OMP_MEMORY_ORDER_ACQUIRE
; break;
5097 case OMP_MEMORDER_RELAXED
: mo
= OMP_MEMORY_ORDER_RELAXED
; break;
5098 case OMP_MEMORDER_RELEASE
: mo
= OMP_MEMORY_ORDER_RELEASE
; break;
5099 case OMP_MEMORDER_SEQ_CST
: mo
= OMP_MEMORY_ORDER_SEQ_CST
; break;
5100 default: gcc_unreachable ();
5102 switch (atomic_code
->ext
.omp_clauses
->fail
)
5104 case OMP_MEMORDER_UNSET
: fail_mo
= OMP_FAIL_MEMORY_ORDER_UNSPECIFIED
; break;
5105 case OMP_MEMORDER_ACQUIRE
: fail_mo
= OMP_FAIL_MEMORY_ORDER_ACQUIRE
; break;
5106 case OMP_MEMORDER_RELAXED
: fail_mo
= OMP_FAIL_MEMORY_ORDER_RELAXED
; break;
5107 case OMP_MEMORDER_SEQ_CST
: fail_mo
= OMP_FAIL_MEMORY_ORDER_SEQ_CST
; break;
5108 default: gcc_unreachable ();
5110 mo
= (omp_memory_order
) (mo
| fail_mo
);
5112 code
= code
->block
->next
;
5113 if (atomic_code
->ext
.omp_clauses
->compare
)
5115 gfc_expr
*comp_expr
;
5116 if (code
->op
== EXEC_IF
)
5118 comp_expr
= code
->block
->expr1
;
5119 gcc_assert (code
->block
->next
->op
== EXEC_ASSIGN
);
5120 expr1
= code
->block
->next
->expr1
;
5121 expr2
= code
->block
->next
->expr2
;
5122 if (code
->block
->block
)
5124 gcc_assert (atomic_code
->ext
.omp_clauses
->capture
5125 && code
->block
->block
->next
->op
== EXEC_ASSIGN
);
5127 aop
= OMP_ATOMIC_CAPTURE_OLD
;
5128 capture_expr1
= code
->block
->block
->next
->expr1
;
5129 capture_expr2
= code
->block
->block
->next
->expr2
;
5131 else if (atomic_code
->ext
.omp_clauses
->capture
)
5133 gcc_assert (code
->next
->op
== EXEC_ASSIGN
);
5134 aop
= OMP_ATOMIC_CAPTURE_NEW
;
5135 capture_expr1
= code
->next
->expr1
;
5136 capture_expr2
= code
->next
->expr2
;
5141 gcc_assert (atomic_code
->ext
.omp_clauses
->capture
5142 && code
->op
== EXEC_ASSIGN
5143 && code
->next
->op
== EXEC_IF
);
5144 aop
= OMP_ATOMIC_CAPTURE_OLD
;
5145 capture_expr1
= code
->expr1
;
5146 capture_expr2
= code
->expr2
;
5147 expr1
= code
->next
->block
->next
->expr1
;
5148 expr2
= code
->next
->block
->next
->expr2
;
5149 comp_expr
= code
->next
->block
->expr1
;
5151 gfc_init_se (&lse
, NULL
);
5152 gfc_conv_expr (&lse
, comp_expr
->value
.op
.op2
);
5153 gfc_add_block_to_block (&block
, &lse
.pre
);
5155 var
= expr1
->symtree
->n
.sym
;
5159 gcc_assert (code
->op
== EXEC_ASSIGN
);
5160 expr1
= code
->expr1
;
5161 expr2
= code
->expr2
;
5162 if (atomic_code
->ext
.omp_clauses
->capture
5163 && (expr2
->expr_type
== EXPR_VARIABLE
5164 || (expr2
->expr_type
== EXPR_FUNCTION
5165 && expr2
->value
.function
.isym
5166 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
5167 && (expr2
->value
.function
.actual
->expr
->expr_type
5168 == EXPR_VARIABLE
))))
5170 capture_expr1
= expr1
;
5171 capture_expr2
= expr2
;
5172 expr1
= code
->next
->expr1
;
5173 expr2
= code
->next
->expr2
;
5174 aop
= OMP_ATOMIC_CAPTURE_OLD
;
5176 else if (atomic_code
->ext
.omp_clauses
->capture
)
5178 aop
= OMP_ATOMIC_CAPTURE_NEW
;
5179 capture_expr1
= code
->next
->expr1
;
5180 capture_expr2
= code
->next
->expr2
;
5182 var
= expr1
->symtree
->n
.sym
;
5185 gfc_init_se (&lse
, NULL
);
5186 gfc_init_se (&rse
, NULL
);
5187 gfc_init_se (&vse
, NULL
);
5188 gfc_start_block (&block
);
5190 if (((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
5191 != GFC_OMP_ATOMIC_WRITE
)
5192 && expr2
->expr_type
== EXPR_FUNCTION
5193 && expr2
->value
.function
.isym
5194 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
5195 expr2
= expr2
->value
.function
.actual
->expr
;
5197 if ((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
5198 == GFC_OMP_ATOMIC_READ
)
5200 gfc_conv_expr (&vse
, expr1
);
5201 gfc_add_block_to_block (&block
, &vse
.pre
);
5203 gfc_conv_expr (&lse
, expr2
);
5204 gfc_add_block_to_block (&block
, &lse
.pre
);
5205 type
= TREE_TYPE (lse
.expr
);
5206 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
5208 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
5209 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
5210 x
= convert (TREE_TYPE (vse
.expr
), x
);
5211 gfc_add_modify (&block
, vse
.expr
, x
);
5213 gfc_add_block_to_block (&block
, &lse
.pre
);
5214 gfc_add_block_to_block (&block
, &rse
.pre
);
5216 return gfc_finish_block (&block
);
5220 && capture_expr2
->expr_type
== EXPR_FUNCTION
5221 && capture_expr2
->value
.function
.isym
5222 && capture_expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
5223 capture_expr2
= capture_expr2
->value
.function
.actual
->expr
;
5224 gcc_assert (!capture_expr2
|| capture_expr2
->expr_type
== EXPR_VARIABLE
);
5226 if (aop
== OMP_ATOMIC_CAPTURE_OLD
)
5228 gfc_conv_expr (&vse
, capture_expr1
);
5229 gfc_add_block_to_block (&block
, &vse
.pre
);
5230 gfc_conv_expr (&lse
, capture_expr2
);
5231 gfc_add_block_to_block (&block
, &lse
.pre
);
5232 gfc_init_se (&lse
, NULL
);
5235 gfc_conv_expr (&lse
, expr1
);
5236 gfc_add_block_to_block (&block
, &lse
.pre
);
5237 type
= TREE_TYPE (lse
.expr
);
5238 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
5240 if (((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
5241 == GFC_OMP_ATOMIC_WRITE
)
5242 || (atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_SWAP
)
5245 gfc_conv_expr (&rse
, expr2
);
5246 gfc_add_block_to_block (&block
, &rse
.pre
);
5248 else if (expr2
->expr_type
== EXPR_OP
)
5251 switch (expr2
->value
.op
.op
)
5253 case INTRINSIC_PLUS
:
5256 case INTRINSIC_TIMES
:
5259 case INTRINSIC_MINUS
:
5262 case INTRINSIC_DIVIDE
:
5263 if (expr2
->ts
.type
== BT_INTEGER
)
5264 op
= TRUNC_DIV_EXPR
;
5269 op
= TRUTH_ANDIF_EXPR
;
5272 op
= TRUTH_ORIF_EXPR
;
5277 case INTRINSIC_NEQV
:
5283 e
= expr2
->value
.op
.op1
;
5284 if (e
->expr_type
== EXPR_FUNCTION
5285 && e
->value
.function
.isym
5286 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
5287 e
= e
->value
.function
.actual
->expr
;
5288 if (e
->expr_type
== EXPR_VARIABLE
5289 && e
->symtree
!= NULL
5290 && e
->symtree
->n
.sym
== var
)
5292 expr2
= expr2
->value
.op
.op2
;
5297 e
= expr2
->value
.op
.op2
;
5298 if (e
->expr_type
== EXPR_FUNCTION
5299 && e
->value
.function
.isym
5300 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
5301 e
= e
->value
.function
.actual
->expr
;
5302 gcc_assert (e
->expr_type
== EXPR_VARIABLE
5303 && e
->symtree
!= NULL
5304 && e
->symtree
->n
.sym
== var
);
5305 expr2
= expr2
->value
.op
.op1
;
5306 var_on_left
= false;
5308 gfc_conv_expr (&rse
, expr2
);
5309 gfc_add_block_to_block (&block
, &rse
.pre
);
5313 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
5314 switch (expr2
->value
.function
.isym
->id
)
5334 e
= expr2
->value
.function
.actual
->expr
;
5335 if (e
->expr_type
== EXPR_FUNCTION
5336 && e
->value
.function
.isym
5337 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
5338 e
= e
->value
.function
.actual
->expr
;
5339 gcc_assert (e
->expr_type
== EXPR_VARIABLE
5340 && e
->symtree
!= NULL
5341 && e
->symtree
->n
.sym
== var
);
5343 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
5344 gfc_add_block_to_block (&block
, &rse
.pre
);
5345 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
5347 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
5348 gfc_actual_arglist
*arg
;
5350 gfc_add_modify (&block
, accum
, rse
.expr
);
5351 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
5354 gfc_init_block (&rse
.pre
);
5355 gfc_conv_expr (&rse
, arg
->expr
);
5356 gfc_add_block_to_block (&block
, &rse
.pre
);
5357 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
5359 gfc_add_modify (&block
, accum
, x
);
5365 expr2
= expr2
->value
.function
.actual
->next
->expr
;
5368 lhsaddr
= save_expr (lhsaddr
);
5369 if (TREE_CODE (lhsaddr
) != SAVE_EXPR
5370 && (TREE_CODE (lhsaddr
) != ADDR_EXPR
5371 || !VAR_P (TREE_OPERAND (lhsaddr
, 0))))
5373 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
5374 it even after unsharing function body. */
5375 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
5376 DECL_CONTEXT (var
) = current_function_decl
;
5377 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
,
5378 NULL_TREE
, NULL_TREE
);
5383 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
5384 DECL_CONTEXT (var
) = current_function_decl
;
5385 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
, NULL
,
5387 lse
.expr
= build_fold_indirect_ref_loc (input_location
, lhsaddr
);
5388 compare
= convert (TREE_TYPE (lse
.expr
), compare
);
5389 compare
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
,
5393 if (expr2
->expr_type
== EXPR_VARIABLE
|| compare
)
5396 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
5398 if (((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
5399 == GFC_OMP_ATOMIC_WRITE
)
5400 || (atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_SWAP
)
5405 x
= convert (TREE_TYPE (rhs
),
5406 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
5408 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
5410 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
5413 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
5414 && TREE_CODE (type
) != COMPLEX_TYPE
)
5415 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
5416 TREE_TYPE (TREE_TYPE (rhs
)), x
);
5418 gfc_add_block_to_block (&block
, &lse
.pre
);
5419 gfc_add_block_to_block (&block
, &rse
.pre
);
5421 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
5423 gfc_conv_expr (&vse
, capture_expr1
);
5424 gfc_add_block_to_block (&block
, &vse
.pre
);
5425 gfc_add_block_to_block (&block
, &lse
.pre
);
5428 if (compare
&& else_branch
)
5430 tree var2
= create_tmp_var_raw (boolean_type_node
);
5431 DECL_CONTEXT (var2
) = current_function_decl
;
5432 comp_tgt
= build4 (TARGET_EXPR
, boolean_type_node
, var2
,
5433 boolean_false_node
, NULL
, NULL
);
5434 compare
= fold_build2_loc (input_location
, MODIFY_EXPR
, TREE_TYPE (var2
),
5436 TREE_OPERAND (compare
, 0) = comp_tgt
;
5437 compare
= omit_one_operand_loc (input_location
, boolean_type_node
,
5442 x
= build3_loc (input_location
, COND_EXPR
, type
, compare
,
5443 convert (type
, x
), lse
.expr
);
5445 if (aop
== OMP_ATOMIC
)
5447 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
5448 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
5449 OMP_ATOMIC_WEAK (x
) = atomic_code
->ext
.omp_clauses
->weak
;
5450 gfc_add_expr_to_block (&block
, x
);
5454 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
5455 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
5456 OMP_ATOMIC_WEAK (x
) = atomic_code
->ext
.omp_clauses
->weak
;
5457 if (compare
&& else_branch
)
5459 tree vtmp
= create_tmp_var_raw (TREE_TYPE (x
));
5460 DECL_CONTEXT (vtmp
) = current_function_decl
;
5461 x
= fold_build2_loc (input_location
, MODIFY_EXPR
,
5462 TREE_TYPE (vtmp
), vtmp
, x
);
5463 vtmp
= build4 (TARGET_EXPR
, TREE_TYPE (vtmp
), vtmp
,
5464 build_zero_cst (TREE_TYPE (vtmp
)), NULL
, NULL
);
5465 TREE_OPERAND (x
, 0) = vtmp
;
5466 tree x2
= convert (TREE_TYPE (vse
.expr
), vtmp
);
5467 x2
= fold_build2_loc (input_location
, MODIFY_EXPR
,
5468 TREE_TYPE (vse
.expr
), vse
.expr
, x2
);
5469 x2
= build3_loc (input_location
, COND_EXPR
, void_type_node
, comp_tgt
,
5471 x
= omit_one_operand_loc (input_location
, TREE_TYPE (x2
), x2
, x
);
5472 gfc_add_expr_to_block (&block
, x
);
5476 x
= convert (TREE_TYPE (vse
.expr
), x
);
5477 gfc_add_modify (&block
, vse
.expr
, x
);
5481 return gfc_finish_block (&block
);
5485 gfc_trans_omp_barrier (void)
5487 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
5488 return build_call_expr_loc (input_location
, decl
, 0);
5492 gfc_trans_omp_cancel (gfc_code
*code
)
5495 tree ifc
= boolean_true_node
;
5497 switch (code
->ext
.omp_clauses
->cancel
)
5499 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
5500 case OMP_CANCEL_DO
: mask
= 2; break;
5501 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
5502 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
5503 default: gcc_unreachable ();
5505 gfc_start_block (&block
);
5506 if (code
->ext
.omp_clauses
->if_expr
5507 || code
->ext
.omp_clauses
->if_exprs
[OMP_IF_CANCEL
])
5512 gcc_assert ((code
->ext
.omp_clauses
->if_expr
== NULL
)
5513 ^ (code
->ext
.omp_clauses
->if_exprs
[OMP_IF_CANCEL
] == NULL
));
5514 gfc_init_se (&se
, NULL
);
5515 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->if_expr
!= NULL
5516 ? code
->ext
.omp_clauses
->if_expr
5517 : code
->ext
.omp_clauses
->if_exprs
[OMP_IF_CANCEL
]);
5518 gfc_add_block_to_block (&block
, &se
.pre
);
5519 if_var
= gfc_evaluate_now (se
.expr
, &block
);
5520 gfc_add_block_to_block (&block
, &se
.post
);
5521 tree type
= TREE_TYPE (if_var
);
5522 ifc
= fold_build2_loc (input_location
, NE_EXPR
,
5523 boolean_type_node
, if_var
,
5524 build_zero_cst (type
));
5526 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
5527 tree c_bool_type
= TREE_TYPE (TREE_TYPE (decl
));
5528 ifc
= fold_convert (c_bool_type
, ifc
);
5529 gfc_add_expr_to_block (&block
,
5530 build_call_expr_loc (input_location
, decl
, 2,
5531 build_int_cst (integer_type_node
,
5533 return gfc_finish_block (&block
);
5537 gfc_trans_omp_cancellation_point (gfc_code
*code
)
5540 switch (code
->ext
.omp_clauses
->cancel
)
5542 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
5543 case OMP_CANCEL_DO
: mask
= 2; break;
5544 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
5545 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
5546 default: gcc_unreachable ();
5548 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT
);
5549 return build_call_expr_loc (input_location
, decl
, 1,
5550 build_int_cst (integer_type_node
, mask
));
5554 gfc_trans_omp_critical (gfc_code
*code
)
5557 tree stmt
, name
= NULL_TREE
;
5558 if (code
->ext
.omp_clauses
->critical_name
!= NULL
)
5559 name
= get_identifier (code
->ext
.omp_clauses
->critical_name
);
5560 gfc_start_block (&block
);
5561 stmt
= make_node (OMP_CRITICAL
);
5562 SET_EXPR_LOCATION (stmt
, gfc_get_location (&code
->loc
));
5563 TREE_TYPE (stmt
) = void_type_node
;
5564 OMP_CRITICAL_BODY (stmt
) = gfc_trans_code (code
->block
->next
);
5565 OMP_CRITICAL_NAME (stmt
) = name
;
5566 OMP_CRITICAL_CLAUSES (stmt
) = gfc_trans_omp_clauses (&block
,
5567 code
->ext
.omp_clauses
,
5569 gfc_add_expr_to_block (&block
, stmt
);
5570 return gfc_finish_block (&block
);
5573 typedef struct dovar_init_d
{
5581 gfc_nonrect_loop_expr (stmtblock_t
*pblock
, gfc_se
*sep
, int loop_n
,
5582 gfc_code
*code
, gfc_expr
*expr
, vec
<dovar_init
> *inits
,
5583 int simple
, gfc_expr
*curr_loop_var
)
5586 for (i
= 0; i
< loop_n
; i
++)
5588 gcc_assert (code
->ext
.iterator
->var
->expr_type
== EXPR_VARIABLE
);
5589 if (gfc_find_sym_in_expr (code
->ext
.iterator
->var
->symtree
->n
.sym
, expr
))
5591 code
= code
->block
->next
;
5596 /* Canonical format: TREE_VEC with [var, multiplier, offset]. */
5597 gfc_symbol
*var
= code
->ext
.iterator
->var
->symtree
->n
.sym
;
5599 tree tree_var
= NULL_TREE
;
5600 tree a1
= integer_one_node
;
5601 tree a2
= integer_zero_node
;
5605 /* FIXME: Handle non-const iter steps, cf. PR fortran/110735. */
5606 sorry_at (gfc_get_location (&curr_loop_var
->where
),
5607 "non-rectangular loop nest with non-constant step for %qs",
5608 curr_loop_var
->symtree
->n
.sym
->name
);
5614 FOR_EACH_VEC_ELT (*inits
, ix
, di
)
5617 if (!di
->non_unit_iter
)
5619 tree_var
= di
->init
;
5620 gcc_assert (DECL_P (tree_var
));
5625 /* FIXME: Handle non-const iter steps, cf. PR fortran/110735. */
5626 sorry_at (gfc_get_location (&code
->loc
),
5627 "non-rectangular loop nest with non-constant step "
5628 "for %qs", var
->name
);
5629 inform (gfc_get_location (&expr
->where
), "Used here");
5633 if (tree_var
== NULL_TREE
)
5634 tree_var
= var
->backend_decl
;
5636 if (expr
->expr_type
== EXPR_VARIABLE
)
5637 gcc_assert (expr
->symtree
->n
.sym
== var
);
5638 else if (expr
->expr_type
!= EXPR_OP
5639 || (expr
->value
.op
.op
!= INTRINSIC_TIMES
5640 && expr
->value
.op
.op
!= INTRINSIC_PLUS
5641 && expr
->value
.op
.op
!= INTRINSIC_MINUS
))
5646 gfc_expr
*et
= NULL
, *eo
= NULL
, *e
= expr
;
5647 if (expr
->value
.op
.op
!= INTRINSIC_TIMES
)
5649 if (gfc_find_sym_in_expr (var
, expr
->value
.op
.op1
))
5651 e
= expr
->value
.op
.op1
;
5652 eo
= expr
->value
.op
.op2
;
5656 eo
= expr
->value
.op
.op1
;
5657 e
= expr
->value
.op
.op2
;
5660 if (e
->value
.op
.op
== INTRINSIC_TIMES
)
5662 if (e
->value
.op
.op1
->expr_type
== EXPR_VARIABLE
5663 && e
->value
.op
.op1
->symtree
->n
.sym
== var
)
5664 et
= e
->value
.op
.op2
;
5667 et
= e
->value
.op
.op1
;
5668 gcc_assert (e
->value
.op
.op2
->expr_type
== EXPR_VARIABLE
5669 && e
->value
.op
.op2
->symtree
->n
.sym
== var
);
5673 gcc_assert (e
->expr_type
== EXPR_VARIABLE
&& e
->symtree
->n
.sym
== var
);
5676 gfc_init_se (&se
, NULL
);
5677 gfc_conv_expr_val (&se
, et
);
5678 gfc_add_block_to_block (pblock
, &se
.pre
);
5683 gfc_init_se (&se
, NULL
);
5684 gfc_conv_expr_val (&se
, eo
);
5685 gfc_add_block_to_block (pblock
, &se
.pre
);
5687 if (expr
->value
.op
.op
== INTRINSIC_MINUS
&& expr
->value
.op
.op2
== eo
)
5688 /* outer-var - a2. */
5689 a2
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (a2
), a2
);
5690 else if (expr
->value
.op
.op
== INTRINSIC_MINUS
)
5691 /* a2 - outer-var. */
5692 a1
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (a1
), a1
);
5694 a1
= DECL_P (a1
) ? a1
: gfc_evaluate_now (a1
, pblock
);
5695 a2
= DECL_P (a2
) ? a2
: gfc_evaluate_now (a2
, pblock
);
5698 gfc_init_se (sep
, NULL
);
5699 sep
->expr
= make_tree_vec (3);
5700 TREE_VEC_ELT (sep
->expr
, 0) = tree_var
;
5701 TREE_VEC_ELT (sep
->expr
, 1) = fold_convert (TREE_TYPE (tree_var
), a1
);
5702 TREE_VEC_ELT (sep
->expr
, 2) = fold_convert (TREE_TYPE (tree_var
), a2
);
5708 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
5709 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
5712 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
, orig_decls
;
5713 tree local_dovar
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
5716 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
5717 int i
, collapse
= clauses
->collapse
;
5718 vec
<dovar_init
> inits
= vNULL
;
5721 vec
<tree
, va_heap
, vl_embed
> *saved_doacross_steps
= doacross_steps
;
5722 gfc_expr_list
*tile
= do_clauses
? do_clauses
->tile_list
: clauses
->tile_list
;
5723 gfc_code
*orig_code
= code
;
5725 /* Both collapsed and tiled loops are lowered the same way. In
5726 OpenACC, those clauses are not compatible, so prioritize the tile
5727 clause, if present. */
5731 for (gfc_expr_list
*el
= tile
; el
; el
= el
->next
)
5735 doacross_steps
= NULL
;
5736 if (clauses
->orderedc
)
5737 collapse
= clauses
->orderedc
;
5741 code
= code
->block
->next
;
5742 gcc_assert (code
->op
== EXEC_DO
);
5744 init
= make_tree_vec (collapse
);
5745 cond
= make_tree_vec (collapse
);
5746 incr
= make_tree_vec (collapse
);
5747 orig_decls
= clauses
->ordered
? make_tree_vec (collapse
) : NULL_TREE
;
5751 gfc_start_block (&block
);
5755 /* simd schedule modifier is only useful for composite do simd and other
5756 constructs including that, where gfc_trans_omp_do is only called
5757 on the simd construct and DO's clauses are translated elsewhere. */
5758 do_clauses
->sched_simd
= false;
5760 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
5762 for (i
= 0; i
< collapse
; i
++)
5765 int dovar_found
= 0;
5770 gfc_omp_namelist
*n
= NULL
;
5771 if (op
== EXEC_OMP_SIMD
&& collapse
== 1)
5772 for (n
= clauses
->lists
[OMP_LIST_LINEAR
];
5773 n
!= NULL
; n
= n
->next
)
5774 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
5779 if (n
== NULL
&& op
!= EXEC_OMP_DISTRIBUTE
)
5780 for (n
= clauses
->lists
[OMP_LIST_LASTPRIVATE
];
5781 n
!= NULL
; n
= n
->next
)
5782 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
5788 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
5789 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
5796 /* Evaluate all the expressions in the iterator. */
5797 gfc_init_se (&se
, NULL
);
5798 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
5799 gfc_add_block_to_block (pblock
, &se
.pre
);
5800 local_dovar
= dovar_decl
= dovar
= se
.expr
;
5801 type
= TREE_TYPE (dovar
);
5802 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
5804 gfc_init_se (&se
, NULL
);
5805 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
5806 gfc_add_block_to_block (pblock
, &se
.pre
);
5807 step
= gfc_evaluate_now (se
.expr
, pblock
);
5809 if (TREE_CODE (step
) == INTEGER_CST
)
5810 simple
= tree_int_cst_sgn (step
);
5812 gfc_init_se (&se
, NULL
);
5813 if (!clauses
->non_rectangular
5814 || !gfc_nonrect_loop_expr (pblock
, &se
, i
, orig_code
->block
->next
,
5815 code
->ext
.iterator
->start
, &inits
, simple
,
5816 code
->ext
.iterator
->var
))
5818 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
5819 gfc_add_block_to_block (pblock
, &se
.pre
);
5820 if (!DECL_P (se
.expr
))
5821 se
.expr
= gfc_evaluate_now (se
.expr
, pblock
);
5825 gfc_init_se (&se
, NULL
);
5826 if (!clauses
->non_rectangular
5827 || !gfc_nonrect_loop_expr (pblock
, &se
, i
, orig_code
->block
->next
,
5828 code
->ext
.iterator
->end
, &inits
, simple
,
5829 code
->ext
.iterator
->var
))
5831 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
5832 gfc_add_block_to_block (pblock
, &se
.pre
);
5833 if (!DECL_P (se
.expr
))
5834 se
.expr
= gfc_evaluate_now (se
.expr
, pblock
);
5838 if (!DECL_P (dovar
))
5840 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
5842 if (simple
&& !DECL_P (dovar
))
5844 const char *name
= code
->ext
.iterator
->var
->symtree
->n
.sym
->name
;
5845 local_dovar
= gfc_create_var (type
, name
);
5846 dovar_init e
= {code
->ext
.iterator
->var
->symtree
->n
.sym
,
5847 dovar
, local_dovar
, false};
5848 inits
.safe_push (e
);
5853 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, local_dovar
, from
);
5854 /* The condition should not be folded. */
5855 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
5856 ? LE_EXPR
: GE_EXPR
,
5857 logical_type_node
, local_dovar
,
5859 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
5860 type
, local_dovar
, step
);
5861 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
5864 TREE_VEC_ELT (incr
, i
));
5865 if (orig_decls
&& !clauses
->orderedc
)
5867 else if (orig_decls
)
5868 TREE_VEC_ELT (orig_decls
, i
) = dovar_decl
;
5872 /* STEP is not 1 or -1. Use:
5873 for (count = 0; count < (to + step - from) / step; count++)
5875 dovar = from + count * step;
5879 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
5880 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
5881 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
5883 tmp
= gfc_evaluate_now (tmp
, pblock
);
5884 local_dovar
= gfc_create_var (type
, "count");
5885 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, local_dovar
,
5886 build_int_cst (type
, 0));
5887 /* The condition should not be folded. */
5888 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
5891 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
5893 build_int_cst (type
, 1));
5894 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
5897 TREE_VEC_ELT (incr
, i
));
5899 /* Initialize DOVAR. */
5900 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, local_dovar
,
5902 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
5903 dovar_init e
= {code
->ext
.iterator
->var
->symtree
->n
.sym
,
5905 inits
.safe_push (e
);
5906 if (clauses
->orderedc
)
5908 if (doacross_steps
== NULL
)
5909 vec_safe_grow_cleared (doacross_steps
, clauses
->orderedc
, true);
5910 (*doacross_steps
)[i
] = step
;
5913 TREE_VEC_ELT (orig_decls
, i
) = dovar_decl
;
5916 if (dovar_found
== 3
5917 && op
== EXEC_OMP_SIMD
5919 && local_dovar
!= dovar
)
5921 for (tmp
= omp_clauses
; tmp
; tmp
= OMP_CLAUSE_CHAIN (tmp
))
5922 if (OMP_CLAUSE_CODE (tmp
) == OMP_CLAUSE_LINEAR
5923 && OMP_CLAUSE_DECL (tmp
) == dovar
)
5925 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
5929 if (!dovar_found
&& op
== EXEC_OMP_SIMD
)
5933 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
5934 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
5935 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
5936 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
5937 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
5938 if (local_dovar
!= dovar
)
5942 else if (!dovar_found
&& local_dovar
!= dovar
)
5944 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
5945 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
5946 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
5948 if (dovar_found
> 1)
5953 if (local_dovar
!= dovar
)
5955 /* If dovar is lastprivate, but different counter is used,
5956 dovar += step needs to be added to
5957 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
5958 will have the value on entry of the last loop, rather
5959 than value after iterator increment. */
5960 if (clauses
->orderedc
)
5962 if (clauses
->collapse
<= 1 || i
>= clauses
->collapse
)
5965 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
5967 build_one_cst (type
));
5968 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
,
5970 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
5974 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
5976 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
5978 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5979 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5980 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
5982 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
5985 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5986 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
5988 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
5992 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
5994 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5995 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5996 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
5998 tree l
= build_omp_clause (input_location
,
5999 OMP_CLAUSE_LASTPRIVATE
);
6000 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6001 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (l
) = 1;
6002 OMP_CLAUSE_DECL (l
) = dovar_decl
;
6003 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
6004 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
6006 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
6010 gcc_assert (local_dovar
== dovar
|| c
!= NULL
);
6012 if (local_dovar
!= dovar
)
6014 if (op
!= EXEC_OMP_SIMD
|| dovar_found
== 1)
6015 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
6016 else if (collapse
== 1)
6018 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
6019 OMP_CLAUSE_LINEAR_STEP (tmp
) = build_int_cst (type
, 1);
6020 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
6021 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
6024 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
6025 OMP_CLAUSE_DECL (tmp
) = local_dovar
;
6026 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
6029 if (i
+ 1 < collapse
)
6030 code
= code
->block
->next
;
6033 if (pblock
!= &block
)
6036 gfc_start_block (&block
);
6039 gfc_start_block (&body
);
6041 FOR_EACH_VEC_ELT (inits
, ix
, di
)
6042 gfc_add_modify (&body
, di
->var
, di
->init
);
6045 /* Cycle statement is implemented with a goto. Exit statement must not be
6046 present for this loop. */
6047 cycle_label
= gfc_build_label_decl (NULL_TREE
);
6049 /* Put these labels where they can be found later. */
6051 code
->cycle_label
= cycle_label
;
6052 code
->exit_label
= NULL_TREE
;
6054 /* Main loop body. */
6055 if (clauses
->lists
[OMP_LIST_REDUCTION_INSCAN
])
6057 gfc_code
*code1
, *scan
, *code2
, *tmpcode
;
6058 code1
= tmpcode
= code
->block
->next
;
6059 if (tmpcode
&& tmpcode
->op
!= EXEC_OMP_SCAN
)
6060 while (tmpcode
&& tmpcode
->next
&& tmpcode
->next
->op
!= EXEC_OMP_SCAN
)
6061 tmpcode
= tmpcode
->next
;
6062 scan
= tmpcode
->op
== EXEC_OMP_SCAN
? tmpcode
: tmpcode
->next
;
6064 tmpcode
->next
= NULL
;
6066 gcc_assert (scan
->op
== EXEC_OMP_SCAN
);
6067 location_t loc
= gfc_get_location (&scan
->loc
);
6069 tmp
= code1
!= scan
? gfc_trans_code (code1
) : build_empty_stmt (loc
);
6070 tmp
= build2 (OMP_SCAN
, void_type_node
, tmp
, NULL_TREE
);
6071 SET_EXPR_LOCATION (tmp
, loc
);
6072 gfc_add_expr_to_block (&body
, tmp
);
6073 input_location
= loc
;
6074 tree c
= gfc_trans_omp_clauses (&body
, scan
->ext
.omp_clauses
, scan
->loc
);
6075 tmp
= code2
? gfc_trans_code (code2
) : build_empty_stmt (loc
);
6076 tmp
= build2 (OMP_SCAN
, void_type_node
, tmp
, c
);
6077 SET_EXPR_LOCATION (tmp
, loc
);
6079 tmpcode
->next
= scan
;
6082 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
6083 gfc_add_expr_to_block (&body
, tmp
);
6085 /* Label for cycle statements (if needed). */
6086 if (TREE_USED (cycle_label
))
6088 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
6089 gfc_add_expr_to_block (&body
, tmp
);
6092 /* End of loop body. */
6095 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
6096 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
6097 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
6098 case EXEC_OMP_LOOP
: stmt
= make_node (OMP_LOOP
); break;
6099 case EXEC_OMP_TASKLOOP
: stmt
= make_node (OMP_TASKLOOP
); break;
6100 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
6101 default: gcc_unreachable ();
6104 SET_EXPR_LOCATION (stmt
, gfc_get_location (&orig_code
->loc
));
6105 TREE_TYPE (stmt
) = void_type_node
;
6106 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
6107 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
6108 OMP_FOR_INIT (stmt
) = init
;
6109 OMP_FOR_COND (stmt
) = cond
;
6110 OMP_FOR_INCR (stmt
) = incr
;
6112 OMP_FOR_ORIG_DECLS (stmt
) = orig_decls
;
6113 OMP_FOR_NON_RECTANGULAR (stmt
) = clauses
->non_rectangular
;
6114 gfc_add_expr_to_block (&block
, stmt
);
6116 vec_free (doacross_steps
);
6117 doacross_steps
= saved_doacross_steps
;
6119 return gfc_finish_block (&block
);
6122 /* Translate combined OpenACC 'parallel loop', 'kernels loop', 'serial loop'
6126 gfc_trans_oacc_combined_directive (gfc_code
*code
)
6128 stmtblock_t block
, *pblock
= NULL
;
6129 gfc_omp_clauses construct_clauses
, loop_clauses
;
6130 tree stmt
, oacc_clauses
= NULL_TREE
;
6131 enum tree_code construct_code
;
6132 location_t loc
= input_location
;
6136 case EXEC_OACC_PARALLEL_LOOP
:
6137 construct_code
= OACC_PARALLEL
;
6139 case EXEC_OACC_KERNELS_LOOP
:
6140 construct_code
= OACC_KERNELS
;
6142 case EXEC_OACC_SERIAL_LOOP
:
6143 construct_code
= OACC_SERIAL
;
6149 gfc_start_block (&block
);
6151 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
6152 if (code
->ext
.omp_clauses
!= NULL
)
6154 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
6155 sizeof (construct_clauses
));
6156 loop_clauses
.collapse
= construct_clauses
.collapse
;
6157 loop_clauses
.gang
= construct_clauses
.gang
;
6158 loop_clauses
.gang_static
= construct_clauses
.gang_static
;
6159 loop_clauses
.gang_num_expr
= construct_clauses
.gang_num_expr
;
6160 loop_clauses
.gang_static_expr
= construct_clauses
.gang_static_expr
;
6161 loop_clauses
.vector
= construct_clauses
.vector
;
6162 loop_clauses
.vector_expr
= construct_clauses
.vector_expr
;
6163 loop_clauses
.worker
= construct_clauses
.worker
;
6164 loop_clauses
.worker_expr
= construct_clauses
.worker_expr
;
6165 loop_clauses
.seq
= construct_clauses
.seq
;
6166 loop_clauses
.par_auto
= construct_clauses
.par_auto
;
6167 loop_clauses
.independent
= construct_clauses
.independent
;
6168 loop_clauses
.tile_list
= construct_clauses
.tile_list
;
6169 loop_clauses
.lists
[OMP_LIST_PRIVATE
]
6170 = construct_clauses
.lists
[OMP_LIST_PRIVATE
];
6171 loop_clauses
.lists
[OMP_LIST_REDUCTION
]
6172 = construct_clauses
.lists
[OMP_LIST_REDUCTION
];
6173 construct_clauses
.gang
= false;
6174 construct_clauses
.gang_static
= false;
6175 construct_clauses
.gang_num_expr
= NULL
;
6176 construct_clauses
.gang_static_expr
= NULL
;
6177 construct_clauses
.vector
= false;
6178 construct_clauses
.vector_expr
= NULL
;
6179 construct_clauses
.worker
= false;
6180 construct_clauses
.worker_expr
= NULL
;
6181 construct_clauses
.seq
= false;
6182 construct_clauses
.par_auto
= false;
6183 construct_clauses
.independent
= false;
6184 construct_clauses
.independent
= false;
6185 construct_clauses
.tile_list
= NULL
;
6186 construct_clauses
.lists
[OMP_LIST_PRIVATE
] = NULL
;
6187 if (construct_code
== OACC_KERNELS
)
6188 construct_clauses
.lists
[OMP_LIST_REDUCTION
] = NULL
;
6189 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
6190 code
->loc
, false, true);
6192 if (!loop_clauses
.seq
)
6196 stmt
= gfc_trans_omp_do (code
, EXEC_OACC_LOOP
, pblock
, &loop_clauses
, NULL
);
6197 protected_set_expr_location (stmt
, loc
);
6198 if (TREE_CODE (stmt
) != BIND_EXPR
)
6199 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6202 stmt
= build2_loc (loc
, construct_code
, void_type_node
, stmt
, oacc_clauses
);
6203 gfc_add_expr_to_block (&block
, stmt
);
6204 return gfc_finish_block (&block
);
6208 gfc_trans_omp_depobj (gfc_code
*code
)
6212 gfc_init_se (&se
, NULL
);
6213 gfc_init_block (&block
);
6214 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->depobj
);
6215 gcc_assert (se
.pre
.head
== NULL
&& se
.post
.head
== NULL
);
6216 tree depobj
= se
.expr
;
6217 location_t loc
= EXPR_LOCATION (depobj
);
6218 if (!POINTER_TYPE_P (TREE_TYPE (depobj
)))
6219 depobj
= gfc_build_addr_expr (NULL
, depobj
);
6220 depobj
= fold_convert (build_pointer_type_for_mode (ptr_type_node
,
6221 TYPE_MODE (ptr_type_node
),
6223 gfc_omp_namelist
*n
= code
->ext
.omp_clauses
->lists
[OMP_LIST_DEPEND
];
6227 if (!n
->sym
) /* omp_all_memory. */
6228 var
= null_pointer_node
;
6229 else if (n
->expr
&& n
->expr
->ref
->u
.ar
.type
!= AR_FULL
)
6231 gfc_init_se (&se
, NULL
);
6232 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
6234 gfc_conv_expr_reference (&se
, n
->expr
);
6239 gfc_conv_expr_descriptor (&se
, n
->expr
);
6240 var
= gfc_conv_array_data (se
.expr
);
6242 gfc_add_block_to_block (&block
, &se
.pre
);
6243 gfc_add_block_to_block (&block
, &se
.post
);
6244 gcc_assert (POINTER_TYPE_P (TREE_TYPE (var
)));
6248 var
= gfc_get_symbol_decl (n
->sym
);
6249 if (POINTER_TYPE_P (TREE_TYPE (var
))
6250 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (var
))))
6251 var
= build_fold_indirect_ref (var
);
6252 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (var
)))
6254 var
= gfc_conv_descriptor_data_get (var
);
6255 gcc_assert (POINTER_TYPE_P (TREE_TYPE (var
)));
6257 else if ((n
->sym
->attr
.allocatable
|| n
->sym
->attr
.pointer
)
6258 && n
->sym
->attr
.dummy
)
6259 var
= build_fold_indirect_ref (var
);
6260 else if (!POINTER_TYPE_P (TREE_TYPE (var
))
6261 || (n
->sym
->ts
.f90_type
== BT_VOID
6262 && !POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (var
)))
6263 && !GFC_ARRAY_TYPE_P (TREE_TYPE (TREE_TYPE (var
)))))
6265 TREE_ADDRESSABLE (var
) = 1;
6266 var
= gfc_build_addr_expr (NULL
, var
);
6269 depobj
= save_expr (depobj
);
6270 tree r
= build_fold_indirect_ref_loc (loc
, depobj
);
6271 gfc_add_expr_to_block (&block
,
6272 build2 (MODIFY_EXPR
, void_type_node
, r
, var
));
6275 /* Only one may be set. */
6276 gcc_assert (((int)(n
!= NULL
) + (int)(code
->ext
.omp_clauses
->destroy
)
6277 + (int)(code
->ext
.omp_clauses
->depobj_update
!= OMP_DEPEND_UNSET
))
6279 int k
= -1; /* omp_clauses->destroy */
6280 if (!code
->ext
.omp_clauses
->destroy
)
6281 switch (code
->ext
.omp_clauses
->depobj_update
!= OMP_DEPEND_UNSET
6282 ? code
->ext
.omp_clauses
->depobj_update
: n
->u
.depend_doacross_op
)
6284 case OMP_DEPEND_IN
: k
= GOMP_DEPEND_IN
; break;
6285 case OMP_DEPEND_OUT
: k
= GOMP_DEPEND_OUT
; break;
6286 case OMP_DEPEND_INOUT
: k
= GOMP_DEPEND_INOUT
; break;
6287 case OMP_DEPEND_INOUTSET
: k
= GOMP_DEPEND_INOUTSET
; break;
6288 case OMP_DEPEND_MUTEXINOUTSET
: k
= GOMP_DEPEND_MUTEXINOUTSET
; break;
6289 default: gcc_unreachable ();
6291 tree t
= build_int_cst (ptr_type_node
, k
);
6292 depobj
= build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (depobj
), depobj
,
6293 TYPE_SIZE_UNIT (ptr_type_node
));
6294 depobj
= build_fold_indirect_ref_loc (loc
, depobj
);
6295 gfc_add_expr_to_block (&block
, build2 (MODIFY_EXPR
, void_type_node
, depobj
, t
));
6297 return gfc_finish_block (&block
);
6301 gfc_trans_omp_error (gfc_code
*code
)
6306 bool fatal
= code
->ext
.omp_clauses
->severity
== OMP_SEVERITY_FATAL
;
6307 tree fndecl
= builtin_decl_explicit (fatal
? BUILT_IN_GOMP_ERROR
6308 : BUILT_IN_GOMP_WARNING
);
6309 gfc_start_block (&block
);
6310 gfc_init_se (&se
, NULL
);
6311 if (!code
->ext
.omp_clauses
->message
)
6313 message
= null_pointer_node
;
6314 len
= build_int_cst (size_type_node
, 0);
6318 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->message
);
6320 if (!POINTER_TYPE_P (TREE_TYPE (message
)))
6321 /* To ensure an ARRAY_TYPE is not passed as such. */
6322 message
= gfc_build_addr_expr (NULL
, message
);
6323 len
= se
.string_length
;
6325 gfc_add_block_to_block (&block
, &se
.pre
);
6326 gfc_add_expr_to_block (&block
, build_call_expr_loc (input_location
, fndecl
,
6328 gfc_add_block_to_block (&block
, &se
.post
);
6329 return gfc_finish_block (&block
);
6333 gfc_trans_omp_flush (gfc_code
*code
)
6336 if (!code
->ext
.omp_clauses
6337 || code
->ext
.omp_clauses
->memorder
== OMP_MEMORDER_UNSET
6338 || code
->ext
.omp_clauses
->memorder
== OMP_MEMORDER_SEQ_CST
)
6340 call
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
6341 call
= build_call_expr_loc (input_location
, call
, 0);
6345 enum memmodel mo
= MEMMODEL_LAST
;
6346 switch (code
->ext
.omp_clauses
->memorder
)
6348 case OMP_MEMORDER_ACQ_REL
: mo
= MEMMODEL_ACQ_REL
; break;
6349 case OMP_MEMORDER_RELEASE
: mo
= MEMMODEL_RELEASE
; break;
6350 case OMP_MEMORDER_ACQUIRE
: mo
= MEMMODEL_ACQUIRE
; break;
6351 default: gcc_unreachable (); break;
6353 call
= builtin_decl_explicit (BUILT_IN_ATOMIC_THREAD_FENCE
);
6354 call
= build_call_expr_loc (input_location
, call
, 1,
6355 build_int_cst (integer_type_node
, mo
));
6361 gfc_trans_omp_master (gfc_code
*code
)
6363 tree stmt
= gfc_trans_code (code
->block
->next
);
6364 if (IS_EMPTY_STMT (stmt
))
6366 return build1_v (OMP_MASTER
, stmt
);
6370 gfc_trans_omp_masked (gfc_code
*code
, gfc_omp_clauses
*clauses
)
6373 tree body
= gfc_trans_code (code
->block
->next
);
6374 if (IS_EMPTY_STMT (body
))
6377 clauses
= code
->ext
.omp_clauses
;
6378 gfc_start_block (&block
);
6379 tree omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
6380 tree stmt
= make_node (OMP_MASKED
);
6381 SET_EXPR_LOCATION (stmt
, gfc_get_location (&code
->loc
));
6382 TREE_TYPE (stmt
) = void_type_node
;
6383 OMP_MASKED_BODY (stmt
) = body
;
6384 OMP_MASKED_CLAUSES (stmt
) = omp_clauses
;
6385 gfc_add_expr_to_block (&block
, stmt
);
6386 return gfc_finish_block (&block
);
6391 gfc_trans_omp_ordered (gfc_code
*code
)
6395 if (!code
->ext
.omp_clauses
->simd
)
6396 return gfc_trans_code (code
->block
? code
->block
->next
: NULL
);
6397 code
->ext
.omp_clauses
->threads
= 0;
6399 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, code
->ext
.omp_clauses
,
6401 return build2_loc (input_location
, OMP_ORDERED
, void_type_node
,
6402 code
->block
? gfc_trans_code (code
->block
->next
)
6403 : NULL_TREE
, omp_clauses
);
6407 gfc_trans_omp_parallel (gfc_code
*code
)
6410 tree stmt
, omp_clauses
;
6412 gfc_start_block (&block
);
6413 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6416 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
6417 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6418 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
6420 gfc_add_expr_to_block (&block
, stmt
);
6421 return gfc_finish_block (&block
);
6428 GFC_OMP_SPLIT_PARALLEL
,
6429 GFC_OMP_SPLIT_DISTRIBUTE
,
6430 GFC_OMP_SPLIT_TEAMS
,
6431 GFC_OMP_SPLIT_TARGET
,
6432 GFC_OMP_SPLIT_TASKLOOP
,
6433 GFC_OMP_SPLIT_MASKED
,
6439 GFC_OMP_MASK_SIMD
= (1 << GFC_OMP_SPLIT_SIMD
),
6440 GFC_OMP_MASK_DO
= (1 << GFC_OMP_SPLIT_DO
),
6441 GFC_OMP_MASK_PARALLEL
= (1 << GFC_OMP_SPLIT_PARALLEL
),
6442 GFC_OMP_MASK_DISTRIBUTE
= (1 << GFC_OMP_SPLIT_DISTRIBUTE
),
6443 GFC_OMP_MASK_TEAMS
= (1 << GFC_OMP_SPLIT_TEAMS
),
6444 GFC_OMP_MASK_TARGET
= (1 << GFC_OMP_SPLIT_TARGET
),
6445 GFC_OMP_MASK_TASKLOOP
= (1 << GFC_OMP_SPLIT_TASKLOOP
),
6446 GFC_OMP_MASK_MASKED
= (1 << GFC_OMP_SPLIT_MASKED
)
6449 /* If a var is in lastprivate/firstprivate/reduction but not in a
6450 data mapping/sharing clause, add it to 'map(tofrom:' if is_target
6451 and to 'shared' otherwise. */
6453 gfc_add_clause_implicitly (gfc_omp_clauses
*clauses_out
,
6454 gfc_omp_clauses
*clauses_in
,
6455 bool is_target
, bool is_parallel_do
)
6457 int clauselist_to_add
= is_target
? OMP_LIST_MAP
: OMP_LIST_SHARED
;
6458 gfc_omp_namelist
*tail
= NULL
;
6459 for (int i
= 0; i
< 5; ++i
)
6461 gfc_omp_namelist
*n
;
6464 case 0: n
= clauses_in
->lists
[OMP_LIST_FIRSTPRIVATE
]; break;
6465 case 1: n
= clauses_in
->lists
[OMP_LIST_LASTPRIVATE
]; break;
6466 case 2: n
= clauses_in
->lists
[OMP_LIST_REDUCTION
]; break;
6467 case 3: n
= clauses_in
->lists
[OMP_LIST_REDUCTION_INSCAN
]; break;
6468 case 4: n
= clauses_in
->lists
[OMP_LIST_REDUCTION_TASK
]; break;
6469 default: gcc_unreachable ();
6471 for (; n
!= NULL
; n
= n
->next
)
6473 gfc_omp_namelist
*n2
, **n_firstp
= NULL
, **n_lastp
= NULL
;
6474 for (int j
= 0; j
< 6; ++j
)
6476 gfc_omp_namelist
**n2ref
= NULL
, *prev2
= NULL
;
6480 n2ref
= &clauses_out
->lists
[clauselist_to_add
];
6483 n2ref
= &clauses_out
->lists
[OMP_LIST_FIRSTPRIVATE
];
6487 n2ref
= &clauses_in
->lists
[OMP_LIST_LASTPRIVATE
];
6489 n2ref
= &clauses_out
->lists
[OMP_LIST_LASTPRIVATE
];
6491 case 3: n2ref
= &clauses_out
->lists
[OMP_LIST_REDUCTION
]; break;
6493 n2ref
= &clauses_out
->lists
[OMP_LIST_REDUCTION_INSCAN
];
6496 n2ref
= &clauses_out
->lists
[OMP_LIST_REDUCTION_TASK
];
6498 default: gcc_unreachable ();
6500 for (n2
= *n2ref
; n2
!= NULL
; prev2
= n2
, n2
= n2
->next
)
6501 if (n2
->sym
== n
->sym
)
6505 if (j
== 0 /* clauselist_to_add */)
6506 break; /* Already present. */
6507 if (j
== 1 /* OMP_LIST_FIRSTPRIVATE */)
6509 n_firstp
= prev2
? &prev2
->next
: n2ref
;
6512 if (j
== 2 /* OMP_LIST_LASTPRIVATE */)
6514 n_lastp
= prev2
? &prev2
->next
: n2ref
;
6520 if (n_firstp
&& n_lastp
)
6522 /* For parallel do, GCC puts firstprivate/lastprivate
6526 *n_firstp
= (*n_firstp
)->next
;
6528 *n_lastp
= (*n_lastp
)->next
;
6530 else if (is_target
&& n_lastp
)
6532 else if (n2
|| n_firstp
|| n_lastp
)
6534 if (clauses_out
->lists
[clauselist_to_add
]
6535 && (clauses_out
->lists
[clauselist_to_add
]
6536 == clauses_in
->lists
[clauselist_to_add
]))
6538 gfc_omp_namelist
*p
= NULL
;
6539 for (n2
= clauses_in
->lists
[clauselist_to_add
]; n2
; n2
= n2
->next
)
6543 p
->next
= gfc_get_omp_namelist ();
6548 p
= gfc_get_omp_namelist ();
6549 clauses_out
->lists
[clauselist_to_add
] = p
;
6556 tail
= clauses_out
->lists
[clauselist_to_add
];
6557 for (; tail
&& tail
->next
; tail
= tail
->next
)
6560 n2
= gfc_get_omp_namelist ();
6561 n2
->where
= n
->where
;
6564 n2
->u
.map_op
= OMP_MAP_TOFROM
;
6571 clauses_out
->lists
[clauselist_to_add
] = n2
;
6576 /* Kind of opposite to above, add firstprivate to CLAUSES_OUT if it is mapped
6577 in CLAUSES_IN's FIRSTPRIVATE list but not its MAP list. */
6580 gfc_add_firstprivate_if_unmapped (gfc_omp_clauses
*clauses_out
,
6581 gfc_omp_clauses
*clauses_in
)
6583 gfc_omp_namelist
*n
= clauses_in
->lists
[OMP_LIST_FIRSTPRIVATE
];
6584 gfc_omp_namelist
**tail
= NULL
;
6586 for (; n
!= NULL
; n
= n
->next
)
6588 gfc_omp_namelist
*n2
= clauses_out
->lists
[OMP_LIST_MAP
];
6589 for (; n2
!= NULL
; n2
= n2
->next
)
6590 if (n
->sym
== n2
->sym
)
6594 gfc_omp_namelist
*dup
= gfc_get_omp_namelist ();
6599 tail
= &clauses_out
->lists
[OMP_LIST_FIRSTPRIVATE
];
6600 while (*tail
&& (*tail
)->next
)
6601 tail
= &(*tail
)->next
;
6604 tail
= &(*tail
)->next
;
6610 gfc_free_split_omp_clauses (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
6612 for (int i
= 0; i
< GFC_OMP_SPLIT_NUM
; ++i
)
6613 for (int j
= 0; j
< OMP_LIST_NUM
; ++j
)
6614 if (clausesa
[i
].lists
[j
] && clausesa
[i
].lists
[j
] != code
->ext
.omp_clauses
->lists
[j
])
6615 for (gfc_omp_namelist
*n
= clausesa
[i
].lists
[j
]; n
;)
6617 gfc_omp_namelist
*p
= n
;
6624 gfc_split_omp_clauses (gfc_code
*code
,
6625 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
6627 int mask
= 0, innermost
= 0;
6628 bool is_loop
= false;
6629 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
6632 case EXEC_OMP_DISTRIBUTE
:
6633 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
6635 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
6636 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
6637 innermost
= GFC_OMP_SPLIT_DO
;
6639 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
6640 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
6641 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
6642 innermost
= GFC_OMP_SPLIT_SIMD
;
6644 case EXEC_OMP_DISTRIBUTE_SIMD
:
6645 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
6646 innermost
= GFC_OMP_SPLIT_SIMD
;
6650 innermost
= GFC_OMP_SPLIT_DO
;
6652 case EXEC_OMP_DO_SIMD
:
6653 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
6654 innermost
= GFC_OMP_SPLIT_SIMD
;
6656 case EXEC_OMP_PARALLEL
:
6657 innermost
= GFC_OMP_SPLIT_PARALLEL
;
6659 case EXEC_OMP_PARALLEL_DO
:
6660 case EXEC_OMP_PARALLEL_LOOP
:
6661 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
6662 innermost
= GFC_OMP_SPLIT_DO
;
6664 case EXEC_OMP_PARALLEL_DO_SIMD
:
6665 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
6666 innermost
= GFC_OMP_SPLIT_SIMD
;
6668 case EXEC_OMP_PARALLEL_MASKED
:
6669 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_MASKED
;
6670 innermost
= GFC_OMP_SPLIT_MASKED
;
6672 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP
:
6673 mask
= (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_MASKED
6674 | GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
);
6675 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
6677 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP
:
6678 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
6679 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
6681 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP_SIMD
:
6682 mask
= (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_MASKED
6683 | GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
);
6684 innermost
= GFC_OMP_SPLIT_SIMD
;
6686 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP_SIMD
:
6687 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
6688 innermost
= GFC_OMP_SPLIT_SIMD
;
6691 innermost
= GFC_OMP_SPLIT_SIMD
;
6693 case EXEC_OMP_TARGET
:
6694 innermost
= GFC_OMP_SPLIT_TARGET
;
6696 case EXEC_OMP_TARGET_PARALLEL
:
6697 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
;
6698 innermost
= GFC_OMP_SPLIT_PARALLEL
;
6700 case EXEC_OMP_TARGET_PARALLEL_DO
:
6701 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
6702 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
6703 innermost
= GFC_OMP_SPLIT_DO
;
6705 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
6706 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
6707 | GFC_OMP_MASK_SIMD
;
6708 innermost
= GFC_OMP_SPLIT_SIMD
;
6710 case EXEC_OMP_TARGET_SIMD
:
6711 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_SIMD
;
6712 innermost
= GFC_OMP_SPLIT_SIMD
;
6714 case EXEC_OMP_TARGET_TEAMS
:
6715 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
6716 innermost
= GFC_OMP_SPLIT_TEAMS
;
6718 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
6719 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
6720 | GFC_OMP_MASK_DISTRIBUTE
;
6721 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
6723 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
6724 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
6725 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
6726 innermost
= GFC_OMP_SPLIT_DO
;
6728 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
6729 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
6730 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
6731 innermost
= GFC_OMP_SPLIT_SIMD
;
6733 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
6734 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
6735 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
6736 innermost
= GFC_OMP_SPLIT_SIMD
;
6738 case EXEC_OMP_TARGET_TEAMS_LOOP
:
6739 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DO
;
6740 innermost
= GFC_OMP_SPLIT_DO
;
6742 case EXEC_OMP_MASKED_TASKLOOP
:
6743 mask
= GFC_OMP_MASK_MASKED
| GFC_OMP_MASK_TASKLOOP
;
6744 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
6746 case EXEC_OMP_MASTER_TASKLOOP
:
6747 case EXEC_OMP_TASKLOOP
:
6748 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
6750 case EXEC_OMP_MASKED_TASKLOOP_SIMD
:
6751 mask
= GFC_OMP_MASK_MASKED
| GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
6752 innermost
= GFC_OMP_SPLIT_SIMD
;
6754 case EXEC_OMP_MASTER_TASKLOOP_SIMD
:
6755 case EXEC_OMP_TASKLOOP_SIMD
:
6756 mask
= GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
6757 innermost
= GFC_OMP_SPLIT_SIMD
;
6759 case EXEC_OMP_TEAMS
:
6760 innermost
= GFC_OMP_SPLIT_TEAMS
;
6762 case EXEC_OMP_TEAMS_DISTRIBUTE
:
6763 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
6764 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
6766 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
6767 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
6768 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
6769 innermost
= GFC_OMP_SPLIT_DO
;
6771 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
6772 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
6773 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
6774 innermost
= GFC_OMP_SPLIT_SIMD
;
6776 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
6777 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
6778 innermost
= GFC_OMP_SPLIT_SIMD
;
6780 case EXEC_OMP_TEAMS_LOOP
:
6781 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DO
;
6782 innermost
= GFC_OMP_SPLIT_DO
;
6789 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
6792 /* Loops are similar to DO but still a bit different. */
6796 case EXEC_OMP_PARALLEL_LOOP
:
6797 case EXEC_OMP_TEAMS_LOOP
:
6798 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
6799 case EXEC_OMP_TARGET_TEAMS_LOOP
:
6804 if (code
->ext
.omp_clauses
!= NULL
)
6806 if (mask
& GFC_OMP_MASK_TARGET
)
6808 /* First the clauses that are unique to some constructs. */
6809 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
6810 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
6811 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_IS_DEVICE_PTR
]
6812 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IS_DEVICE_PTR
];
6813 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_HAS_DEVICE_ADDR
]
6814 = code
->ext
.omp_clauses
->lists
[OMP_LIST_HAS_DEVICE_ADDR
];
6815 clausesa
[GFC_OMP_SPLIT_TARGET
].device
6816 = code
->ext
.omp_clauses
->device
;
6817 clausesa
[GFC_OMP_SPLIT_TARGET
].thread_limit
6818 = code
->ext
.omp_clauses
->thread_limit
;
6819 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_USES_ALLOCATORS
]
6820 = code
->ext
.omp_clauses
->lists
[OMP_LIST_USES_ALLOCATORS
];
6821 for (int i
= 0; i
< OMP_DEFAULTMAP_CAT_NUM
; i
++)
6822 clausesa
[GFC_OMP_SPLIT_TARGET
].defaultmap
[i
]
6823 = code
->ext
.omp_clauses
->defaultmap
[i
];
6824 clausesa
[GFC_OMP_SPLIT_TARGET
].if_exprs
[OMP_IF_TARGET
]
6825 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TARGET
];
6826 /* And this is copied to all. */
6827 clausesa
[GFC_OMP_SPLIT_TARGET
].if_expr
6828 = code
->ext
.omp_clauses
->if_expr
;
6829 clausesa
[GFC_OMP_SPLIT_TARGET
].self_expr
6830 = code
->ext
.omp_clauses
->self_expr
;
6831 clausesa
[GFC_OMP_SPLIT_TARGET
].nowait
6832 = code
->ext
.omp_clauses
->nowait
;
6834 if (mask
& GFC_OMP_MASK_TEAMS
)
6836 /* First the clauses that are unique to some constructs. */
6837 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams_lower
6838 = code
->ext
.omp_clauses
->num_teams_lower
;
6839 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams_upper
6840 = code
->ext
.omp_clauses
->num_teams_upper
;
6841 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
6842 = code
->ext
.omp_clauses
->thread_limit
;
6843 /* Shared and default clauses are allowed on parallel, teams
6845 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
6846 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
6847 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
6848 = code
->ext
.omp_clauses
->default_sharing
;
6850 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
6852 /* First the clauses that are unique to some constructs. */
6853 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
6854 = code
->ext
.omp_clauses
->dist_sched_kind
;
6855 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
6856 = code
->ext
.omp_clauses
->dist_chunk_size
;
6857 /* Duplicate collapse. */
6858 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
6859 = code
->ext
.omp_clauses
->collapse
;
6860 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].order_concurrent
6861 = code
->ext
.omp_clauses
->order_concurrent
;
6862 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].order_unconstrained
6863 = code
->ext
.omp_clauses
->order_unconstrained
;
6864 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].order_reproducible
6865 = code
->ext
.omp_clauses
->order_reproducible
;
6867 if (mask
& GFC_OMP_MASK_PARALLEL
)
6869 /* First the clauses that are unique to some constructs. */
6870 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
6871 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
6872 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
6873 = code
->ext
.omp_clauses
->num_threads
;
6874 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
6875 = code
->ext
.omp_clauses
->proc_bind
;
6876 /* Shared and default clauses are allowed on parallel, teams
6878 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
6879 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
6880 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
6881 = code
->ext
.omp_clauses
->default_sharing
;
6882 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_exprs
[OMP_IF_PARALLEL
]
6883 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_PARALLEL
];
6884 /* And this is copied to all. */
6885 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
6886 = code
->ext
.omp_clauses
->if_expr
;
6888 if (mask
& GFC_OMP_MASK_MASKED
)
6889 clausesa
[GFC_OMP_SPLIT_MASKED
].filter
= code
->ext
.omp_clauses
->filter
;
6890 if ((mask
& GFC_OMP_MASK_DO
) && !is_loop
)
6892 /* First the clauses that are unique to some constructs. */
6893 clausesa
[GFC_OMP_SPLIT_DO
].ordered
6894 = code
->ext
.omp_clauses
->ordered
;
6895 clausesa
[GFC_OMP_SPLIT_DO
].orderedc
6896 = code
->ext
.omp_clauses
->orderedc
;
6897 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
6898 = code
->ext
.omp_clauses
->sched_kind
;
6899 if (innermost
== GFC_OMP_SPLIT_SIMD
)
6900 clausesa
[GFC_OMP_SPLIT_DO
].sched_simd
6901 = code
->ext
.omp_clauses
->sched_simd
;
6902 clausesa
[GFC_OMP_SPLIT_DO
].sched_monotonic
6903 = code
->ext
.omp_clauses
->sched_monotonic
;
6904 clausesa
[GFC_OMP_SPLIT_DO
].sched_nonmonotonic
6905 = code
->ext
.omp_clauses
->sched_nonmonotonic
;
6906 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
6907 = code
->ext
.omp_clauses
->chunk_size
;
6908 clausesa
[GFC_OMP_SPLIT_DO
].nowait
6909 = code
->ext
.omp_clauses
->nowait
;
6911 if (mask
& GFC_OMP_MASK_DO
)
6913 clausesa
[GFC_OMP_SPLIT_DO
].bind
6914 = code
->ext
.omp_clauses
->bind
;
6915 /* Duplicate collapse. */
6916 clausesa
[GFC_OMP_SPLIT_DO
].collapse
6917 = code
->ext
.omp_clauses
->collapse
;
6918 clausesa
[GFC_OMP_SPLIT_DO
].order_concurrent
6919 = code
->ext
.omp_clauses
->order_concurrent
;
6920 clausesa
[GFC_OMP_SPLIT_DO
].order_unconstrained
6921 = code
->ext
.omp_clauses
->order_unconstrained
;
6922 clausesa
[GFC_OMP_SPLIT_DO
].order_reproducible
6923 = code
->ext
.omp_clauses
->order_reproducible
;
6925 if (mask
& GFC_OMP_MASK_SIMD
)
6927 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
6928 = code
->ext
.omp_clauses
->safelen_expr
;
6929 clausesa
[GFC_OMP_SPLIT_SIMD
].simdlen_expr
6930 = code
->ext
.omp_clauses
->simdlen_expr
;
6931 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
6932 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
6933 /* Duplicate collapse. */
6934 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
6935 = code
->ext
.omp_clauses
->collapse
;
6936 clausesa
[GFC_OMP_SPLIT_SIMD
].if_exprs
[OMP_IF_SIMD
]
6937 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_SIMD
];
6938 clausesa
[GFC_OMP_SPLIT_SIMD
].order_concurrent
6939 = code
->ext
.omp_clauses
->order_concurrent
;
6940 clausesa
[GFC_OMP_SPLIT_SIMD
].order_unconstrained
6941 = code
->ext
.omp_clauses
->order_unconstrained
;
6942 clausesa
[GFC_OMP_SPLIT_SIMD
].order_reproducible
6943 = code
->ext
.omp_clauses
->order_reproducible
;
6944 /* And this is copied to all. */
6945 clausesa
[GFC_OMP_SPLIT_SIMD
].if_expr
6946 = code
->ext
.omp_clauses
->if_expr
;
6948 if (mask
& GFC_OMP_MASK_TASKLOOP
)
6950 /* First the clauses that are unique to some constructs. */
6951 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].nogroup
6952 = code
->ext
.omp_clauses
->nogroup
;
6953 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].grainsize
6954 = code
->ext
.omp_clauses
->grainsize
;
6955 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].grainsize_strict
6956 = code
->ext
.omp_clauses
->grainsize_strict
;
6957 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].num_tasks
6958 = code
->ext
.omp_clauses
->num_tasks
;
6959 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].num_tasks_strict
6960 = code
->ext
.omp_clauses
->num_tasks_strict
;
6961 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].priority
6962 = code
->ext
.omp_clauses
->priority
;
6963 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].final_expr
6964 = code
->ext
.omp_clauses
->final_expr
;
6965 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].untied
6966 = code
->ext
.omp_clauses
->untied
;
6967 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].mergeable
6968 = code
->ext
.omp_clauses
->mergeable
;
6969 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_exprs
[OMP_IF_TASKLOOP
]
6970 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TASKLOOP
];
6971 /* And this is copied to all. */
6972 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_expr
6973 = code
->ext
.omp_clauses
->if_expr
;
6974 /* Shared and default clauses are allowed on parallel, teams
6976 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_SHARED
]
6977 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
6978 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].default_sharing
6979 = code
->ext
.omp_clauses
->default_sharing
;
6980 /* Duplicate collapse. */
6981 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].collapse
6982 = code
->ext
.omp_clauses
->collapse
;
6984 /* Private clause is supported on all constructs but master/masked,
6985 it is enough to put it on the innermost one except for master/masked. For
6986 !$ omp parallel do put it on parallel though,
6987 as that's what we did for OpenMP 3.1. */
6988 clausesa
[((innermost
== GFC_OMP_SPLIT_DO
&& !is_loop
)
6989 || code
->op
== EXEC_OMP_PARALLEL_MASTER
6990 || code
->op
== EXEC_OMP_PARALLEL_MASKED
)
6991 ? (int) GFC_OMP_SPLIT_PARALLEL
6992 : innermost
].lists
[OMP_LIST_PRIVATE
]
6993 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
6994 /* Firstprivate clause is supported on all constructs but
6995 simd and masked/master. Put it on the outermost of those and duplicate
6996 on parallel and teams. */
6997 if (mask
& GFC_OMP_MASK_TARGET
)
6998 gfc_add_firstprivate_if_unmapped (&clausesa
[GFC_OMP_SPLIT_TARGET
],
6999 code
->ext
.omp_clauses
);
7000 if (mask
& GFC_OMP_MASK_TEAMS
)
7001 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
7002 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
7003 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
7004 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
7005 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
7006 if (mask
& GFC_OMP_MASK_TASKLOOP
)
7007 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_FIRSTPRIVATE
]
7008 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
7009 if ((mask
& GFC_OMP_MASK_PARALLEL
)
7010 && !(mask
& GFC_OMP_MASK_TASKLOOP
))
7011 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
7012 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
7013 else if ((mask
& GFC_OMP_MASK_DO
) && !is_loop
)
7014 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
7015 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
7016 /* Lastprivate is allowed on distribute, do, simd, taskloop and loop.
7017 In parallel do{, simd} we actually want to put it on
7018 parallel rather than do. */
7019 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
7020 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_LASTPRIVATE
]
7021 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
7022 if (mask
& GFC_OMP_MASK_TASKLOOP
)
7023 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_LASTPRIVATE
]
7024 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
7025 if ((mask
& GFC_OMP_MASK_PARALLEL
) && !is_loop
7026 && !(mask
& GFC_OMP_MASK_TASKLOOP
))
7027 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
7028 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
7029 else if (mask
& GFC_OMP_MASK_DO
)
7030 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
7031 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
7032 if (mask
& GFC_OMP_MASK_SIMD
)
7033 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
7034 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
7035 /* Reduction is allowed on simd, do, parallel, teams, taskloop, and loop.
7036 Duplicate it on all of them, but
7037 - omit on do if parallel is present;
7038 - omit on task and parallel if loop is present;
7039 additionally, inscan applies to do/simd only. */
7040 for (int i
= OMP_LIST_REDUCTION
; i
<= OMP_LIST_REDUCTION_TASK
; i
++)
7042 if (mask
& GFC_OMP_MASK_TASKLOOP
7043 && i
!= OMP_LIST_REDUCTION_INSCAN
)
7044 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[i
]
7045 = code
->ext
.omp_clauses
->lists
[i
];
7046 if (mask
& GFC_OMP_MASK_TEAMS
7047 && i
!= OMP_LIST_REDUCTION_INSCAN
7049 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[i
]
7050 = code
->ext
.omp_clauses
->lists
[i
];
7051 if (mask
& GFC_OMP_MASK_PARALLEL
7052 && i
!= OMP_LIST_REDUCTION_INSCAN
7053 && !(mask
& GFC_OMP_MASK_TASKLOOP
)
7055 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[i
]
7056 = code
->ext
.omp_clauses
->lists
[i
];
7057 else if (mask
& GFC_OMP_MASK_DO
)
7058 clausesa
[GFC_OMP_SPLIT_DO
].lists
[i
]
7059 = code
->ext
.omp_clauses
->lists
[i
];
7060 if (mask
& GFC_OMP_MASK_SIMD
)
7061 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[i
]
7062 = code
->ext
.omp_clauses
->lists
[i
];
7064 if (mask
& GFC_OMP_MASK_TARGET
)
7065 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_IN_REDUCTION
]
7066 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IN_REDUCTION
];
7067 if (mask
& GFC_OMP_MASK_TASKLOOP
)
7068 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_IN_REDUCTION
]
7069 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IN_REDUCTION
];
7070 /* Linear clause is supported on do and simd,
7071 put it on the innermost one. */
7072 clausesa
[innermost
].lists
[OMP_LIST_LINEAR
]
7073 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
7075 /* Propagate firstprivate/lastprivate/reduction vars to
7076 shared (parallel, teams) and map-tofrom (target). */
7077 if (mask
& GFC_OMP_MASK_TARGET
)
7078 gfc_add_clause_implicitly (&clausesa
[GFC_OMP_SPLIT_TARGET
],
7079 code
->ext
.omp_clauses
, true, false);
7080 if ((mask
& GFC_OMP_MASK_PARALLEL
) && innermost
!= GFC_OMP_MASK_PARALLEL
)
7081 gfc_add_clause_implicitly (&clausesa
[GFC_OMP_SPLIT_PARALLEL
],
7082 code
->ext
.omp_clauses
, false,
7083 mask
& GFC_OMP_MASK_DO
);
7084 if (mask
& GFC_OMP_MASK_TEAMS
&& innermost
!= GFC_OMP_MASK_TEAMS
)
7085 gfc_add_clause_implicitly (&clausesa
[GFC_OMP_SPLIT_TEAMS
],
7086 code
->ext
.omp_clauses
, false, false);
7087 if (((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
7088 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
7090 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
7092 /* Distribute allocate clause to do, parallel, distribute, teams, target
7093 and taskloop. The code below iterates over variables in the
7094 allocate list and checks if that available is also in any
7095 privatization clause on those construct. If yes, then we add it
7096 to the list of 'allocate'ed variables for that construct. If a
7097 variable is found in none of them then we issue an error. */
7099 if (code
->ext
.omp_clauses
->lists
[OMP_LIST_ALLOCATE
])
7101 gfc_omp_namelist
*alloc_nl
, *priv_nl
;
7102 gfc_omp_namelist
*tails
[GFC_OMP_SPLIT_NUM
];
7103 for (alloc_nl
= code
->ext
.omp_clauses
->lists
[OMP_LIST_ALLOCATE
];
7104 alloc_nl
; alloc_nl
= alloc_nl
->next
)
7107 for (int i
= GFC_OMP_SPLIT_DO
; i
<= GFC_OMP_SPLIT_TASKLOOP
; i
++)
7109 gfc_omp_namelist
*p
;
7111 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
7115 case OMP_LIST_PRIVATE
:
7116 case OMP_LIST_FIRSTPRIVATE
:
7117 case OMP_LIST_LASTPRIVATE
:
7118 case OMP_LIST_REDUCTION
:
7119 case OMP_LIST_REDUCTION_INSCAN
:
7120 case OMP_LIST_REDUCTION_TASK
:
7121 case OMP_LIST_IN_REDUCTION
:
7122 case OMP_LIST_TASK_REDUCTION
:
7123 case OMP_LIST_LINEAR
:
7124 for (priv_nl
= clausesa
[i
].lists
[list
]; priv_nl
;
7125 priv_nl
= priv_nl
->next
)
7126 if (alloc_nl
->sym
== priv_nl
->sym
)
7129 p
= gfc_get_omp_namelist ();
7130 p
->sym
= alloc_nl
->sym
;
7131 p
->expr
= alloc_nl
->expr
;
7132 p
->u
.align
= alloc_nl
->u
.align
;
7133 p
->u2
.allocator
= alloc_nl
->u2
.allocator
;
7134 p
->where
= alloc_nl
->where
;
7135 if (clausesa
[i
].lists
[OMP_LIST_ALLOCATE
] == NULL
)
7137 clausesa
[i
].lists
[OMP_LIST_ALLOCATE
] = p
;
7143 tails
[i
] = tails
[i
]->next
;
7153 gfc_error ("%qs specified in 'allocate' clause at %L but not "
7154 "in an explicit privatization clause",
7155 alloc_nl
->sym
->name
, &alloc_nl
->where
);
7161 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
7162 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
7165 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
7166 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
7167 bool free_clausesa
= false;
7170 gfc_start_block (&block
);
7172 gfc_init_block (&block
);
7174 if (clausesa
== NULL
)
7176 clausesa
= clausesa_buf
;
7177 gfc_split_omp_clauses (code
, clausesa
);
7178 free_clausesa
= true;
7182 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
7183 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
7184 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
7187 if (TREE_CODE (body
) != BIND_EXPR
)
7188 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
7192 else if (TREE_CODE (body
) != BIND_EXPR
)
7193 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
7196 stmt
= make_node (OMP_FOR
);
7197 SET_EXPR_LOCATION (stmt
, gfc_get_location (&code
->loc
));
7198 TREE_TYPE (stmt
) = void_type_node
;
7199 OMP_FOR_BODY (stmt
) = body
;
7200 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
7204 gfc_add_expr_to_block (&block
, stmt
);
7206 gfc_free_split_omp_clauses (code
, clausesa
);
7207 return gfc_finish_block (&block
);
7211 gfc_trans_omp_parallel_do (gfc_code
*code
, bool is_loop
, stmtblock_t
*pblock
,
7212 gfc_omp_clauses
*clausesa
)
7214 stmtblock_t block
, *new_pblock
= pblock
;
7215 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
7216 tree stmt
, omp_clauses
= NULL_TREE
;
7217 bool free_clausesa
= false;
7220 gfc_start_block (&block
);
7222 gfc_init_block (&block
);
7224 if (clausesa
== NULL
)
7226 clausesa
= clausesa_buf
;
7227 gfc_split_omp_clauses (code
, clausesa
);
7228 free_clausesa
= true;
7231 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
7235 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
7236 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
7237 new_pblock
= &block
;
7241 stmt
= gfc_trans_omp_do (code
, is_loop
? EXEC_OMP_LOOP
: EXEC_OMP_DO
,
7242 new_pblock
, &clausesa
[GFC_OMP_SPLIT_DO
],
7246 if (TREE_CODE (stmt
) != BIND_EXPR
)
7247 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7251 else if (TREE_CODE (stmt
) != BIND_EXPR
)
7252 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
7253 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
7254 void_type_node
, stmt
, omp_clauses
);
7255 OMP_PARALLEL_COMBINED (stmt
) = 1;
7256 gfc_add_expr_to_block (&block
, stmt
);
7258 gfc_free_split_omp_clauses (code
, clausesa
);
7259 return gfc_finish_block (&block
);
7263 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
7264 gfc_omp_clauses
*clausesa
)
7267 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
7268 tree stmt
, omp_clauses
= NULL_TREE
;
7269 bool free_clausesa
= false;
7272 gfc_start_block (&block
);
7274 gfc_init_block (&block
);
7276 if (clausesa
== NULL
)
7278 clausesa
= clausesa_buf
;
7279 gfc_split_omp_clauses (code
, clausesa
);
7280 free_clausesa
= true;
7284 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
7288 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
7291 if (TREE_CODE (stmt
) != BIND_EXPR
)
7292 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7296 else if (TREE_CODE (stmt
) != BIND_EXPR
)
7297 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
7300 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
7301 void_type_node
, stmt
, omp_clauses
);
7302 OMP_PARALLEL_COMBINED (stmt
) = 1;
7304 gfc_add_expr_to_block (&block
, stmt
);
7306 gfc_free_split_omp_clauses (code
, clausesa
);
7307 return gfc_finish_block (&block
);
7311 gfc_trans_omp_parallel_sections (gfc_code
*code
)
7314 gfc_omp_clauses section_clauses
;
7315 tree stmt
, omp_clauses
;
7317 memset (§ion_clauses
, 0, sizeof (section_clauses
));
7318 section_clauses
.nowait
= true;
7320 gfc_start_block (&block
);
7321 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7324 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
7325 if (TREE_CODE (stmt
) != BIND_EXPR
)
7326 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7329 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
7330 void_type_node
, stmt
, omp_clauses
);
7331 OMP_PARALLEL_COMBINED (stmt
) = 1;
7332 gfc_add_expr_to_block (&block
, stmt
);
7333 return gfc_finish_block (&block
);
7337 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
7340 gfc_omp_clauses workshare_clauses
;
7341 tree stmt
, omp_clauses
;
7343 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
7344 workshare_clauses
.nowait
= true;
7346 gfc_start_block (&block
);
7347 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7350 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
7351 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7352 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
7353 void_type_node
, stmt
, omp_clauses
);
7354 OMP_PARALLEL_COMBINED (stmt
) = 1;
7355 gfc_add_expr_to_block (&block
, stmt
);
7356 return gfc_finish_block (&block
);
7360 gfc_trans_omp_scope (gfc_code
*code
)
7363 tree body
= gfc_trans_code (code
->block
->next
);
7364 if (IS_EMPTY_STMT (body
))
7366 gfc_start_block (&block
);
7367 tree omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7369 tree stmt
= make_node (OMP_SCOPE
);
7370 SET_EXPR_LOCATION (stmt
, gfc_get_location (&code
->loc
));
7371 TREE_TYPE (stmt
) = void_type_node
;
7372 OMP_SCOPE_BODY (stmt
) = body
;
7373 OMP_SCOPE_CLAUSES (stmt
) = omp_clauses
;
7374 gfc_add_expr_to_block (&block
, stmt
);
7375 return gfc_finish_block (&block
);
7379 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
7381 stmtblock_t block
, body
;
7382 tree omp_clauses
, stmt
;
7383 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
7384 location_t loc
= gfc_get_location (&code
->loc
);
7386 gfc_start_block (&block
);
7388 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
7390 gfc_init_block (&body
);
7391 for (code
= code
->block
; code
; code
= code
->block
)
7393 /* Last section is special because of lastprivate, so even if it
7394 is empty, chain it in. */
7395 stmt
= gfc_trans_omp_code (code
->next
,
7396 has_lastprivate
&& code
->block
== NULL
);
7397 if (! IS_EMPTY_STMT (stmt
))
7399 stmt
= build1_v (OMP_SECTION
, stmt
);
7400 gfc_add_expr_to_block (&body
, stmt
);
7403 stmt
= gfc_finish_block (&body
);
7405 stmt
= build2_loc (loc
, OMP_SECTIONS
, void_type_node
, stmt
, omp_clauses
);
7406 gfc_add_expr_to_block (&block
, stmt
);
7408 return gfc_finish_block (&block
);
7412 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
7415 gfc_start_block (&block
);
7416 tree omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
7417 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7418 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_SINGLE
, void_type_node
,
7420 gfc_add_expr_to_block (&block
, stmt
);
7421 return gfc_finish_block (&block
);
7425 gfc_trans_omp_task (gfc_code
*code
)
7428 tree stmt
, omp_clauses
;
7430 gfc_start_block (&block
);
7431 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7434 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7435 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7436 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TASK
, void_type_node
,
7438 gfc_add_expr_to_block (&block
, stmt
);
7439 return gfc_finish_block (&block
);
7443 gfc_trans_omp_taskgroup (gfc_code
*code
)
7446 gfc_start_block (&block
);
7447 tree body
= gfc_trans_code (code
->block
->next
);
7448 tree stmt
= make_node (OMP_TASKGROUP
);
7449 SET_EXPR_LOCATION (stmt
, gfc_get_location (&code
->loc
));
7450 TREE_TYPE (stmt
) = void_type_node
;
7451 OMP_TASKGROUP_BODY (stmt
) = body
;
7452 OMP_TASKGROUP_CLAUSES (stmt
) = gfc_trans_omp_clauses (&block
,
7453 code
->ext
.omp_clauses
,
7455 gfc_add_expr_to_block (&block
, stmt
);
7456 return gfc_finish_block (&block
);
7460 gfc_trans_omp_taskwait (gfc_code
*code
)
7462 if (!code
->ext
.omp_clauses
)
7464 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
7465 return build_call_expr_loc (input_location
, decl
, 0);
7468 gfc_start_block (&block
);
7469 tree stmt
= make_node (OMP_TASK
);
7470 SET_EXPR_LOCATION (stmt
, gfc_get_location (&code
->loc
));
7471 TREE_TYPE (stmt
) = void_type_node
;
7472 OMP_TASK_BODY (stmt
) = NULL_TREE
;
7473 OMP_TASK_CLAUSES (stmt
) = gfc_trans_omp_clauses (&block
,
7474 code
->ext
.omp_clauses
,
7476 gfc_add_expr_to_block (&block
, stmt
);
7477 return gfc_finish_block (&block
);
7481 gfc_trans_omp_taskyield (void)
7483 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
7484 return build_call_expr_loc (input_location
, decl
, 0);
7488 gfc_trans_omp_distribute (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
7491 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
7492 tree stmt
, omp_clauses
= NULL_TREE
;
7493 bool free_clausesa
= false;
7495 gfc_start_block (&block
);
7496 if (clausesa
== NULL
)
7498 clausesa
= clausesa_buf
;
7499 gfc_split_omp_clauses (code
, clausesa
);
7500 free_clausesa
= true;
7504 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
7508 case EXEC_OMP_DISTRIBUTE
:
7509 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
7510 case EXEC_OMP_TEAMS_DISTRIBUTE
:
7511 /* This is handled in gfc_trans_omp_do. */
7514 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
7515 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
7516 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
7517 stmt
= gfc_trans_omp_parallel_do (code
, false, &block
, clausesa
);
7518 if (TREE_CODE (stmt
) != BIND_EXPR
)
7519 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7523 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
7524 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
7525 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
7526 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
7527 if (TREE_CODE (stmt
) != BIND_EXPR
)
7528 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7532 case EXEC_OMP_DISTRIBUTE_SIMD
:
7533 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
7534 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
7535 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
7536 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
7537 if (TREE_CODE (stmt
) != BIND_EXPR
)
7538 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7547 tree distribute
= make_node (OMP_DISTRIBUTE
);
7548 SET_EXPR_LOCATION (distribute
, gfc_get_location (&code
->loc
));
7549 TREE_TYPE (distribute
) = void_type_node
;
7550 OMP_FOR_BODY (distribute
) = stmt
;
7551 OMP_FOR_CLAUSES (distribute
) = omp_clauses
;
7554 gfc_add_expr_to_block (&block
, stmt
);
7556 gfc_free_split_omp_clauses (code
, clausesa
);
7557 return gfc_finish_block (&block
);
7561 gfc_trans_omp_teams (gfc_code
*code
, gfc_omp_clauses
*clausesa
,
7565 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
7567 bool combined
= true, free_clausesa
= false;
7569 gfc_start_block (&block
);
7570 if (clausesa
== NULL
)
7572 clausesa
= clausesa_buf
;
7573 gfc_split_omp_clauses (code
, clausesa
);
7574 free_clausesa
= true;
7579 = chainon (omp_clauses
,
7580 gfc_trans_omp_clauses (&block
,
7581 &clausesa
[GFC_OMP_SPLIT_TEAMS
],
7587 case EXEC_OMP_TARGET_TEAMS
:
7588 case EXEC_OMP_TEAMS
:
7589 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7592 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
7593 case EXEC_OMP_TEAMS_DISTRIBUTE
:
7594 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DISTRIBUTE
, NULL
,
7595 &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
7598 case EXEC_OMP_TARGET_TEAMS_LOOP
:
7599 case EXEC_OMP_TEAMS_LOOP
:
7600 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_LOOP
, NULL
,
7601 &clausesa
[GFC_OMP_SPLIT_DO
],
7605 stmt
= gfc_trans_omp_distribute (code
, clausesa
);
7610 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7611 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TEAMS
,
7612 void_type_node
, stmt
, omp_clauses
);
7614 OMP_TEAMS_COMBINED (stmt
) = 1;
7616 gfc_add_expr_to_block (&block
, stmt
);
7618 gfc_free_split_omp_clauses (code
, clausesa
);
7619 return gfc_finish_block (&block
);
7623 gfc_trans_omp_target (gfc_code
*code
)
7626 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
7627 tree stmt
, omp_clauses
= NULL_TREE
;
7629 gfc_start_block (&block
);
7630 gfc_split_omp_clauses (code
, clausesa
);
7633 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TARGET
],
7637 case EXEC_OMP_TARGET
:
7639 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7640 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7642 case EXEC_OMP_TARGET_PARALLEL
:
7647 gfc_start_block (&iblock
);
7649 = gfc_trans_omp_clauses (&iblock
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
7651 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7652 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
7654 gfc_add_expr_to_block (&iblock
, stmt
);
7655 stmt
= gfc_finish_block (&iblock
);
7656 if (TREE_CODE (stmt
) != BIND_EXPR
)
7657 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7662 case EXEC_OMP_TARGET_PARALLEL_DO
:
7663 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
7664 stmt
= gfc_trans_omp_parallel_do (code
,
7666 == EXEC_OMP_TARGET_PARALLEL_LOOP
),
7668 if (TREE_CODE (stmt
) != BIND_EXPR
)
7669 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7673 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
7674 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
7675 if (TREE_CODE (stmt
) != BIND_EXPR
)
7676 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7680 case EXEC_OMP_TARGET_SIMD
:
7681 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
7682 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
7683 if (TREE_CODE (stmt
) != BIND_EXPR
)
7684 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7690 && (clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams_upper
7691 || clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
))
7693 gfc_omp_clauses clausesb
;
7695 /* For combined !$omp target teams, the num_teams and
7696 thread_limit clauses are evaluated before entering the
7697 target construct. */
7698 memset (&clausesb
, '\0', sizeof (clausesb
));
7699 clausesb
.num_teams_lower
7700 = clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams_lower
;
7701 clausesb
.num_teams_upper
7702 = clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams_upper
;
7703 clausesb
.thread_limit
= clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
;
7704 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams_lower
= NULL
;
7705 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams_upper
= NULL
;
7706 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
= NULL
;
7708 = gfc_trans_omp_clauses (&block
, &clausesb
, code
->loc
);
7710 stmt
= gfc_trans_omp_teams (code
, clausesa
, teams_clauses
);
7715 stmt
= gfc_trans_omp_teams (code
, clausesa
, NULL_TREE
);
7717 if (TREE_CODE (stmt
) != BIND_EXPR
)
7718 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7725 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TARGET
,
7726 void_type_node
, stmt
, omp_clauses
);
7727 if (code
->op
!= EXEC_OMP_TARGET
)
7728 OMP_TARGET_COMBINED (stmt
) = 1;
7729 cfun
->has_omp_target
= true;
7731 gfc_add_expr_to_block (&block
, stmt
);
7732 gfc_free_split_omp_clauses (code
, clausesa
);
7733 return gfc_finish_block (&block
);
7737 gfc_trans_omp_taskloop (gfc_code
*code
, gfc_exec_op op
)
7740 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
7741 tree stmt
, omp_clauses
= NULL_TREE
;
7743 gfc_start_block (&block
);
7744 gfc_split_omp_clauses (code
, clausesa
);
7747 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TASKLOOP
],
7751 case EXEC_OMP_TASKLOOP
:
7752 /* This is handled in gfc_trans_omp_do. */
7755 case EXEC_OMP_TASKLOOP_SIMD
:
7756 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
7757 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
7758 if (TREE_CODE (stmt
) != BIND_EXPR
)
7759 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7768 tree taskloop
= make_node (OMP_TASKLOOP
);
7769 SET_EXPR_LOCATION (taskloop
, gfc_get_location (&code
->loc
));
7770 TREE_TYPE (taskloop
) = void_type_node
;
7771 OMP_FOR_BODY (taskloop
) = stmt
;
7772 OMP_FOR_CLAUSES (taskloop
) = omp_clauses
;
7775 gfc_add_expr_to_block (&block
, stmt
);
7776 gfc_free_split_omp_clauses (code
, clausesa
);
7777 return gfc_finish_block (&block
);
7781 gfc_trans_omp_master_masked_taskloop (gfc_code
*code
, gfc_exec_op op
)
7783 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
7787 if (op
!= EXEC_OMP_MASTER_TASKLOOP_SIMD
7788 && code
->op
!= EXEC_OMP_MASTER_TASKLOOP
)
7789 gfc_split_omp_clauses (code
, clausesa
);
7792 if (op
== EXEC_OMP_MASKED_TASKLOOP_SIMD
7793 || op
== EXEC_OMP_MASTER_TASKLOOP_SIMD
)
7794 stmt
= gfc_trans_omp_taskloop (code
, EXEC_OMP_TASKLOOP_SIMD
);
7797 gcc_assert (op
== EXEC_OMP_MASKED_TASKLOOP
7798 || op
== EXEC_OMP_MASTER_TASKLOOP
);
7799 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_TASKLOOP
, NULL
,
7800 code
->op
!= EXEC_OMP_MASTER_TASKLOOP
7801 ? &clausesa
[GFC_OMP_SPLIT_TASKLOOP
]
7802 : code
->ext
.omp_clauses
, NULL
);
7804 if (TREE_CODE (stmt
) != BIND_EXPR
)
7805 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7808 gfc_start_block (&block
);
7809 if (op
== EXEC_OMP_MASKED_TASKLOOP
|| op
== EXEC_OMP_MASKED_TASKLOOP_SIMD
)
7811 tree clauses
= gfc_trans_omp_clauses (&block
,
7812 &clausesa
[GFC_OMP_SPLIT_MASKED
],
7814 tree msk
= make_node (OMP_MASKED
);
7815 SET_EXPR_LOCATION (msk
, gfc_get_location (&code
->loc
));
7816 TREE_TYPE (msk
) = void_type_node
;
7817 OMP_MASKED_BODY (msk
) = stmt
;
7818 OMP_MASKED_CLAUSES (msk
) = clauses
;
7819 OMP_MASKED_COMBINED (msk
) = 1;
7820 gfc_add_expr_to_block (&block
, msk
);
7824 gcc_assert (op
== EXEC_OMP_MASTER_TASKLOOP
7825 || op
== EXEC_OMP_MASTER_TASKLOOP_SIMD
);
7826 stmt
= build1_v (OMP_MASTER
, stmt
);
7827 gfc_add_expr_to_block (&block
, stmt
);
7829 if (op
!= EXEC_OMP_MASTER_TASKLOOP_SIMD
7830 && code
->op
!= EXEC_OMP_MASTER_TASKLOOP
)
7831 gfc_free_split_omp_clauses (code
, clausesa
);
7832 return gfc_finish_block (&block
);
7836 gfc_trans_omp_parallel_master_masked (gfc_code
*code
)
7839 tree stmt
, omp_clauses
;
7840 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
7841 bool parallel_combined
= false;
7843 if (code
->op
!= EXEC_OMP_PARALLEL_MASTER
)
7844 gfc_split_omp_clauses (code
, clausesa
);
7846 gfc_start_block (&block
);
7847 omp_clauses
= gfc_trans_omp_clauses (&block
,
7848 code
->op
== EXEC_OMP_PARALLEL_MASTER
7849 ? code
->ext
.omp_clauses
7850 : &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
7853 if (code
->op
== EXEC_OMP_PARALLEL_MASTER
)
7854 stmt
= gfc_trans_omp_master (code
);
7855 else if (code
->op
== EXEC_OMP_PARALLEL_MASKED
)
7856 stmt
= gfc_trans_omp_masked (code
, &clausesa
[GFC_OMP_SPLIT_MASKED
]);
7862 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP
:
7863 op
= EXEC_OMP_MASKED_TASKLOOP
;
7865 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP_SIMD
:
7866 op
= EXEC_OMP_MASKED_TASKLOOP_SIMD
;
7868 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP
:
7869 op
= EXEC_OMP_MASTER_TASKLOOP
;
7871 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP_SIMD
:
7872 op
= EXEC_OMP_MASTER_TASKLOOP_SIMD
;
7877 stmt
= gfc_trans_omp_master_masked_taskloop (code
, op
);
7878 parallel_combined
= true;
7880 if (TREE_CODE (stmt
) != BIND_EXPR
)
7881 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
7884 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
7885 void_type_node
, stmt
, omp_clauses
);
7886 /* masked does have just filter clause, but during gimplification
7887 isn't represented by a gimplification omp context, so for
7888 !$omp parallel masked don't set OMP_PARALLEL_COMBINED,
7890 !$omp parallel masked
7891 !$omp taskloop simd lastprivate (x)
7893 !$omp parallel masked taskloop simd lastprivate (x) */
7894 if (parallel_combined
)
7895 OMP_PARALLEL_COMBINED (stmt
) = 1;
7896 gfc_add_expr_to_block (&block
, stmt
);
7897 if (code
->op
!= EXEC_OMP_PARALLEL_MASTER
)
7898 gfc_free_split_omp_clauses (code
, clausesa
);
7899 return gfc_finish_block (&block
);
7903 gfc_trans_omp_target_data (gfc_code
*code
)
7906 tree stmt
, omp_clauses
;
7908 gfc_start_block (&block
);
7909 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7911 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7912 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TARGET_DATA
,
7913 void_type_node
, stmt
, omp_clauses
);
7914 gfc_add_expr_to_block (&block
, stmt
);
7915 return gfc_finish_block (&block
);
7919 gfc_trans_omp_target_enter_data (gfc_code
*code
)
7922 tree stmt
, omp_clauses
;
7924 gfc_start_block (&block
);
7925 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7927 stmt
= build1_loc (input_location
, OMP_TARGET_ENTER_DATA
, void_type_node
,
7929 gfc_add_expr_to_block (&block
, stmt
);
7930 return gfc_finish_block (&block
);
7934 gfc_trans_omp_target_exit_data (gfc_code
*code
)
7937 tree stmt
, omp_clauses
;
7939 gfc_start_block (&block
);
7940 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7941 code
->loc
, false, false, code
->op
);
7942 stmt
= build1_loc (input_location
, OMP_TARGET_EXIT_DATA
, void_type_node
,
7944 gfc_add_expr_to_block (&block
, stmt
);
7945 return gfc_finish_block (&block
);
7949 gfc_trans_omp_target_update (gfc_code
*code
)
7952 tree stmt
, omp_clauses
;
7954 gfc_start_block (&block
);
7955 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
7957 stmt
= build1_loc (input_location
, OMP_TARGET_UPDATE
, void_type_node
,
7959 gfc_add_expr_to_block (&block
, stmt
);
7960 return gfc_finish_block (&block
);
7964 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
7966 tree res
, tmp
, stmt
;
7967 stmtblock_t block
, *pblock
= NULL
;
7968 stmtblock_t singleblock
;
7969 int saved_ompws_flags
;
7970 bool singleblock_in_progress
= false;
7971 /* True if previous gfc_code in workshare construct is not workshared. */
7972 bool prev_singleunit
;
7973 location_t loc
= gfc_get_location (&code
->loc
);
7975 code
= code
->block
->next
;
7979 gfc_start_block (&block
);
7982 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
7983 prev_singleunit
= false;
7985 /* Translate statements one by one to trees until we reach
7986 the end of the workshare construct. Adjacent gfc_codes that
7987 are a single unit of work are clustered and encapsulated in a
7988 single OMP_SINGLE construct. */
7989 for (; code
; code
= code
->next
)
7991 if (code
->here
!= 0)
7993 res
= gfc_trans_label_here (code
);
7994 gfc_add_expr_to_block (pblock
, res
);
7997 /* No dependence analysis, use for clauses with wait.
7998 If this is the last gfc_code, use default omp_clauses. */
7999 if (code
->next
== NULL
&& clauses
->nowait
)
8000 ompws_flags
|= OMPWS_NOWAIT
;
8002 /* By default, every gfc_code is a single unit of work. */
8003 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
8004 ompws_flags
&= ~(OMPWS_SCALARIZER_WS
| OMPWS_SCALARIZER_BODY
);
8013 res
= gfc_trans_assign (code
);
8016 case EXEC_POINTER_ASSIGN
:
8017 res
= gfc_trans_pointer_assign (code
);
8020 case EXEC_INIT_ASSIGN
:
8021 res
= gfc_trans_init_assign (code
);
8025 res
= gfc_trans_forall (code
);
8029 res
= gfc_trans_where (code
);
8032 case EXEC_OMP_ATOMIC
:
8033 res
= gfc_trans_omp_directive (code
);
8036 case EXEC_OMP_PARALLEL
:
8037 case EXEC_OMP_PARALLEL_DO
:
8038 case EXEC_OMP_PARALLEL_MASTER
:
8039 case EXEC_OMP_PARALLEL_SECTIONS
:
8040 case EXEC_OMP_PARALLEL_WORKSHARE
:
8041 case EXEC_OMP_CRITICAL
:
8042 saved_ompws_flags
= ompws_flags
;
8044 res
= gfc_trans_omp_directive (code
);
8045 ompws_flags
= saved_ompws_flags
;
8049 res
= gfc_trans_block_construct (code
);
8053 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
8056 gfc_set_backend_locus (&code
->loc
);
8058 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
8060 if (prev_singleunit
)
8062 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
8063 /* Add current gfc_code to single block. */
8064 gfc_add_expr_to_block (&singleblock
, res
);
8067 /* Finish single block and add it to pblock. */
8068 tmp
= gfc_finish_block (&singleblock
);
8069 tmp
= build2_loc (loc
, OMP_SINGLE
,
8070 void_type_node
, tmp
, NULL_TREE
);
8071 gfc_add_expr_to_block (pblock
, tmp
);
8072 /* Add current gfc_code to pblock. */
8073 gfc_add_expr_to_block (pblock
, res
);
8074 singleblock_in_progress
= false;
8079 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
8081 /* Start single block. */
8082 gfc_init_block (&singleblock
);
8083 gfc_add_expr_to_block (&singleblock
, res
);
8084 singleblock_in_progress
= true;
8085 loc
= gfc_get_location (&code
->loc
);
8088 /* Add the new statement to the block. */
8089 gfc_add_expr_to_block (pblock
, res
);
8091 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
8095 /* Finish remaining SINGLE block, if we were in the middle of one. */
8096 if (singleblock_in_progress
)
8098 /* Finish single block and add it to pblock. */
8099 tmp
= gfc_finish_block (&singleblock
);
8100 tmp
= build2_loc (loc
, OMP_SINGLE
, void_type_node
, tmp
,
8102 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
8104 gfc_add_expr_to_block (pblock
, tmp
);
8107 stmt
= gfc_finish_block (pblock
);
8108 if (TREE_CODE (stmt
) != BIND_EXPR
)
8110 if (!IS_EMPTY_STMT (stmt
))
8112 tree bindblock
= poplevel (1, 0);
8113 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
8121 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
8122 stmt
= gfc_trans_omp_barrier ();
8129 gfc_trans_oacc_declare (gfc_code
*code
)
8132 tree stmt
, oacc_clauses
;
8133 enum tree_code construct_code
;
8135 construct_code
= OACC_DATA
;
8137 gfc_start_block (&block
);
8139 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.oacc_declare
->clauses
,
8140 code
->loc
, false, true);
8141 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
8142 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
8144 gfc_add_expr_to_block (&block
, stmt
);
8146 return gfc_finish_block (&block
);
8150 gfc_trans_oacc_directive (gfc_code
*code
)
8154 case EXEC_OACC_PARALLEL_LOOP
:
8155 case EXEC_OACC_KERNELS_LOOP
:
8156 case EXEC_OACC_SERIAL_LOOP
:
8157 return gfc_trans_oacc_combined_directive (code
);
8158 case EXEC_OACC_PARALLEL
:
8159 case EXEC_OACC_KERNELS
:
8160 case EXEC_OACC_SERIAL
:
8161 case EXEC_OACC_DATA
:
8162 case EXEC_OACC_HOST_DATA
:
8163 return gfc_trans_oacc_construct (code
);
8164 case EXEC_OACC_LOOP
:
8165 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
8167 case EXEC_OACC_UPDATE
:
8168 case EXEC_OACC_CACHE
:
8169 case EXEC_OACC_ENTER_DATA
:
8170 case EXEC_OACC_EXIT_DATA
:
8171 return gfc_trans_oacc_executable_directive (code
);
8172 case EXEC_OACC_WAIT
:
8173 return gfc_trans_oacc_wait_directive (code
);
8174 case EXEC_OACC_ATOMIC
:
8175 return gfc_trans_omp_atomic (code
);
8176 case EXEC_OACC_DECLARE
:
8177 return gfc_trans_oacc_declare (code
);
8184 gfc_trans_omp_directive (gfc_code
*code
)
8188 case EXEC_OMP_ALLOCATE
:
8189 case EXEC_OMP_ALLOCATORS
:
8190 return gfc_trans_omp_allocators (code
);
8191 case EXEC_OMP_ASSUME
:
8192 return gfc_trans_omp_assume (code
);
8193 case EXEC_OMP_ATOMIC
:
8194 return gfc_trans_omp_atomic (code
);
8195 case EXEC_OMP_BARRIER
:
8196 return gfc_trans_omp_barrier ();
8197 case EXEC_OMP_CANCEL
:
8198 return gfc_trans_omp_cancel (code
);
8199 case EXEC_OMP_CANCELLATION_POINT
:
8200 return gfc_trans_omp_cancellation_point (code
);
8201 case EXEC_OMP_CRITICAL
:
8202 return gfc_trans_omp_critical (code
);
8203 case EXEC_OMP_DEPOBJ
:
8204 return gfc_trans_omp_depobj (code
);
8205 case EXEC_OMP_DISTRIBUTE
:
8209 case EXEC_OMP_TASKLOOP
:
8210 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
8212 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
8213 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
8214 case EXEC_OMP_DISTRIBUTE_SIMD
:
8215 return gfc_trans_omp_distribute (code
, NULL
);
8216 case EXEC_OMP_DO_SIMD
:
8217 return gfc_trans_omp_do_simd (code
, NULL
, NULL
, NULL_TREE
);
8218 case EXEC_OMP_ERROR
:
8219 return gfc_trans_omp_error (code
);
8220 case EXEC_OMP_FLUSH
:
8221 return gfc_trans_omp_flush (code
);
8222 case EXEC_OMP_MASKED
:
8223 return gfc_trans_omp_masked (code
, NULL
);
8224 case EXEC_OMP_MASTER
:
8225 return gfc_trans_omp_master (code
);
8226 case EXEC_OMP_MASKED_TASKLOOP
:
8227 case EXEC_OMP_MASKED_TASKLOOP_SIMD
:
8228 case EXEC_OMP_MASTER_TASKLOOP
:
8229 case EXEC_OMP_MASTER_TASKLOOP_SIMD
:
8230 return gfc_trans_omp_master_masked_taskloop (code
, code
->op
);
8231 case EXEC_OMP_ORDERED
:
8232 return gfc_trans_omp_ordered (code
);
8233 case EXEC_OMP_PARALLEL
:
8234 return gfc_trans_omp_parallel (code
);
8235 case EXEC_OMP_PARALLEL_DO
:
8236 return gfc_trans_omp_parallel_do (code
, false, NULL
, NULL
);
8237 case EXEC_OMP_PARALLEL_LOOP
:
8238 return gfc_trans_omp_parallel_do (code
, true, NULL
, NULL
);
8239 case EXEC_OMP_PARALLEL_DO_SIMD
:
8240 return gfc_trans_omp_parallel_do_simd (code
, NULL
, NULL
);
8241 case EXEC_OMP_PARALLEL_MASKED
:
8242 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP
:
8243 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP_SIMD
:
8244 case EXEC_OMP_PARALLEL_MASTER
:
8245 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP
:
8246 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP_SIMD
:
8247 return gfc_trans_omp_parallel_master_masked (code
);
8248 case EXEC_OMP_PARALLEL_SECTIONS
:
8249 return gfc_trans_omp_parallel_sections (code
);
8250 case EXEC_OMP_PARALLEL_WORKSHARE
:
8251 return gfc_trans_omp_parallel_workshare (code
);
8252 case EXEC_OMP_SCOPE
:
8253 return gfc_trans_omp_scope (code
);
8254 case EXEC_OMP_SECTIONS
:
8255 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
8256 case EXEC_OMP_SINGLE
:
8257 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
8258 case EXEC_OMP_TARGET
:
8259 case EXEC_OMP_TARGET_PARALLEL
:
8260 case EXEC_OMP_TARGET_PARALLEL_DO
:
8261 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
8262 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
8263 case EXEC_OMP_TARGET_SIMD
:
8264 case EXEC_OMP_TARGET_TEAMS
:
8265 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
8266 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
8267 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
8268 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
8269 case EXEC_OMP_TARGET_TEAMS_LOOP
:
8270 return gfc_trans_omp_target (code
);
8271 case EXEC_OMP_TARGET_DATA
:
8272 return gfc_trans_omp_target_data (code
);
8273 case EXEC_OMP_TARGET_ENTER_DATA
:
8274 return gfc_trans_omp_target_enter_data (code
);
8275 case EXEC_OMP_TARGET_EXIT_DATA
:
8276 return gfc_trans_omp_target_exit_data (code
);
8277 case EXEC_OMP_TARGET_UPDATE
:
8278 return gfc_trans_omp_target_update (code
);
8280 return gfc_trans_omp_task (code
);
8281 case EXEC_OMP_TASKGROUP
:
8282 return gfc_trans_omp_taskgroup (code
);
8283 case EXEC_OMP_TASKLOOP_SIMD
:
8284 return gfc_trans_omp_taskloop (code
, code
->op
);
8285 case EXEC_OMP_TASKWAIT
:
8286 return gfc_trans_omp_taskwait (code
);
8287 case EXEC_OMP_TASKYIELD
:
8288 return gfc_trans_omp_taskyield ();
8289 case EXEC_OMP_TEAMS
:
8290 case EXEC_OMP_TEAMS_DISTRIBUTE
:
8291 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
8292 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
8293 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
8294 case EXEC_OMP_TEAMS_LOOP
:
8295 return gfc_trans_omp_teams (code
, NULL
, NULL_TREE
);
8296 case EXEC_OMP_WORKSHARE
:
8297 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);
8304 gfc_trans_omp_declare_simd (gfc_namespace
*ns
)
8309 gfc_omp_declare_simd
*ods
;
8310 for (ods
= ns
->omp_declare_simd
; ods
; ods
= ods
->next
)
8312 tree c
= gfc_trans_omp_clauses (NULL
, ods
->clauses
, ods
->where
, true);
8313 tree fndecl
= ns
->proc_name
->backend_decl
;
8315 c
= tree_cons (NULL_TREE
, c
, NULL_TREE
);
8316 c
= build_tree_list (get_identifier ("omp declare simd"), c
);
8317 TREE_CHAIN (c
) = DECL_ATTRIBUTES (fndecl
);
8318 DECL_ATTRIBUTES (fndecl
) = c
;
8323 gfc_trans_omp_declare_variant (gfc_namespace
*ns
)
8325 tree base_fn_decl
= ns
->proc_name
->backend_decl
;
8326 gfc_namespace
*search_ns
= ns
;
8327 gfc_omp_declare_variant
*next
;
8329 for (gfc_omp_declare_variant
*odv
= search_ns
->omp_declare_variant
;
8330 search_ns
; odv
= next
)
8332 /* Look in the parent namespace if there are no more directives in the
8333 current namespace. */
8336 search_ns
= search_ns
->parent
;
8338 next
= search_ns
->omp_declare_variant
;
8347 /* Check directive the first time it is encountered. */
8348 bool error_found
= true;
8351 error_found
= false;
8352 if (odv
->base_proc_symtree
== NULL
)
8354 if (!search_ns
->proc_name
->attr
.function
8355 && !search_ns
->proc_name
->attr
.subroutine
)
8356 gfc_error ("The base name for 'declare variant' must be "
8357 "specified at %L ", &odv
->where
);
8359 error_found
= false;
8363 if (!search_ns
->contained
8364 && strcmp (odv
->base_proc_symtree
->name
,
8365 ns
->proc_name
->name
))
8366 gfc_error ("The base name at %L does not match the name of the "
8367 "current procedure", &odv
->where
);
8368 else if (odv
->base_proc_symtree
->n
.sym
->attr
.entry
)
8369 gfc_error ("The base name at %L must not be an entry name",
8371 else if (odv
->base_proc_symtree
->n
.sym
->attr
.generic
)
8372 gfc_error ("The base name at %L must not be a generic name",
8374 else if (odv
->base_proc_symtree
->n
.sym
->attr
.proc_pointer
)
8375 gfc_error ("The base name at %L must not be a procedure pointer",
8377 else if (odv
->base_proc_symtree
->n
.sym
->attr
.implicit_type
)
8378 gfc_error ("The base procedure at %L must have an explicit "
8379 "interface", &odv
->where
);
8381 error_found
= false;
8384 odv
->checked_p
= true;
8387 odv
->error_p
= true;
8391 /* Ignore directives that do not apply to the current procedure. */
8392 if ((odv
->base_proc_symtree
== NULL
&& search_ns
!= ns
)
8393 || (odv
->base_proc_symtree
!= NULL
8394 && strcmp (odv
->base_proc_symtree
->name
, ns
->proc_name
->name
)))
8397 tree set_selectors
= NULL_TREE
;
8398 gfc_omp_set_selector
*oss
;
8400 for (oss
= odv
->set_selectors
; oss
; oss
= oss
->next
)
8402 tree selectors
= NULL_TREE
;
8403 gfc_omp_selector
*os
;
8404 enum omp_tss_code set
= oss
->code
;
8405 gcc_assert (set
!= OMP_TRAIT_SET_INVALID
);
8407 for (os
= oss
->trait_selectors
; os
; os
= os
->next
)
8409 tree scoreval
= NULL_TREE
;
8410 tree properties
= NULL_TREE
;
8411 gfc_omp_trait_property
*otp
;
8412 enum omp_ts_code sel
= os
->code
;
8414 /* Per the spec, "Implementations can ignore specified
8415 selectors that are not those described in this section";
8416 however, we must record such selectors because they
8417 cause match failures. */
8418 if (sel
== OMP_TRAIT_INVALID
)
8420 selectors
= make_trait_selector (sel
, NULL_TREE
, NULL_TREE
,
8425 for (otp
= os
->properties
; otp
; otp
= otp
->next
)
8427 switch (otp
->property_kind
)
8429 case OMP_TRAIT_PROPERTY_EXPR
:
8432 gfc_init_se (&se
, NULL
);
8433 gfc_conv_expr (&se
, otp
->expr
);
8434 properties
= make_trait_property (NULL_TREE
, se
.expr
,
8438 case OMP_TRAIT_PROPERTY_ID
:
8440 = make_trait_property (get_identifier (otp
->name
),
8441 NULL_TREE
, properties
);
8443 case OMP_TRAIT_PROPERTY_NAME_LIST
:
8445 tree prop
= OMP_TP_NAMELIST_NODE
;
8446 tree value
= NULL_TREE
;
8448 value
= get_identifier (otp
->name
);
8450 value
= gfc_conv_constant_to_tree (otp
->expr
);
8452 properties
= make_trait_property (prop
, value
,
8456 case OMP_TRAIT_PROPERTY_CLAUSE_LIST
:
8457 properties
= gfc_trans_omp_clauses (NULL
, otp
->clauses
,
8468 gfc_init_se (&se
, NULL
);
8469 gfc_conv_expr (&se
, os
->score
);
8473 selectors
= make_trait_selector (sel
, scoreval
,
8474 properties
, selectors
);
8476 set_selectors
= make_trait_set_selector (set
, selectors
,
8480 const char *variant_proc_name
= odv
->variant_proc_symtree
->name
;
8481 gfc_symbol
*variant_proc_sym
= odv
->variant_proc_symtree
->n
.sym
;
8482 if (variant_proc_sym
== NULL
|| variant_proc_sym
->attr
.implicit_type
)
8484 gfc_symtree
*proc_st
;
8485 gfc_find_sym_tree (variant_proc_name
, gfc_current_ns
, 1, &proc_st
);
8486 variant_proc_sym
= proc_st
->n
.sym
;
8488 if (variant_proc_sym
== NULL
)
8490 gfc_error ("Cannot find symbol %qs", variant_proc_name
);
8493 set_selectors
= omp_check_context_selector
8494 (gfc_get_location (&odv
->where
), set_selectors
);
8495 if (set_selectors
!= error_mark_node
)
8497 if (!variant_proc_sym
->attr
.implicit_type
8498 && !variant_proc_sym
->attr
.subroutine
8499 && !variant_proc_sym
->attr
.function
)
8501 gfc_error ("variant %qs at %L is not a function or subroutine",
8502 variant_proc_name
, &odv
->where
);
8503 variant_proc_sym
= NULL
;
8505 else if (omp_get_context_selector (set_selectors
,
8506 OMP_TRAIT_SET_CONSTRUCT
,
8507 OMP_TRAIT_CONSTRUCT_SIMD
)
8511 if (!gfc_compare_interfaces (ns
->proc_name
, variant_proc_sym
,
8512 variant_proc_sym
->name
, 0, 1,
8513 err
, sizeof (err
), NULL
, NULL
))
8515 gfc_error ("variant %qs and base %qs at %L have "
8516 "incompatible types: %s",
8517 variant_proc_name
, ns
->proc_name
->name
,
8519 variant_proc_sym
= NULL
;
8522 if (variant_proc_sym
!= NULL
)
8524 gfc_set_sym_referenced (variant_proc_sym
);
8526 = omp_get_context_selector_list (set_selectors
,
8527 OMP_TRAIT_SET_CONSTRUCT
);
8528 omp_mark_declare_variant (gfc_get_location (&odv
->where
),
8529 gfc_get_symbol_decl (variant_proc_sym
),
8531 if (omp_context_selector_matches (set_selectors
))
8533 tree id
= get_identifier ("omp declare variant base");
8534 tree variant
= gfc_get_symbol_decl (variant_proc_sym
);
8535 DECL_ATTRIBUTES (base_fn_decl
)
8536 = tree_cons (id
, build_tree_list (variant
, set_selectors
),
8537 DECL_ATTRIBUTES (base_fn_decl
));
8544 /* Add ptr for tracking as being allocated by GOMP_alloc. */
8547 gfc_omp_call_add_alloc (tree ptr
)
8549 static tree fn
= NULL_TREE
;
8550 if (fn
== NULL_TREE
)
8552 fn
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
8553 tree att
= build_tree_list (NULL_TREE
, build_string (4, ". R "));
8554 att
= tree_cons (get_identifier ("fn spec"), att
, TYPE_ATTRIBUTES (fn
));
8555 fn
= build_type_attribute_variant (fn
, att
);
8556 fn
= build_fn_decl ("GOMP_add_alloc", fn
);
8558 return build_call_expr_loc (input_location
, fn
, 1, ptr
);
8561 /* Generated function returns true when it was tracked via GOMP_add_alloc and
8562 removes it from the tracking. As called just before GOMP_free or omp_realloc
8563 the pointer is or might become invalid, thus, it is always removed. */
8566 gfc_omp_call_is_alloc (tree ptr
)
8568 static tree fn
= NULL_TREE
;
8569 if (fn
== NULL_TREE
)
8571 fn
= build_function_type_list (boolean_type_node
, ptr_type_node
,
8573 tree att
= build_tree_list (NULL_TREE
, build_string (4, ". R "));
8574 att
= tree_cons (get_identifier ("fn spec"), att
, TYPE_ATTRIBUTES (fn
));
8575 fn
= build_type_attribute_variant (fn
, att
);
8576 fn
= build_fn_decl ("GOMP_is_alloc", fn
);
8578 return build_call_expr_loc (input_location
, fn
, 1, ptr
);