/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gfortran.h"
#include "gimple-expr.h"
#include "trans.h"
#include "stringpool.h"
#include "fold-const.h"
#include "gimplify.h"		/* For create_tmp_var_raw.  */
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "gomp-constants.h"
#include "omp-general.h"
#undef GCC_DIAG_STYLE
#define GCC_DIAG_STYLE __gcc_tdiag__
#include "diagnostic-core.h"
#undef GCC_DIAG_STYLE
#define GCC_DIAG_STYLE __gcc_gfc__
/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
gfc_omp_privatize_by_reference (const_tree decl)
{
  tree type = TREE_TYPE (decl);

  if (TREE_CODE (type) == REFERENCE_TYPE
      && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
    return true;

  if (TREE_CODE (type) == POINTER_TYPE)
    {
      /* Array POINTER/ALLOCATABLE have aggregate types; all user variables
	 that have POINTER_TYPE type and aren't scalar pointers, scalar
	 allocatables, Cray pointees or C pointers are supposed to be
	 privatized by reference.  */
      if (GFC_DECL_GET_SCALAR_POINTER (decl)
	  || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
	  || GFC_DECL_CRAY_POINTEE (decl)
	  || GFC_DECL_ASSOCIATE_VAR_P (decl)
	  || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
	return false;

      if (!DECL_ARTIFICIAL (decl)
	  && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
	return true;

      /* Some arrays are expanded as DECL_ARTIFICIAL pointers
	 by the frontend.  */
      if (DECL_LANG_SPECIFIC (decl)
	  && GFC_DECL_SAVED_DESCRIPTOR (decl))
	return true;
    }

  return false;
}
/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
gfc_omp_predetermined_sharing (tree decl)
{
  /* Associate names preserve the association established during ASSOCIATE.
     As they are implemented either as pointers to the selector or array
     descriptor and shouldn't really change in the ASSOCIATE region,
     this decl can be either shared or firstprivate.  If it is a pointer,
     use firstprivate, as it is cheaper that way, otherwise make it shared.  */
  if (GFC_DECL_ASSOCIATE_VAR_P (decl))
    {
      if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
	return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
      else
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  if (DECL_ARTIFICIAL (decl)
      && ! GFC_DECL_RESULT (decl)
      && ! (DECL_LANG_SPECIFIC (decl)
	    && GFC_DECL_SAVED_DESCRIPTOR (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Cray pointees shouldn't be listed in any clauses and should be
     gimplified to dereference of the corresponding Cray pointer.
     Make them all private, so that they are emitted in the debug
     information.  */
  if (GFC_DECL_CRAY_POINTEE (decl))
    return OMP_CLAUSE_DEFAULT_PRIVATE;

  /* Assumed-size arrays are predetermined shared.  */
  if (TREE_CODE (decl) == PARM_DECL
      && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
      && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
				GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
	 == NULL)
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Dummy procedures aren't considered variables by OpenMP, thus are
     disallowed in OpenMP clauses.  They are represented as PARM_DECLs
     in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
     to avoid complaining about their uses with default(none).  */
  if (TREE_CODE (decl) == PARM_DECL
      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;

  /* COMMON and EQUIVALENCE decls are shared.  They
     are only referenced through DECL_VALUE_EXPR of the variables
     contained in them.  If those are privatized, they will not be
     gimplified to the COMMON or EQUIVALENCE decls.  */
  if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* These are either array or derived parameters, or vtables.
     In the former cases, the OpenMP standard doesn't consider them to be
     variables at all (they can't be redefined), but they can nevertheless
     appear in parallel/task regions, so for default(none) purposes treat
     them as shared.  For vtables likely the same handling is desirable.  */
  if (VAR_P (decl) && TREE_READONLY (decl) && TREE_STATIC (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
/* Return decl that should be used when reporting DEFAULT(NONE)
   diagnostics.  */

tree
gfc_omp_report_decl (tree decl)
{
  if (DECL_ARTIFICIAL (decl)
      && DECL_LANG_SPECIFIC (decl)
      && GFC_DECL_SAVED_DESCRIPTOR (decl))
    return GFC_DECL_SAVED_DESCRIPTOR (decl);

  return decl;
}
/* Return true if TYPE has any allocatable components.  */

static bool
gfc_has_alloc_comps (tree type, tree decl)
{
  tree field, ftype;

  if (POINTER_TYPE_P (type))
    {
      if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
	type = TREE_TYPE (type);
      else if (GFC_DECL_GET_SCALAR_POINTER (decl))
	return false;
    }

  if (GFC_DESCRIPTOR_TYPE_P (type) || GFC_ARRAY_TYPE_P (type))
    type = gfc_get_element_type (type);

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      ftype = TREE_TYPE (field);
      if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	return true;
      if (GFC_DESCRIPTOR_TYPE_P (ftype)
	  && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	return true;
      if (gfc_has_alloc_comps (ftype, field))
	return true;
    }
  return false;
}
/* Return true if DECL in private clause needs
   OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause.  */

bool
gfc_omp_private_outer_ref (tree decl)
{
  tree type = TREE_TYPE (decl);

  if (gfc_omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  if (GFC_DESCRIPTOR_TYPE_P (type)
      && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
    return true;

  if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
    return true;

  if (gfc_has_alloc_comps (type, decl))
    return true;

  return false;
}
/* Callback for gfc_omp_unshare_expr.  */

static tree
gfc_omp_unshare_expr_r (tree *tp, int *walk_subtrees, void *)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Stop at types, decls, constants like copy_tree_r.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant
      || code == BLOCK)
    *walk_subtrees = 0;
  else if (handled_component_p (t)
	   || TREE_CODE (t) == MEM_REF)
    {
      *tp = unshare_expr (t);
      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* Unshare in EXPR anything that the FE normally doesn't care much about
   (because during gimplification everything is unshared anyway), but
   which could cause problems with tree sharing at omp-low.c time.  */

static tree
gfc_omp_unshare_expr (tree expr)
{
  walk_tree (&expr, gfc_omp_unshare_expr_r, NULL, NULL);
  return expr;
}
enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,
  WALK_ALLOC_COMPS_DEFAULT_CTOR,
  WALK_ALLOC_COMPS_COPY_CTOR
};
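
/* The walk below is used in three modes: WALK_ALLOC_COMPS_DTOR deallocates
   any allocated allocatable components, WALK_ALLOC_COMPS_DEFAULT_CTOR
   allocates fresh storage of matching size for them without copying the
   contents, and WALK_ALLOC_COMPS_COPY_CTOR additionally copies the
   component data from the outer variable.  */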
/* Handle allocatable components in OpenMP clauses.  */

static tree
gfc_walk_alloc_comps (tree decl, tree dest, tree var,
		      enum walk_alloc_comps kind)
{
279 stmtblock_t block
, tmpblock
;
280 tree type
= TREE_TYPE (decl
), then_b
, tem
, field
;
281 gfc_init_block (&block
);
283 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
285 if (GFC_DESCRIPTOR_TYPE_P (type
))
287 gfc_init_block (&tmpblock
);
288 tem
= gfc_full_array_size (&tmpblock
, decl
,
289 GFC_TYPE_ARRAY_RANK (type
));
290 then_b
= gfc_finish_block (&tmpblock
);
291 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (then_b
));
292 tem
= gfc_omp_unshare_expr (tem
);
293 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
294 gfc_array_index_type
, tem
,
299 if (!TYPE_DOMAIN (type
)
300 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
301 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
302 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
304 tem
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
305 TYPE_SIZE_UNIT (type
),
306 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
307 tem
= size_binop (MINUS_EXPR
, tem
, size_one_node
);
310 tem
= array_type_nelts (type
);
311 tem
= fold_convert (gfc_array_index_type
, tem
);
314 tree nelems
= gfc_evaluate_now (tem
, &block
);
315 tree index
= gfc_create_var (gfc_array_index_type
, "S");
317 gfc_init_block (&tmpblock
);
318 tem
= gfc_conv_array_data (decl
);
319 tree declvar
= build_fold_indirect_ref_loc (input_location
, tem
);
320 tree declvref
= gfc_build_array_ref (declvar
, index
, NULL
);
321 tree destvar
, destvref
= NULL_TREE
;
324 tem
= gfc_conv_array_data (dest
);
325 destvar
= build_fold_indirect_ref_loc (input_location
, tem
);
326 destvref
= gfc_build_array_ref (destvar
, index
, NULL
);
328 gfc_add_expr_to_block (&tmpblock
,
329 gfc_walk_alloc_comps (declvref
, destvref
,
333 gfc_init_loopinfo (&loop
);
335 loop
.from
[0] = gfc_index_zero_node
;
336 loop
.loopvar
[0] = index
;
338 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
339 gfc_add_block_to_block (&block
, &loop
.pre
);
340 return gfc_finish_block (&block
);
342 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var
))
344 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
346 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
347 type
= TREE_TYPE (decl
);
350 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
);
351 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
353 tree ftype
= TREE_TYPE (field
);
354 tree declf
, destf
= NULL_TREE
;
355 bool has_alloc_comps
= gfc_has_alloc_comps (ftype
, field
);
356 if ((!GFC_DESCRIPTOR_TYPE_P (ftype
)
357 || GFC_TYPE_ARRAY_AKIND (ftype
) != GFC_ARRAY_ALLOCATABLE
)
358 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field
)
361 declf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
362 decl
, field
, NULL_TREE
);
364 destf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
365 dest
, field
, NULL_TREE
);
370 case WALK_ALLOC_COMPS_DTOR
:
372 case WALK_ALLOC_COMPS_DEFAULT_CTOR
:
373 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
374 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
376 gfc_add_modify (&block
, unshare_expr (destf
),
377 unshare_expr (declf
));
378 tem
= gfc_duplicate_allocatable_nocopy
379 (destf
, declf
, ftype
,
380 GFC_TYPE_ARRAY_RANK (ftype
));
382 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
383 tem
= gfc_duplicate_allocatable_nocopy (destf
, declf
, ftype
, 0);
385 case WALK_ALLOC_COMPS_COPY_CTOR
:
386 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
387 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
388 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
,
389 GFC_TYPE_ARRAY_RANK (ftype
),
391 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
392 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
, 0,
397 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
400 gfc_init_block (&tmpblock
);
401 gfc_add_expr_to_block (&tmpblock
,
402 gfc_walk_alloc_comps (declf
, destf
,
404 then_b
= gfc_finish_block (&tmpblock
);
405 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
406 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
407 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
408 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
409 tem
= unshare_expr (declf
);
414 tem
= fold_convert (pvoid_type_node
, tem
);
415 tem
= fold_build2_loc (input_location
, NE_EXPR
,
416 boolean_type_node
, tem
,
418 then_b
= build3_loc (input_location
, COND_EXPR
, void_type_node
,
420 build_empty_stmt (input_location
));
422 gfc_add_expr_to_block (&block
, then_b
);
424 if (kind
== WALK_ALLOC_COMPS_DTOR
)
426 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
427 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
429 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
430 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
,
431 NULL_TREE
, NULL_TREE
, true,
433 GFC_CAF_COARRAY_NOCOARRAY
);
434 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
436 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
438 tem
= gfc_call_free (unshare_expr (declf
));
439 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
444 return gfc_finish_block (&block
);
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */
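
/* For allocatable arrays and scalars, the private copy mirrors the
   allocation status of the outer variable: if the outer variable is
   allocated, storage of the same size is malloc'ed (and any allocatable
   components are default-constructed); otherwise the private copy is left
   unallocated.  Reductions always require an allocated copy.  */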
tree
gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
{
453 tree type
= TREE_TYPE (decl
), size
, ptr
, cond
, then_b
, else_b
;
454 stmtblock_t block
, cond_block
;
456 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_PRIVATE
457 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LASTPRIVATE
458 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
459 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_REDUCTION
);
461 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
462 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
463 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
465 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
468 gfc_start_block (&block
);
469 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
470 OMP_CLAUSE_DECL (clause
),
471 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
472 gfc_add_expr_to_block (&block
, tem
);
473 return gfc_finish_block (&block
);
478 gcc_assert (outer
!= NULL_TREE
);
  /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
     "not currently allocated" allocation status if the outer
     array is "not currently allocated"; otherwise they should be allocated.  */
483 gfc_start_block (&block
);
485 gfc_init_block (&cond_block
);
487 if (GFC_DESCRIPTOR_TYPE_P (type
))
489 gfc_add_modify (&cond_block
, decl
, outer
);
490 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
491 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
492 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
494 gfc_conv_descriptor_lbound_get (decl
, rank
));
495 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
496 size
, gfc_index_one_node
);
497 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
498 size
= fold_build2_loc (input_location
, MULT_EXPR
,
499 gfc_array_index_type
, size
,
500 gfc_conv_descriptor_stride_get (decl
, rank
));
501 tree esize
= fold_convert (gfc_array_index_type
,
502 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
503 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
505 size
= unshare_expr (size
);
506 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
510 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
511 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
512 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
513 if (GFC_DESCRIPTOR_TYPE_P (type
))
514 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
), ptr
);
516 gfc_add_modify (&cond_block
, unshare_expr (decl
),
517 fold_convert (TREE_TYPE (decl
), ptr
));
518 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
520 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
521 OMP_CLAUSE_DECL (clause
),
522 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
523 gfc_add_expr_to_block (&cond_block
, tem
);
525 then_b
= gfc_finish_block (&cond_block
);
527 /* Reduction clause requires allocated ALLOCATABLE. */
528 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_REDUCTION
)
530 gfc_init_block (&cond_block
);
531 if (GFC_DESCRIPTOR_TYPE_P (type
))
532 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
),
535 gfc_add_modify (&cond_block
, unshare_expr (decl
),
536 build_zero_cst (TREE_TYPE (decl
)));
537 else_b
= gfc_finish_block (&cond_block
);
539 tree tem
= fold_convert (pvoid_type_node
,
540 GFC_DESCRIPTOR_TYPE_P (type
)
541 ? gfc_conv_descriptor_data_get (outer
) : outer
);
542 tem
= unshare_expr (tem
);
543 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
544 tem
, null_pointer_node
);
545 gfc_add_expr_to_block (&block
,
546 build3_loc (input_location
, COND_EXPR
,
547 void_type_node
, cond
, then_b
,
551 gfc_add_expr_to_block (&block
, then_b
);
553 return gfc_finish_block (&block
);
/* Build and return code for a copy constructor from SRC to DEST.  */
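
/* For allocatable arrays and scalars the firstprivate copy is only
   allocated (with its payload memcpy'd from SRC and any allocatable
   components copy-constructed) when SRC itself is allocated; otherwise
   DEST's data pointer is nullified.  */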
tree
gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
{
561 tree type
= TREE_TYPE (dest
), ptr
, size
, call
;
562 tree cond
, then_b
, else_b
;
563 stmtblock_t block
, cond_block
;
565 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
566 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
568 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
569 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
570 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
572 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
574 gfc_start_block (&block
);
575 gfc_add_modify (&block
, dest
, src
);
576 tree tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
577 WALK_ALLOC_COMPS_COPY_CTOR
);
578 gfc_add_expr_to_block (&block
, tem
);
579 return gfc_finish_block (&block
);
582 return build2_v (MODIFY_EXPR
, dest
, src
);
  /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
     and copied from SRC.  */
587 gfc_start_block (&block
);
589 gfc_init_block (&cond_block
);
591 gfc_add_modify (&cond_block
, dest
, src
);
592 if (GFC_DESCRIPTOR_TYPE_P (type
))
594 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
595 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
596 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
598 gfc_conv_descriptor_lbound_get (dest
, rank
));
599 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
600 size
, gfc_index_one_node
);
601 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
602 size
= fold_build2_loc (input_location
, MULT_EXPR
,
603 gfc_array_index_type
, size
,
604 gfc_conv_descriptor_stride_get (dest
, rank
));
605 tree esize
= fold_convert (gfc_array_index_type
,
606 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
607 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
609 size
= unshare_expr (size
);
610 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
614 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
615 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
616 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
617 if (GFC_DESCRIPTOR_TYPE_P (type
))
618 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
), ptr
);
620 gfc_add_modify (&cond_block
, unshare_expr (dest
),
621 fold_convert (TREE_TYPE (dest
), ptr
));
623 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
624 ? gfc_conv_descriptor_data_get (src
) : src
;
625 srcptr
= unshare_expr (srcptr
);
626 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
627 call
= build_call_expr_loc (input_location
,
628 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
630 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
631 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
633 tree tem
= gfc_walk_alloc_comps (src
, dest
,
634 OMP_CLAUSE_DECL (clause
),
635 WALK_ALLOC_COMPS_COPY_CTOR
);
636 gfc_add_expr_to_block (&cond_block
, tem
);
638 then_b
= gfc_finish_block (&cond_block
);
640 gfc_init_block (&cond_block
);
641 if (GFC_DESCRIPTOR_TYPE_P (type
))
642 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
),
645 gfc_add_modify (&cond_block
, unshare_expr (dest
),
646 build_zero_cst (TREE_TYPE (dest
)));
647 else_b
= gfc_finish_block (&cond_block
);
649 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
650 unshare_expr (srcptr
), null_pointer_node
);
651 gfc_add_expr_to_block (&block
,
652 build3_loc (input_location
, COND_EXPR
,
653 void_type_node
, cond
, then_b
, else_b
));
655 return gfc_finish_block (&block
);
/* Similarly, except use an intrinsic or pointer assignment operator
   instead.  */
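
/* Used when assigning back to the original list item (e.g. lastprivate,
   copyin): allocatable components of DEST are destructed and re-created
   from SRC, and an allocatable DEST itself is (re)allocated with
   malloc/realloc as needed before its payload is memcpy'd from SRC,
   while an unallocated SRC causes DEST to be freed and nullified.  */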
tree
gfc_omp_clause_assign_op (tree clause, tree dest, tree src)
{
664 tree type
= TREE_TYPE (dest
), ptr
, size
, call
, nonalloc
;
665 tree cond
, then_b
, else_b
;
666 stmtblock_t block
, cond_block
, cond_block2
, inner_block
;
668 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
669 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
670 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
672 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
674 gfc_start_block (&block
);
675 /* First dealloc any allocatable components in DEST. */
676 tree tem
= gfc_walk_alloc_comps (dest
, NULL_TREE
,
677 OMP_CLAUSE_DECL (clause
),
678 WALK_ALLOC_COMPS_DTOR
);
679 gfc_add_expr_to_block (&block
, tem
);
680 /* Then copy over toplevel data. */
681 gfc_add_modify (&block
, dest
, src
);
682 /* Finally allocate any allocatable components and copy. */
683 tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
684 WALK_ALLOC_COMPS_COPY_CTOR
);
685 gfc_add_expr_to_block (&block
, tem
);
686 return gfc_finish_block (&block
);
689 return build2_v (MODIFY_EXPR
, dest
, src
);
692 gfc_start_block (&block
);
694 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
696 then_b
= gfc_walk_alloc_comps (dest
, NULL_TREE
, OMP_CLAUSE_DECL (clause
),
697 WALK_ALLOC_COMPS_DTOR
);
698 tree tem
= fold_convert (pvoid_type_node
,
699 GFC_DESCRIPTOR_TYPE_P (type
)
700 ? gfc_conv_descriptor_data_get (dest
) : dest
);
701 tem
= unshare_expr (tem
);
702 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
703 tem
, null_pointer_node
);
704 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
705 then_b
, build_empty_stmt (input_location
));
706 gfc_add_expr_to_block (&block
, tem
);
709 gfc_init_block (&cond_block
);
711 if (GFC_DESCRIPTOR_TYPE_P (type
))
713 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
714 size
= gfc_conv_descriptor_ubound_get (src
, rank
);
715 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
717 gfc_conv_descriptor_lbound_get (src
, rank
));
718 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
719 size
, gfc_index_one_node
);
720 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
721 size
= fold_build2_loc (input_location
, MULT_EXPR
,
722 gfc_array_index_type
, size
,
723 gfc_conv_descriptor_stride_get (src
, rank
));
724 tree esize
= fold_convert (gfc_array_index_type
,
725 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
726 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
728 size
= unshare_expr (size
);
729 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
733 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
734 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
736 tree destptr
= GFC_DESCRIPTOR_TYPE_P (type
)
737 ? gfc_conv_descriptor_data_get (dest
) : dest
;
738 destptr
= unshare_expr (destptr
);
739 destptr
= fold_convert (pvoid_type_node
, destptr
);
740 gfc_add_modify (&cond_block
, ptr
, destptr
);
742 nonalloc
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
,
743 destptr
, null_pointer_node
);
745 if (GFC_DESCRIPTOR_TYPE_P (type
))
748 for (i
= 0; i
< GFC_TYPE_ARRAY_RANK (type
); i
++)
750 tree rank
= gfc_rank_cst
[i
];
751 tree tem
= gfc_conv_descriptor_ubound_get (src
, rank
);
752 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
753 gfc_array_index_type
, tem
,
754 gfc_conv_descriptor_lbound_get (src
, rank
));
755 tem
= fold_build2_loc (input_location
, PLUS_EXPR
,
756 gfc_array_index_type
, tem
,
757 gfc_conv_descriptor_lbound_get (dest
, rank
));
758 tem
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
759 tem
, gfc_conv_descriptor_ubound_get (dest
,
761 cond
= fold_build2_loc (input_location
, TRUTH_ORIF_EXPR
,
762 boolean_type_node
, cond
, tem
);
766 gfc_init_block (&cond_block2
);
768 if (GFC_DESCRIPTOR_TYPE_P (type
))
770 gfc_init_block (&inner_block
);
771 gfc_allocate_using_malloc (&inner_block
, ptr
, size
, NULL_TREE
);
772 then_b
= gfc_finish_block (&inner_block
);
774 gfc_init_block (&inner_block
);
775 gfc_add_modify (&inner_block
, ptr
,
776 gfc_call_realloc (&inner_block
, ptr
, size
));
777 else_b
= gfc_finish_block (&inner_block
);
779 gfc_add_expr_to_block (&cond_block2
,
780 build3_loc (input_location
, COND_EXPR
,
782 unshare_expr (nonalloc
),
784 gfc_add_modify (&cond_block2
, dest
, src
);
785 gfc_conv_descriptor_data_set (&cond_block2
, unshare_expr (dest
), ptr
);
789 gfc_allocate_using_malloc (&cond_block2
, ptr
, size
, NULL_TREE
);
790 gfc_add_modify (&cond_block2
, unshare_expr (dest
),
791 fold_convert (type
, ptr
));
793 then_b
= gfc_finish_block (&cond_block2
);
794 else_b
= build_empty_stmt (input_location
);
796 gfc_add_expr_to_block (&cond_block
,
797 build3_loc (input_location
, COND_EXPR
,
798 void_type_node
, unshare_expr (cond
),
801 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
802 ? gfc_conv_descriptor_data_get (src
) : src
;
803 srcptr
= unshare_expr (srcptr
);
804 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
805 call
= build_call_expr_loc (input_location
,
806 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
808 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
809 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
811 tree tem
= gfc_walk_alloc_comps (src
, dest
,
812 OMP_CLAUSE_DECL (clause
),
813 WALK_ALLOC_COMPS_COPY_CTOR
);
814 gfc_add_expr_to_block (&cond_block
, tem
);
816 then_b
= gfc_finish_block (&cond_block
);
818 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_COPYIN
)
820 gfc_init_block (&cond_block
);
821 if (GFC_DESCRIPTOR_TYPE_P (type
))
823 tree tmp
= gfc_conv_descriptor_data_get (unshare_expr (dest
));
824 tmp
= gfc_deallocate_with_status (tmp
, NULL_TREE
, NULL_TREE
,
825 NULL_TREE
, NULL_TREE
, true, NULL
,
826 GFC_CAF_COARRAY_NOCOARRAY
);
827 gfc_add_expr_to_block (&cond_block
, tmp
);
831 destptr
= gfc_evaluate_now (destptr
, &cond_block
);
832 gfc_add_expr_to_block (&cond_block
, gfc_call_free (destptr
));
833 gfc_add_modify (&cond_block
, unshare_expr (dest
),
834 build_zero_cst (TREE_TYPE (dest
)));
836 else_b
= gfc_finish_block (&cond_block
);
838 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
839 unshare_expr (srcptr
), null_pointer_node
);
840 gfc_add_expr_to_block (&block
,
841 build3_loc (input_location
, COND_EXPR
,
842 void_type_node
, cond
,
846 gfc_add_expr_to_block (&block
, then_b
);
  return gfc_finish_block (&block);
}
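
/* Helper for the LINEAR clause: build a scalarized loop over NELEMS
   elements that stores SRC[i] + ADD into DEST[i], handling both array
   and pointer representations of DEST and SRC.  */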
static void
gfc_omp_linear_clause_add_loop (stmtblock_t *block, tree dest, tree src,
				tree add, tree nelems)
{
855 stmtblock_t tmpblock
;
856 tree desta
, srca
, index
= gfc_create_var (gfc_array_index_type
, "S");
857 nelems
= gfc_evaluate_now (nelems
, block
);
859 gfc_init_block (&tmpblock
);
860 if (TREE_CODE (TREE_TYPE (dest
)) == ARRAY_TYPE
)
862 desta
= gfc_build_array_ref (dest
, index
, NULL
);
863 srca
= gfc_build_array_ref (src
, index
, NULL
);
867 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest
)));
868 tree idx
= fold_build2 (MULT_EXPR
, sizetype
,
869 fold_convert (sizetype
, index
),
870 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest
))));
871 desta
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
872 TREE_TYPE (dest
), dest
,
874 srca
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
875 TREE_TYPE (src
), src
,
878 gfc_add_modify (&tmpblock
, desta
,
879 fold_build2 (PLUS_EXPR
, TREE_TYPE (desta
),
883 gfc_init_loopinfo (&loop
);
885 loop
.from
[0] = gfc_index_zero_node
;
886 loop
.loopvar
[0] = index
;
888 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
889 gfc_add_block_to_block (block
, &loop
.pre
);
/* Build and return code for a constructor of DEST that initializes
   it to SRC plus ADD (ADD is scalar integer).  */
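
/* For an allocatable DEST the constructor first allocates DEST to SRC's
   size and then fills it element-wise via gfc_omp_linear_clause_add_loop;
   for non-allocatable arrays only the element-wise loop is emitted.  */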
tree
gfc_omp_clause_linear_ctor (tree clause, tree dest, tree src, tree add)
{
898 tree type
= TREE_TYPE (dest
), ptr
, size
, nelems
= NULL_TREE
;
901 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
903 gfc_start_block (&block
);
904 add
= gfc_evaluate_now (add
, &block
);
906 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
907 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
908 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
910 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
911 if (!TYPE_DOMAIN (type
)
912 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
913 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
914 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
916 nelems
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
917 TYPE_SIZE_UNIT (type
),
918 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
919 nelems
= size_binop (MINUS_EXPR
, nelems
, size_one_node
);
922 nelems
= array_type_nelts (type
);
923 nelems
= fold_convert (gfc_array_index_type
, nelems
);
925 gfc_omp_linear_clause_add_loop (&block
, dest
, src
, add
, nelems
);
926 return gfc_finish_block (&block
);
  /* Allocatable arrays in LINEAR clauses need to be allocated
     and copied from SRC.  */
931 gfc_add_modify (&block
, dest
, src
);
932 if (GFC_DESCRIPTOR_TYPE_P (type
))
934 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
935 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
936 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
938 gfc_conv_descriptor_lbound_get (dest
, rank
));
939 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
940 size
, gfc_index_one_node
);
941 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
942 size
= fold_build2_loc (input_location
, MULT_EXPR
,
943 gfc_array_index_type
, size
,
944 gfc_conv_descriptor_stride_get (dest
, rank
));
945 tree esize
= fold_convert (gfc_array_index_type
,
946 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
947 nelems
= gfc_evaluate_now (unshare_expr (size
), &block
);
948 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
949 nelems
, unshare_expr (esize
));
950 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
952 nelems
= fold_build2_loc (input_location
, MINUS_EXPR
,
953 gfc_array_index_type
, nelems
,
957 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
958 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
959 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
960 if (GFC_DESCRIPTOR_TYPE_P (type
))
962 gfc_conv_descriptor_data_set (&block
, unshare_expr (dest
), ptr
);
963 tree etype
= gfc_get_element_type (type
);
964 ptr
= fold_convert (build_pointer_type (etype
), ptr
);
965 tree srcptr
= gfc_conv_descriptor_data_get (unshare_expr (src
));
966 srcptr
= fold_convert (build_pointer_type (etype
), srcptr
);
967 gfc_omp_linear_clause_add_loop (&block
, ptr
, srcptr
, add
, nelems
);
971 gfc_add_modify (&block
, unshare_expr (dest
),
972 fold_convert (TREE_TYPE (dest
), ptr
));
973 ptr
= fold_convert (TREE_TYPE (dest
), ptr
);
974 tree dstm
= build_fold_indirect_ref (ptr
);
975 tree srcm
= build_fold_indirect_ref (unshare_expr (src
));
976 gfc_add_modify (&block
, dstm
,
977 fold_build2 (PLUS_EXPR
, TREE_TYPE (add
), srcm
, add
));
979 return gfc_finish_block (&block
);
/* Build and return code destructing DECL.  Return NULL if nothing
   to be done.  */

tree
gfc_omp_clause_dtor (tree clause, tree decl)
{
  tree type = TREE_TYPE (decl), tem;

  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
    {
      if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
	return gfc_walk_alloc_comps (decl, NULL_TREE,
				     OMP_CLAUSE_DECL (clause),
				     WALK_ALLOC_COMPS_DTOR);
      return NULL_TREE;
    }

  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
	 to be deallocated if they were allocated.  */
      tem = gfc_conv_descriptor_data_get (decl);
      tem = gfc_deallocate_with_status (tem, NULL_TREE, NULL_TREE, NULL_TREE,
					NULL_TREE, true, NULL,
					GFC_CAF_COARRAY_NOCOARRAY);
    }
  else
    tem = gfc_call_free (decl);
  tem = gfc_omp_unshare_expr (tem);

  if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
    {
      stmtblock_t block;
      tree then_b;

      gfc_init_block (&block);
      gfc_add_expr_to_block (&block,
			     gfc_walk_alloc_comps (decl, NULL_TREE,
						   OMP_CLAUSE_DECL (clause),
						   WALK_ALLOC_COMPS_DTOR));
      gfc_add_expr_to_block (&block, tem);
      then_b = gfc_finish_block (&block);

      tem = fold_convert (pvoid_type_node,
			  GFC_DESCRIPTOR_TYPE_P (type)
			  ? gfc_conv_descriptor_data_get (decl) : decl);
      tem = unshare_expr (tem);
      tree cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				   tem, null_pointer_node);
      tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
			then_b, build_empty_stmt (input_location));
    }
  return tem;
}
void
gfc_omp_finish_clause (tree c, gimple_seq *pre_p)
{
1043 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
1046 tree decl
= OMP_CLAUSE_DECL (c
);
1048 /* Assumed-size arrays can't be mapped implicitly, they have to be
1049 mapped explicitly using array sections. */
1050 if (TREE_CODE (decl
) == PARM_DECL
1051 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
1052 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
1053 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
1054 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
1057 error_at (OMP_CLAUSE_LOCATION (c
),
1058 "implicit mapping of assumed size array %qD", decl
);
1062 tree c2
= NULL_TREE
, c3
= NULL_TREE
, c4
= NULL_TREE
;
1063 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
1065 if (!gfc_omp_privatize_by_reference (decl
)
1066 && !GFC_DECL_GET_SCALAR_POINTER (decl
)
1067 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1068 && !GFC_DECL_CRAY_POINTEE (decl
)
1069 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
1071 tree orig_decl
= decl
;
1072 c4
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1073 OMP_CLAUSE_SET_MAP_KIND (c4
, GOMP_MAP_POINTER
);
1074 OMP_CLAUSE_DECL (c4
) = decl
;
1075 OMP_CLAUSE_SIZE (c4
) = size_int (0);
1076 decl
= build_fold_indirect_ref (decl
);
1077 OMP_CLAUSE_DECL (c
) = decl
;
1078 OMP_CLAUSE_SIZE (c
) = NULL_TREE
;
1079 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1080 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1081 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1083 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1084 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1085 OMP_CLAUSE_DECL (c3
) = unshare_expr (decl
);
1086 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1087 decl
= build_fold_indirect_ref (decl
);
1088 OMP_CLAUSE_DECL (c
) = decl
;
1091 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1094 gfc_start_block (&block
);
1095 tree type
= TREE_TYPE (decl
);
1096 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1097 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
1098 ptr
= build_fold_indirect_ref (ptr
);
1099 OMP_CLAUSE_DECL (c
) = ptr
;
1100 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1101 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_TO_PSET
);
1102 OMP_CLAUSE_DECL (c2
) = decl
;
1103 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (type
);
1104 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1105 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1106 OMP_CLAUSE_DECL (c3
) = gfc_conv_descriptor_data_get (decl
);
1107 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1108 tree size
= create_tmp_var (gfc_array_index_type
);
1109 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
1110 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
1111 if (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
1112 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
)
1114 stmtblock_t cond_block
;
1115 tree tem
, then_b
, else_b
, zero
, cond
;
1117 gfc_init_block (&cond_block
);
1118 tem
= gfc_full_array_size (&cond_block
, decl
,
1119 GFC_TYPE_ARRAY_RANK (type
));
1120 gfc_add_modify (&cond_block
, size
, tem
);
1121 gfc_add_modify (&cond_block
, size
,
1122 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1124 then_b
= gfc_finish_block (&cond_block
);
1125 gfc_init_block (&cond_block
);
1126 zero
= build_int_cst (gfc_array_index_type
, 0);
1127 gfc_add_modify (&cond_block
, size
, zero
);
1128 else_b
= gfc_finish_block (&cond_block
);
1129 tem
= gfc_conv_descriptor_data_get (decl
);
1130 tem
= fold_convert (pvoid_type_node
, tem
);
1131 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1132 boolean_type_node
, tem
, null_pointer_node
);
1133 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1134 void_type_node
, cond
,
1139 gfc_add_modify (&block
, size
,
1140 gfc_full_array_size (&block
, decl
,
1141 GFC_TYPE_ARRAY_RANK (type
)));
1142 gfc_add_modify (&block
, size
,
1143 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1146 OMP_CLAUSE_SIZE (c
) = size
;
1147 tree stmt
= gfc_finish_block (&block
);
1148 gimplify_and_add (stmt
, pre_p
);
1151 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
1153 = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
1154 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1157 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (last
);
1158 OMP_CLAUSE_CHAIN (last
) = c2
;
1163 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (last
);
1164 OMP_CLAUSE_CHAIN (last
) = c3
;
1169 OMP_CLAUSE_CHAIN (c4
) = OMP_CLAUSE_CHAIN (last
);
1170 OMP_CLAUSE_CHAIN (last
) = c4
;
/* Return true if DECL is a scalar variable (for the purpose of
   implicit firstprivatization).  */

bool
gfc_omp_scalar_p (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    type = TREE_TYPE (type);
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
	  || GFC_DECL_GET_SCALAR_POINTER (decl))
	type = TREE_TYPE (type);
      if (GFC_ARRAY_TYPE_P (type)
	  || GFC_CLASS_TYPE_P (type))
	return false;
    }
  if (TYPE_STRING_FLAG (type))
    return false;
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || COMPLEX_FLOAT_TYPE_P (type))
    return true;
  return false;
}
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
gfc_omp_disregard_value_expr (tree decl, bool shared)
{
  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
	  && VAR_P (TREE_OPERAND (value, 0))
	  && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
	/* If variable in COMMON or EQUIVALENCE is privatized, return
	   true, as just that variable is supposed to be privatized,
	   not the whole COMMON or whole EQUIVALENCE.
	   For shared variables in COMMON or EQUIVALENCE, let them be
	   gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
	   from the same COMMON or EQUIVALENCE just one sharing of the
	   whole COMMON or EQUIVALENCE is enough.  */
	return ! shared;
    }

  if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
    return true;

  return false;
}
/* Return true if DECL that is shared iff SHARED is true should
   be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
   flag set.  */

bool
gfc_omp_private_debug_clause (tree decl, bool shared)
{
  if (GFC_DECL_CRAY_POINTEE (decl))
    return true;

  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
	  && VAR_P (TREE_OPERAND (value, 0))
	  && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
	return shared;
    }

  return false;
}
/* Register language specific type size variables as potentially OpenMP
   firstprivate variables.  */

void
gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
    {
      int r;

      gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
      for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
	{
	  omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
	  omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
	  omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
	}
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
    }
}
static tree
gfc_trans_add_clause (tree node, tree tail)
{
  OMP_CLAUSE_CHAIN (node) = tail;
  return node;
}
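
/* Return the backend decl to use for SYM in an OpenMP clause.  For dummy
   arguments of a DECLARE SIMD procedure this is the argument's position,
   and for function results (including alternate entry points and entry
   masters) the fake result decl is substituted.  */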
static tree
gfc_trans_omp_variable (gfc_symbol *sym, bool declare_simd)
{
1298 gfc_symbol
*proc_sym
;
1299 gfc_formal_arglist
*f
;
1301 gcc_assert (sym
->attr
.dummy
);
1302 proc_sym
= sym
->ns
->proc_name
;
1303 if (proc_sym
->attr
.entry_master
)
1305 if (gfc_return_by_reference (proc_sym
))
1308 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1311 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1317 return build_int_cst (integer_type_node
, cnt
);
1320 tree t
= gfc_get_symbol_decl (sym
);
1324 bool alternate_entry
;
1327 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1328 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1329 && sym
->result
== sym
;
1330 entry_master
= sym
->attr
.result
1331 && sym
->ns
->proc_name
->attr
.entry_master
1332 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1333 parent_decl
= current_function_decl
1334 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1336 if ((t
== parent_decl
&& return_value
)
1337 || (sym
->ns
&& sym
->ns
->proc_name
1338 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1339 && (alternate_entry
|| entry_master
)))
1344 /* Special case for assigning the return value of a function.
1345 Self recursive functions must have an explicit return value. */
1346 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1347 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1349 /* Similarly for alternate entry points. */
1350 else if (alternate_entry
1351 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1354 gfc_entry_list
*el
= NULL
;
1356 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1359 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1364 else if (entry_master
1365 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1367 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
static tree
gfc_trans_omp_variable_list (enum omp_clause_code code,
			     gfc_omp_namelist *namelist, tree list,
			     bool declare_simd)
{
  for (; namelist != NULL; namelist = namelist->next)
    if (namelist->sym->attr.referenced || declare_simd)
      {
	tree t = gfc_trans_omp_variable (namelist->sym, declare_simd);
	if (t != error_mark_node)
	  {
	    tree node = build_omp_clause (input_location, code);
	    OMP_CLAUSE_DECL (node) = t;
	    list = gfc_trans_add_clause (node, list);
	  }
      }
  return list;
}
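
/* Callback data for omp_udr_find_orig: records whether the OMP_ORIG
   symbol of a user defined reduction appears in its initializer.  */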
struct omp_udr_find_orig_data
{
  gfc_omp_udr *omp_udr;
  bool omp_orig_seen;
};
static int
omp_udr_find_orig (gfc_expr **e, int *walk_subtrees ATTRIBUTE_UNUSED,
		   void *data)
{
  struct omp_udr_find_orig_data *cd = (struct omp_udr_find_orig_data *) data;
  if ((*e)->expr_type == EXPR_VARIABLE
      && (*e)->symtree->n.sym == cd->omp_udr->omp_orig)
    cd->omp_orig_seen = true;

  return 0;
}
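
/* Fill in the OMP_CLAUSE_REDUCTION_INIT and OMP_CLAUSE_REDUCTION_MERGE
   statement lists of clause C for an array reduction or a user defined
   reduction on N->sym, building the initializer and combiner expressions
   with temporary symbols/symtrees and storing the placeholder VAR_DECL
   in OMP_CLAUSE_REDUCTION_PLACEHOLDER.  */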
static void
gfc_trans_omp_array_reduction_or_udr (tree c, gfc_omp_namelist *n, locus where)
{
1412 gfc_symbol
*sym
= n
->sym
;
1413 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
1414 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
1415 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
1416 gfc_symbol omp_var_copy
[4];
1417 gfc_expr
*e1
, *e2
, *e3
, *e4
;
1419 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
1420 locus old_loc
= gfc_current_locus
;
1423 gfc_omp_udr
*udr
= n
->udr
? n
->udr
->udr
: NULL
;
1425 decl
= OMP_CLAUSE_DECL (c
);
1426 gfc_current_locus
= where
;
1427 type
= TREE_TYPE (decl
);
1428 outer_decl
= create_tmp_var_raw (type
);
1429 if (TREE_CODE (decl
) == PARM_DECL
1430 && TREE_CODE (type
) == REFERENCE_TYPE
1431 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
1432 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
1434 decl
= build_fold_indirect_ref (decl
);
1435 type
= TREE_TYPE (type
);
1438 /* Create a fake symbol for init value. */
1439 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
1440 init_val_sym
.ns
= sym
->ns
;
1441 init_val_sym
.name
= sym
->name
;
1442 init_val_sym
.ts
= sym
->ts
;
1443 init_val_sym
.attr
.referenced
= 1;
1444 init_val_sym
.declared_at
= where
;
1445 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
1446 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1447 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
1448 else if (udr
->initializer_ns
)
1449 backend_decl
= NULL
;
1451 switch (sym
->ts
.type
)
1457 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
1460 backend_decl
= NULL_TREE
;
1463 init_val_sym
.backend_decl
= backend_decl
;
1465 /* Create a fake symbol for the outer array reference. */
1468 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
1469 outer_sym
.attr
.dummy
= 0;
1470 outer_sym
.attr
.result
= 0;
1471 outer_sym
.attr
.flavor
= FL_VARIABLE
;
1472 outer_sym
.backend_decl
= outer_decl
;
1473 if (decl
!= OMP_CLAUSE_DECL (c
))
1474 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
1476 /* Create fake symtrees for it. */
1477 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
1478 symtree1
->n
.sym
= sym
;
1479 gcc_assert (symtree1
== root1
);
1481 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
1482 symtree2
->n
.sym
= &init_val_sym
;
1483 gcc_assert (symtree2
== root2
);
1485 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
1486 symtree3
->n
.sym
= &outer_sym
;
1487 gcc_assert (symtree3
== root3
);
1489 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
1492 omp_var_copy
[0] = *udr
->omp_out
;
1493 omp_var_copy
[1] = *udr
->omp_in
;
1494 *udr
->omp_out
= outer_sym
;
1495 *udr
->omp_in
= *sym
;
1496 if (udr
->initializer_ns
)
1498 omp_var_copy
[2] = *udr
->omp_priv
;
1499 omp_var_copy
[3] = *udr
->omp_orig
;
1500 *udr
->omp_priv
= *sym
;
1501 *udr
->omp_orig
= outer_sym
;
1505 /* Create expressions. */
1506 e1
= gfc_get_expr ();
1507 e1
->expr_type
= EXPR_VARIABLE
;
1509 e1
->symtree
= symtree1
;
1511 if (sym
->attr
.dimension
)
1513 e1
->ref
= ref
= gfc_get_ref ();
1514 ref
->type
= REF_ARRAY
;
1515 ref
->u
.ar
.where
= where
;
1516 ref
->u
.ar
.as
= sym
->as
;
1517 ref
->u
.ar
.type
= AR_FULL
;
1518 ref
->u
.ar
.dimen
= 0;
1520 t
= gfc_resolve_expr (e1
);
1524 if (backend_decl
!= NULL_TREE
)
1526 e2
= gfc_get_expr ();
1527 e2
->expr_type
= EXPR_VARIABLE
;
1529 e2
->symtree
= symtree2
;
1531 t
= gfc_resolve_expr (e2
);
1534 else if (udr
->initializer_ns
== NULL
)
1536 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
1537 e2
= gfc_default_initializer (&sym
->ts
);
1539 t
= gfc_resolve_expr (e2
);
1542 else if (n
->udr
->initializer
->op
== EXEC_ASSIGN
)
1544 e2
= gfc_copy_expr (n
->udr
->initializer
->expr2
);
1545 t
= gfc_resolve_expr (e2
);
1548 if (udr
&& udr
->initializer_ns
)
1550 struct omp_udr_find_orig_data cd
;
1552 cd
.omp_orig_seen
= false;
1553 gfc_code_walker (&n
->udr
->initializer
,
1554 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
1555 if (cd
.omp_orig_seen
)
1556 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
1559 e3
= gfc_copy_expr (e1
);
1560 e3
->symtree
= symtree3
;
1561 t
= gfc_resolve_expr (e3
);
1566 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
1570 e4
= gfc_add (e3
, e1
);
1573 e4
= gfc_multiply (e3
, e1
);
1575 case TRUTH_ANDIF_EXPR
:
1576 e4
= gfc_and (e3
, e1
);
1578 case TRUTH_ORIF_EXPR
:
1579 e4
= gfc_or (e3
, e1
);
1582 e4
= gfc_eqv (e3
, e1
);
1585 e4
= gfc_neqv (e3
, e1
);
1603 if (n
->udr
->combiner
->op
== EXEC_ASSIGN
)
1606 e3
= gfc_copy_expr (n
->udr
->combiner
->expr1
);
1607 e4
= gfc_copy_expr (n
->udr
->combiner
->expr2
);
1608 t
= gfc_resolve_expr (e3
);
1610 t
= gfc_resolve_expr (e4
);
1619 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
1620 intrinsic_sym
.ns
= sym
->ns
;
1621 intrinsic_sym
.name
= iname
;
1622 intrinsic_sym
.ts
= sym
->ts
;
1623 intrinsic_sym
.attr
.referenced
= 1;
1624 intrinsic_sym
.attr
.intrinsic
= 1;
1625 intrinsic_sym
.attr
.function
= 1;
1626 intrinsic_sym
.result
= &intrinsic_sym
;
1627 intrinsic_sym
.declared_at
= where
;
1629 symtree4
= gfc_new_symtree (&root4
, iname
);
1630 symtree4
->n
.sym
= &intrinsic_sym
;
1631 gcc_assert (symtree4
== root4
);
1633 e4
= gfc_get_expr ();
1634 e4
->expr_type
= EXPR_FUNCTION
;
1636 e4
->symtree
= symtree4
;
1637 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
1638 e4
->value
.function
.actual
->expr
= e3
;
1639 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
1640 e4
->value
.function
.actual
->next
->expr
= e1
;
1642 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1644 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1645 e1
= gfc_copy_expr (e1
);
1646 e3
= gfc_copy_expr (e3
);
1647 t
= gfc_resolve_expr (e4
);
1651 /* Create the init statement list. */
1654 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
1656 stmt
= gfc_trans_call (n
->udr
->initializer
, false,
1657 NULL_TREE
, NULL_TREE
, false);
1658 if (TREE_CODE (stmt
) != BIND_EXPR
)
1659 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1662 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
1664 /* Create the merge statement list. */
1667 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
1669 stmt
= gfc_trans_call (n
->udr
->combiner
, false,
1670 NULL_TREE
, NULL_TREE
, false);
1671 if (TREE_CODE (stmt
) != BIND_EXPR
)
1672 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1675 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
1677 /* And stick the placeholder VAR_DECL into the clause as well. */
1678 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
1680 gfc_current_locus
= old_loc
;
1693 gfc_free_array_spec (outer_sym
.as
);
1697 *udr
->omp_out
= omp_var_copy
[0];
1698 *udr
->omp_in
= omp_var_copy
[1];
1699 if (udr
->initializer_ns
)
1701 *udr
->omp_priv
= omp_var_copy
[2];
1702 *udr
->omp_orig
= omp_var_copy
[3];
static tree
gfc_trans_omp_reduction_list (gfc_omp_namelist *namelist, tree list,
			      locus where, bool mark_addressable)
{
1711 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1712 if (namelist
->sym
->attr
.referenced
)
1714 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
1715 if (t
!= error_mark_node
)
1717 tree node
= build_omp_clause (where
.lb
->location
,
1718 OMP_CLAUSE_REDUCTION
);
1719 OMP_CLAUSE_DECL (node
) = t
;
1720 if (mark_addressable
)
1721 TREE_ADDRESSABLE (t
) = 1;
1722 switch (namelist
->u
.reduction_op
)
1724 case OMP_REDUCTION_PLUS
:
1725 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
1727 case OMP_REDUCTION_MINUS
:
1728 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
1730 case OMP_REDUCTION_TIMES
:
1731 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
1733 case OMP_REDUCTION_AND
:
1734 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
1736 case OMP_REDUCTION_OR
:
1737 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
1739 case OMP_REDUCTION_EQV
:
1740 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
1742 case OMP_REDUCTION_NEQV
:
1743 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
1745 case OMP_REDUCTION_MAX
:
1746 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
1748 case OMP_REDUCTION_MIN
:
1749 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
1751 case OMP_REDUCTION_IAND
:
1752 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
1754 case OMP_REDUCTION_IOR
:
1755 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
1757 case OMP_REDUCTION_IEOR
:
1758 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
1760 case OMP_REDUCTION_USER
:
1761 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
1766 if (namelist
->sym
->attr
.dimension
1767 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
1768 || namelist
->sym
->attr
.allocatable
)
1769 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
1770 list
= gfc_trans_add_clause (node
, list
);
static tree
gfc_convert_expr_to_tree (stmtblock_t *block, gfc_expr *expr)
{
  gfc_se se;
  tree result;

  gfc_init_se (&se, NULL);
  gfc_conv_expr (&se, expr);
  gfc_add_block_to_block (block, &se.pre);
  result = gfc_evaluate_now (se.expr, block);
  gfc_add_block_to_block (block, &se.post);

  return result;
}

static vec<tree, va_heap, vl_embed> *doacross_steps;
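
/* Translate the gfc_omp_clauses list CLAUSES attached to an OpenMP/OpenACC
   directive at WHERE into a chain of OMP_CLAUSE_* trees, emitting any
   needed setup code into BLOCK.  DECLARE_SIMD is true when called for
   !$omp declare simd clauses, where unreferenced dummy arguments may still
   be named and are translated to their argument position.  */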
static tree
gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
		       locus where, bool declare_simd = false)
{
1797 tree omp_clauses
= NULL_TREE
, chunk_size
, c
;
1799 enum omp_clause_code clause_code
;
1802 if (clauses
== NULL
)
1805 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
1807 gfc_omp_namelist
*n
= clauses
->lists
[list
];
1813 case OMP_LIST_REDUCTION
:
	  /* An OpenACC async clause indicates the need to set reduction
	     arguments addressable, to allow asynchronous copy-out.  */
1816 omp_clauses
= gfc_trans_omp_reduction_list (n
, omp_clauses
, where
,
1819 case OMP_LIST_PRIVATE
:
1820 clause_code
= OMP_CLAUSE_PRIVATE
;
1822 case OMP_LIST_SHARED
:
1823 clause_code
= OMP_CLAUSE_SHARED
;
1825 case OMP_LIST_FIRSTPRIVATE
:
1826 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
1828 case OMP_LIST_LASTPRIVATE
:
1829 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
1831 case OMP_LIST_COPYIN
:
1832 clause_code
= OMP_CLAUSE_COPYIN
;
1834 case OMP_LIST_COPYPRIVATE
:
1835 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
1837 case OMP_LIST_UNIFORM
:
1838 clause_code
= OMP_CLAUSE_UNIFORM
;
1840 case OMP_LIST_USE_DEVICE
:
1841 case OMP_LIST_USE_DEVICE_PTR
:
1842 clause_code
= OMP_CLAUSE_USE_DEVICE_PTR
;
1844 case OMP_LIST_IS_DEVICE_PTR
:
1845 clause_code
= OMP_CLAUSE_IS_DEVICE_PTR
;
1850 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
1853 case OMP_LIST_ALIGNED
:
1854 for (; n
!= NULL
; n
= n
->next
)
1855 if (n
->sym
->attr
.referenced
|| declare_simd
)
1857 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1858 if (t
!= error_mark_node
)
1860 tree node
= build_omp_clause (input_location
,
1861 OMP_CLAUSE_ALIGNED
);
1862 OMP_CLAUSE_DECL (node
) = t
;
1868 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
1871 gfc_init_se (&se
, NULL
);
1872 gfc_conv_expr (&se
, n
->expr
);
1873 gfc_add_block_to_block (block
, &se
.pre
);
1874 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
1875 gfc_add_block_to_block (block
, &se
.post
);
1877 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
1879 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1883 case OMP_LIST_LINEAR
:
1885 gfc_expr
*last_step_expr
= NULL
;
1886 tree last_step
= NULL_TREE
;
1887 bool last_step_parm
= false;
1889 for (; n
!= NULL
; n
= n
->next
)
1893 last_step_expr
= n
->expr
;
1894 last_step
= NULL_TREE
;
1895 last_step_parm
= false;
1897 if (n
->sym
->attr
.referenced
|| declare_simd
)
1899 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1900 if (t
!= error_mark_node
)
1902 tree node
= build_omp_clause (input_location
,
1904 OMP_CLAUSE_DECL (node
) = t
;
1905 omp_clause_linear_kind kind
;
1906 switch (n
->u
.linear_op
)
1908 case OMP_LINEAR_DEFAULT
:
1909 kind
= OMP_CLAUSE_LINEAR_DEFAULT
;
1911 case OMP_LINEAR_REF
:
1912 kind
= OMP_CLAUSE_LINEAR_REF
;
1914 case OMP_LINEAR_VAL
:
1915 kind
= OMP_CLAUSE_LINEAR_VAL
;
1917 case OMP_LINEAR_UVAL
:
1918 kind
= OMP_CLAUSE_LINEAR_UVAL
;
1923 OMP_CLAUSE_LINEAR_KIND (node
) = kind
;
1924 if (last_step_expr
&& last_step
== NULL_TREE
)
1928 gfc_init_se (&se
, NULL
);
1929 gfc_conv_expr (&se
, last_step_expr
);
1930 gfc_add_block_to_block (block
, &se
.pre
);
1931 last_step
= gfc_evaluate_now (se
.expr
, block
);
1932 gfc_add_block_to_block (block
, &se
.post
);
1934 else if (last_step_expr
->expr_type
== EXPR_VARIABLE
)
1936 gfc_symbol
*s
= last_step_expr
->symtree
->n
.sym
;
1937 last_step
= gfc_trans_omp_variable (s
, true);
1938 last_step_parm
= true;
1942 = gfc_conv_constant_to_tree (last_step_expr
);
1946 OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (node
) = 1;
1947 OMP_CLAUSE_LINEAR_STEP (node
) = last_step
;
1951 tree type
= gfc_typenode_for_spec (&n
->sym
->ts
);
1952 OMP_CLAUSE_LINEAR_STEP (node
)
1953 = fold_convert (type
, last_step
);
1955 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
1956 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
1957 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1963 case OMP_LIST_DEPEND
:
1964 for (; n
!= NULL
; n
= n
->next
)
1966 if (n
->u
.depend_op
== OMP_DEPEND_SINK_FIRST
)
1968 tree vec
= NULL_TREE
;
1972 tree addend
= integer_zero_node
, t
;
1976 addend
= gfc_conv_constant_to_tree (n
->expr
);
1977 if (TREE_CODE (addend
) == INTEGER_CST
1978 && tree_int_cst_sgn (addend
) == -1)
1981 addend
= const_unop (NEGATE_EXPR
,
1982 TREE_TYPE (addend
), addend
);
1985 t
= gfc_trans_omp_variable (n
->sym
, false);
1986 if (t
!= error_mark_node
)
1988 if (i
< vec_safe_length (doacross_steps
)
1989 && !integer_zerop (addend
)
1990 && (*doacross_steps
)[i
])
1992 tree step
= (*doacross_steps
)[i
];
1993 addend
= fold_convert (TREE_TYPE (step
), addend
);
1994 addend
= build2 (TRUNC_DIV_EXPR
,
1995 TREE_TYPE (step
), addend
, step
);
1997 vec
= tree_cons (addend
, t
, vec
);
1999 OMP_CLAUSE_DEPEND_SINK_NEGATIVE (vec
) = 1;
2002 || n
->next
->u
.depend_op
!= OMP_DEPEND_SINK
)
2006 if (vec
== NULL_TREE
)
2009 tree node
= build_omp_clause (input_location
,
2011 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_SINK
;
2012 OMP_CLAUSE_DECL (node
) = nreverse (vec
);
2013 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2017 if (!n
->sym
->attr
.referenced
)
2020 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_DEPEND
);
2021 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2023 tree decl
= gfc_get_symbol_decl (n
->sym
);
2024 if (gfc_omp_privatize_by_reference (decl
))
2025 decl
= build_fold_indirect_ref (decl
);
2026 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2028 decl
= gfc_conv_descriptor_data_get (decl
);
2029 decl
= fold_convert (build_pointer_type (char_type_node
),
2031 decl
= build_fold_indirect_ref (decl
);
2033 else if (DECL_P (decl
))
2034 TREE_ADDRESSABLE (decl
) = 1;
2035 OMP_CLAUSE_DECL (node
) = decl
;
2040 gfc_init_se (&se
, NULL
);
2041 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2043 gfc_conv_expr_reference (&se
, n
->expr
);
2048 gfc_conv_expr_descriptor (&se
, n
->expr
);
2049 ptr
= gfc_conv_array_data (se
.expr
);
2051 gfc_add_block_to_block (block
, &se
.pre
);
2052 gfc_add_block_to_block (block
, &se
.post
);
2053 ptr
= fold_convert (build_pointer_type (char_type_node
),
2055 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2057 switch (n
->u
.depend_op
)
2060 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
2062 case OMP_DEPEND_OUT
:
2063 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
2065 case OMP_DEPEND_INOUT
:
2066 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
2071 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2075 for (; n
!= NULL
; n
= n
->next
)
2077 if (!n
->sym
->attr
.referenced
)
2080 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2081 tree node2
= NULL_TREE
;
2082 tree node3
= NULL_TREE
;
2083 tree node4
= NULL_TREE
;
2084 tree decl
= gfc_get_symbol_decl (n
->sym
);
2086 TREE_ADDRESSABLE (decl
) = 1;
2087 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2089 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2090 && (gfc_omp_privatize_by_reference (decl
)
2091 || GFC_DECL_GET_SCALAR_POINTER (decl
)
2092 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
2093 || GFC_DECL_CRAY_POINTEE (decl
)
2094 || GFC_DESCRIPTOR_TYPE_P
2095 (TREE_TYPE (TREE_TYPE (decl
)))))
2097 tree orig_decl
= decl
;
2098 node4
= build_omp_clause (input_location
,
2100 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2101 OMP_CLAUSE_DECL (node4
) = decl
;
2102 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2103 decl
= build_fold_indirect_ref (decl
);
2104 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
2105 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
2106 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
2108 node3
= build_omp_clause (input_location
,
2110 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2111 OMP_CLAUSE_DECL (node3
) = decl
;
2112 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2113 decl
= build_fold_indirect_ref (decl
);
2116 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2118 tree type
= TREE_TYPE (decl
);
2119 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2120 ptr
= fold_convert (build_pointer_type (char_type_node
),
2122 ptr
= build_fold_indirect_ref (ptr
);
2123 OMP_CLAUSE_DECL (node
) = ptr
;
2124 node2
= build_omp_clause (input_location
,
2126 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2127 OMP_CLAUSE_DECL (node2
) = decl
;
2128 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2129 node3
= build_omp_clause (input_location
,
2131 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2132 OMP_CLAUSE_DECL (node3
)
2133 = gfc_conv_descriptor_data_get (decl
);
2134 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2136 /* We have to check for n->sym->attr.dimension because
2137 of scalar coarrays. */
2138 if (n
->sym
->attr
.pointer
&& n
->sym
->attr
.dimension
)
2140 stmtblock_t cond_block
;
2142 = gfc_create_var (gfc_array_index_type
, NULL
);
2143 tree tem
, then_b
, else_b
, zero
, cond
;
2145 gfc_init_block (&cond_block
);
2147 = gfc_full_array_size (&cond_block
, decl
,
2148 GFC_TYPE_ARRAY_RANK (type
));
2149 gfc_add_modify (&cond_block
, size
, tem
);
2150 then_b
= gfc_finish_block (&cond_block
);
2151 gfc_init_block (&cond_block
);
2152 zero
= build_int_cst (gfc_array_index_type
, 0);
2153 gfc_add_modify (&cond_block
, size
, zero
);
2154 else_b
= gfc_finish_block (&cond_block
);
2155 tem
= gfc_conv_descriptor_data_get (decl
);
2156 tem
= fold_convert (pvoid_type_node
, tem
);
2157 cond
= fold_build2_loc (input_location
, NE_EXPR
,
2159 tem
, null_pointer_node
);
2160 gfc_add_expr_to_block (block
,
2161 build3_loc (input_location
,
2166 OMP_CLAUSE_SIZE (node
) = size
;
2168 else if (n
->sym
->attr
.dimension
)
2169 OMP_CLAUSE_SIZE (node
)
2170 = gfc_full_array_size (block
, decl
,
2171 GFC_TYPE_ARRAY_RANK (type
));
2172 if (n
->sym
->attr
.dimension
)
2175 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2176 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2177 OMP_CLAUSE_SIZE (node
)
2178 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2179 OMP_CLAUSE_SIZE (node
), elemsz
);
2183 OMP_CLAUSE_DECL (node
) = decl
;
2188 gfc_init_se (&se
, NULL
);
2189 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2191 gfc_conv_expr_reference (&se
, n
->expr
);
2192 gfc_add_block_to_block (block
, &se
.pre
);
2194 OMP_CLAUSE_SIZE (node
)
2195 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2199 gfc_conv_expr_descriptor (&se
, n
->expr
);
2200 ptr
= gfc_conv_array_data (se
.expr
);
2201 tree type
= TREE_TYPE (se
.expr
);
2202 gfc_add_block_to_block (block
, &se
.pre
);
2203 OMP_CLAUSE_SIZE (node
)
2204 = gfc_full_array_size (block
, se
.expr
,
2205 GFC_TYPE_ARRAY_RANK (type
));
2207 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2208 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2209 OMP_CLAUSE_SIZE (node
)
2210 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2211 OMP_CLAUSE_SIZE (node
), elemsz
);
2213 gfc_add_block_to_block (block
, &se
.post
);
2214 ptr
= fold_convert (build_pointer_type (char_type_node
),
2216 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2218 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2219 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
2221 node4
= build_omp_clause (input_location
,
2223 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2224 OMP_CLAUSE_DECL (node4
) = decl
;
2225 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2226 decl
= build_fold_indirect_ref (decl
);
2228 ptr
= fold_convert (sizetype
, ptr
);
2229 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2231 tree type
= TREE_TYPE (decl
);
2232 ptr2
= gfc_conv_descriptor_data_get (decl
);
2233 node2
= build_omp_clause (input_location
,
2235 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2236 OMP_CLAUSE_DECL (node2
) = decl
;
2237 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2238 node3
= build_omp_clause (input_location
,
2240 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2241 OMP_CLAUSE_DECL (node3
)
2242 = gfc_conv_descriptor_data_get (decl
);
2246 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2247 ptr2
= build_fold_addr_expr (decl
);
2250 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2253 node3
= build_omp_clause (input_location
,
2255 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2256 OMP_CLAUSE_DECL (node3
) = decl
;
2258 ptr2
= fold_convert (sizetype
, ptr2
);
2259 OMP_CLAUSE_SIZE (node3
)
2260 = fold_build2 (MINUS_EXPR
, sizetype
, ptr
, ptr2
);
2262 switch (n
->u
.map_op
)
2265 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALLOC
);
2268 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TO
);
2271 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FROM
);
2273 case OMP_MAP_TOFROM
:
2274 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TOFROM
);
2276 case OMP_MAP_ALWAYS_TO
:
2277 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TO
);
2279 case OMP_MAP_ALWAYS_FROM
:
2280 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_FROM
);
2282 case OMP_MAP_ALWAYS_TOFROM
:
2283 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TOFROM
);
2285 case OMP_MAP_RELEASE
:
2286 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_RELEASE
);
2288 case OMP_MAP_DELETE
:
2289 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DELETE
);
2291 case OMP_MAP_FORCE_ALLOC
:
2292 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_ALLOC
);
2294 case OMP_MAP_FORCE_TO
:
2295 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TO
);
2297 case OMP_MAP_FORCE_FROM
:
2298 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_FROM
);
2300 case OMP_MAP_FORCE_TOFROM
:
2301 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TOFROM
);
2303 case OMP_MAP_FORCE_PRESENT
:
2304 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_PRESENT
);
2306 case OMP_MAP_FORCE_DEVICEPTR
:
2307 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEVICEPTR
);
2312 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2314 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
2316 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
2318 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
2323 case OMP_LIST_CACHE
:
2324 for (; n
!= NULL
; n
= n
->next
)
2326 if (!n
->sym
->attr
.referenced
)
2332 clause_code
= OMP_CLAUSE_TO
;
2335 clause_code
= OMP_CLAUSE_FROM
;
2337 case OMP_LIST_CACHE
:
2338 clause_code
= OMP_CLAUSE__CACHE_
;
2343 tree node
= build_omp_clause (input_location
, clause_code
);
2344 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2346 tree decl
= gfc_get_symbol_decl (n
->sym
);
2347 if (gfc_omp_privatize_by_reference (decl
))
2348 decl
= build_fold_indirect_ref (decl
);
2349 else if (DECL_P (decl
))
2350 TREE_ADDRESSABLE (decl
) = 1;
2351 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2353 tree type
= TREE_TYPE (decl
);
2354 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2355 ptr
= fold_convert (build_pointer_type (char_type_node
),
2357 ptr
= build_fold_indirect_ref (ptr
);
2358 OMP_CLAUSE_DECL (node
) = ptr
;
2359 OMP_CLAUSE_SIZE (node
)
2360 = gfc_full_array_size (block
, decl
,
2361 GFC_TYPE_ARRAY_RANK (type
));
2363 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2364 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2365 OMP_CLAUSE_SIZE (node
)
2366 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2367 OMP_CLAUSE_SIZE (node
), elemsz
);
2370 OMP_CLAUSE_DECL (node
) = decl
;
2375 gfc_init_se (&se
, NULL
);
2376 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2378 gfc_conv_expr_reference (&se
, n
->expr
);
2380 gfc_add_block_to_block (block
, &se
.pre
);
2381 OMP_CLAUSE_SIZE (node
)
2382 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2386 gfc_conv_expr_descriptor (&se
, n
->expr
);
2387 ptr
= gfc_conv_array_data (se
.expr
);
2388 tree type
= TREE_TYPE (se
.expr
);
2389 gfc_add_block_to_block (block
, &se
.pre
);
2390 OMP_CLAUSE_SIZE (node
)
2391 = gfc_full_array_size (block
, se
.expr
,
2392 GFC_TYPE_ARRAY_RANK (type
));
2394 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2395 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2396 OMP_CLAUSE_SIZE (node
)
2397 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2398 OMP_CLAUSE_SIZE (node
), elemsz
);
2400 gfc_add_block_to_block (block
, &se
.post
);
2401 ptr
= fold_convert (build_pointer_type (char_type_node
),
2403 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2405 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
  if (clauses->if_expr)
    {
      tree if_var;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->if_expr);
      gfc_add_block_to_block (block, &se.pre);
      if_var = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
      OMP_CLAUSE_IF_MODIFIER (c) = ERROR_MARK;
      OMP_CLAUSE_IF_EXPR (c) = if_var;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }
2428 for (ifc
= 0; ifc
< OMP_IF_LAST
; ifc
++)
2429 if (clauses
->if_exprs
[ifc
])
2433 gfc_init_se (&se
, NULL
);
2434 gfc_conv_expr (&se
, clauses
->if_exprs
[ifc
]);
2435 gfc_add_block_to_block (block
, &se
.pre
);
2436 if_var
= gfc_evaluate_now (se
.expr
, block
);
2437 gfc_add_block_to_block (block
, &se
.post
);
2439 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
2442 case OMP_IF_PARALLEL
:
2443 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_PARALLEL
;
2446 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASK
;
2448 case OMP_IF_TASKLOOP
:
2449 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASKLOOP
;
2452 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET
;
2454 case OMP_IF_TARGET_DATA
:
2455 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_DATA
;
2457 case OMP_IF_TARGET_UPDATE
:
2458 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_UPDATE
;
2460 case OMP_IF_TARGET_ENTER_DATA
:
2461 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_ENTER_DATA
;
2463 case OMP_IF_TARGET_EXIT_DATA
:
2464 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_EXIT_DATA
;
2469 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2470 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2473 if (clauses
->final_expr
)
2477 gfc_init_se (&se
, NULL
);
2478 gfc_conv_expr (&se
, clauses
->final_expr
);
2479 gfc_add_block_to_block (block
, &se
.pre
);
2480 final_var
= gfc_evaluate_now (se
.expr
, block
);
2481 gfc_add_block_to_block (block
, &se
.post
);
2483 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FINAL
);
2484 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
2485 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2488 if (clauses
->num_threads
)
2492 gfc_init_se (&se
, NULL
);
2493 gfc_conv_expr (&se
, clauses
->num_threads
);
2494 gfc_add_block_to_block (block
, &se
.pre
);
2495 num_threads
= gfc_evaluate_now (se
.expr
, block
);
2496 gfc_add_block_to_block (block
, &se
.post
);
2498 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_THREADS
);
2499 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
2500 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
  chunk_size = NULL_TREE;
  if (clauses->chunk_size)
    {
      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->chunk_size);
      gfc_add_block_to_block (block, &se.pre);
      chunk_size = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);
    }

  if (clauses->sched_kind != OMP_SCHED_NONE)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
      OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
      switch (clauses->sched_kind)
        {
        case OMP_SCHED_STATIC:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
          break;
        case OMP_SCHED_DYNAMIC:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
          break;
        case OMP_SCHED_GUIDED:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
          break;
        case OMP_SCHED_RUNTIME:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
          break;
        case OMP_SCHED_AUTO:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
          break;
        default:
          gcc_unreachable ();
        }
      if (clauses->sched_monotonic)
        OMP_CLAUSE_SCHEDULE_KIND (c)
          = (omp_clause_schedule_kind) (OMP_CLAUSE_SCHEDULE_KIND (c)
                                        | OMP_CLAUSE_SCHEDULE_MONOTONIC);
      else if (clauses->sched_nonmonotonic)
        OMP_CLAUSE_SCHEDULE_KIND (c)
          = (omp_clause_schedule_kind) (OMP_CLAUSE_SCHEDULE_KIND (c)
                                        | OMP_CLAUSE_SCHEDULE_NONMONOTONIC);
      if (clauses->sched_simd)
        OMP_CLAUSE_SCHEDULE_SIMD (c) = 1;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }
  if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
      switch (clauses->default_sharing)
        {
        case OMP_DEFAULT_NONE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
          break;
        case OMP_DEFAULT_SHARED:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
          break;
        case OMP_DEFAULT_PRIVATE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
          break;
        case OMP_DEFAULT_FIRSTPRIVATE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          break;
        case OMP_DEFAULT_PRESENT:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRESENT;
          break;
        default:
          gcc_unreachable ();
        }
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }
  if (clauses->nowait)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->ordered)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
      OMP_CLAUSE_ORDERED_EXPR (c)
        = clauses->orderedc ? build_int_cst (integer_type_node,
                                             clauses->orderedc) : NULL_TREE;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->untied)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->mergeable)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_MERGEABLE);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->collapse)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
      OMP_CLAUSE_COLLAPSE_EXPR (c)
        = build_int_cst (integer_type_node, clauses->collapse);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }
2611 if (clauses
->inbranch
)
2613 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INBRANCH
);
2614 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2617 if (clauses
->notinbranch
)
2619 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOTINBRANCH
);
2620 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2623 switch (clauses
->cancel
)
2625 case OMP_CANCEL_UNKNOWN
:
2627 case OMP_CANCEL_PARALLEL
:
2628 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PARALLEL
);
2629 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2631 case OMP_CANCEL_SECTIONS
:
2632 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SECTIONS
);
2633 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2636 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FOR
);
2637 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2639 case OMP_CANCEL_TASKGROUP
:
2640 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TASKGROUP
);
2641 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2645 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
2647 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PROC_BIND
);
2648 switch (clauses
->proc_bind
)
2650 case OMP_PROC_BIND_MASTER
:
2651 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
2653 case OMP_PROC_BIND_SPREAD
:
2654 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
2656 case OMP_PROC_BIND_CLOSE
:
2657 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
2662 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2665 if (clauses
->safelen_expr
)
2669 gfc_init_se (&se
, NULL
);
2670 gfc_conv_expr (&se
, clauses
->safelen_expr
);
2671 gfc_add_block_to_block (block
, &se
.pre
);
2672 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
2673 gfc_add_block_to_block (block
, &se
.post
);
2675 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SAFELEN
);
2676 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
2677 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2680 if (clauses
->simdlen_expr
)
2684 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2685 OMP_CLAUSE_SIMDLEN_EXPR (c
)
2686 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
2687 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2693 gfc_init_se (&se
, NULL
);
2694 gfc_conv_expr (&se
, clauses
->simdlen_expr
);
2695 gfc_add_block_to_block (block
, &se
.pre
);
2696 simdlen_var
= gfc_evaluate_now (se
.expr
, block
);
2697 gfc_add_block_to_block (block
, &se
.post
);
2699 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2700 OMP_CLAUSE_SIMDLEN_EXPR (c
) = simdlen_var
;
2701 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2705 if (clauses
->num_teams
)
2709 gfc_init_se (&se
, NULL
);
2710 gfc_conv_expr (&se
, clauses
->num_teams
);
2711 gfc_add_block_to_block (block
, &se
.pre
);
2712 num_teams
= gfc_evaluate_now (se
.expr
, block
);
2713 gfc_add_block_to_block (block
, &se
.post
);
2715 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TEAMS
);
2716 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
2717 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2720 if (clauses
->device
)
2724 gfc_init_se (&se
, NULL
);
2725 gfc_conv_expr (&se
, clauses
->device
);
2726 gfc_add_block_to_block (block
, &se
.pre
);
2727 device
= gfc_evaluate_now (se
.expr
, block
);
2728 gfc_add_block_to_block (block
, &se
.post
);
2730 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEVICE
);
2731 OMP_CLAUSE_DEVICE_ID (c
) = device
;
2732 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2735 if (clauses
->thread_limit
)
2739 gfc_init_se (&se
, NULL
);
2740 gfc_conv_expr (&se
, clauses
->thread_limit
);
2741 gfc_add_block_to_block (block
, &se
.pre
);
2742 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
2743 gfc_add_block_to_block (block
, &se
.post
);
2745 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREAD_LIMIT
);
2746 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
2747 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2750 chunk_size
= NULL_TREE
;
2751 if (clauses
->dist_chunk_size
)
2753 gfc_init_se (&se
, NULL
);
2754 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
2755 gfc_add_block_to_block (block
, &se
.pre
);
2756 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2757 gfc_add_block_to_block (block
, &se
.post
);
2760 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
2762 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DIST_SCHEDULE
);
2763 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2764 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2767 if (clauses
->grainsize
)
2771 gfc_init_se (&se
, NULL
);
2772 gfc_conv_expr (&se
, clauses
->grainsize
);
2773 gfc_add_block_to_block (block
, &se
.pre
);
2774 grainsize
= gfc_evaluate_now (se
.expr
, block
);
2775 gfc_add_block_to_block (block
, &se
.post
);
2777 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GRAINSIZE
);
2778 OMP_CLAUSE_GRAINSIZE_EXPR (c
) = grainsize
;
2779 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2782 if (clauses
->num_tasks
)
2786 gfc_init_se (&se
, NULL
);
2787 gfc_conv_expr (&se
, clauses
->num_tasks
);
2788 gfc_add_block_to_block (block
, &se
.pre
);
2789 num_tasks
= gfc_evaluate_now (se
.expr
, block
);
2790 gfc_add_block_to_block (block
, &se
.post
);
2792 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TASKS
);
2793 OMP_CLAUSE_NUM_TASKS_EXPR (c
) = num_tasks
;
2794 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2797 if (clauses
->priority
)
2801 gfc_init_se (&se
, NULL
);
2802 gfc_conv_expr (&se
, clauses
->priority
);
2803 gfc_add_block_to_block (block
, &se
.pre
);
2804 priority
= gfc_evaluate_now (se
.expr
, block
);
2805 gfc_add_block_to_block (block
, &se
.post
);
2807 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PRIORITY
);
2808 OMP_CLAUSE_PRIORITY_EXPR (c
) = priority
;
2809 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2816 gfc_init_se (&se
, NULL
);
2817 gfc_conv_expr (&se
, clauses
->hint
);
2818 gfc_add_block_to_block (block
, &se
.pre
);
2819 hint
= gfc_evaluate_now (se
.expr
, block
);
2820 gfc_add_block_to_block (block
, &se
.post
);
2822 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_HINT
);
2823 OMP_CLAUSE_HINT_EXPR (c
) = hint
;
2824 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2829 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMD
);
2830 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2832 if (clauses
->threads
)
2834 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREADS
);
2835 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2837 if (clauses
->nogroup
)
2839 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOGROUP
);
2840 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2842 if (clauses
->defaultmap
)
2844 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULTMAP
);
2845 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2847 if (clauses
->depend_source
)
2849 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEPEND
);
2850 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_SOURCE
;
2851 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2856 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ASYNC
);
2857 if (clauses
->async_expr
)
2858 OMP_CLAUSE_ASYNC_EXPR (c
)
2859 = gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
2861 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
2862 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2866 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SEQ
);
2867 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2869 if (clauses
->par_auto
)
2871 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_AUTO
);
2872 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2874 if (clauses
->independent
)
2876 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INDEPENDENT
);
2877 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2879 if (clauses
->wait_list
)
2883 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2885 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WAIT
);
2886 OMP_CLAUSE_DECL (c
) = gfc_convert_expr_to_tree (block
, el
->expr
);
2887 OMP_CLAUSE_CHAIN (c
) = omp_clauses
;
2891 if (clauses
->num_gangs_expr
)
2894 = gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
2895 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_GANGS
);
2896 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
2897 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2899 if (clauses
->num_workers_expr
)
2901 tree num_workers_var
2902 = gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
2903 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_WORKERS
);
2904 OMP_CLAUSE_NUM_WORKERS_EXPR (c
) = num_workers_var
;
2905 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2907 if (clauses
->vector_length_expr
)
2909 tree vector_length_var
2910 = gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
2911 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR_LENGTH
);
2912 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
) = vector_length_var
;
2913 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2915 if (clauses
->tile_list
)
2917 vec
<tree
, va_gc
> *tvec
;
2920 vec_alloc (tvec
, 4);
2922 for (el
= clauses
->tile_list
; el
; el
= el
->next
)
2923 vec_safe_push (tvec
, gfc_convert_expr_to_tree (block
, el
->expr
));
2925 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TILE
);
2926 OMP_CLAUSE_TILE_LIST (c
) = build_tree_list_vec (tvec
);
2927 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2930 if (clauses
->vector
)
2932 if (clauses
->vector_expr
)
2935 = gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
2936 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2937 OMP_CLAUSE_VECTOR_EXPR (c
) = vector_var
;
2938 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2942 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2943 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2946 if (clauses
->worker
)
2948 if (clauses
->worker_expr
)
2951 = gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
2952 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2953 OMP_CLAUSE_WORKER_EXPR (c
) = worker_var
;
2954 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2958 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2959 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2965 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GANG
);
2966 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2967 if (clauses
->gang_num_expr
)
2969 arg
= gfc_convert_expr_to_tree (block
, clauses
->gang_num_expr
);
2970 OMP_CLAUSE_GANG_EXPR (c
) = arg
;
2972 if (clauses
->gang_static
)
2974 arg
= clauses
->gang_static_expr
2975 ? gfc_convert_expr_to_tree (block
, clauses
->gang_static_expr
)
2976 : integer_minus_one_node
;
2977 OMP_CLAUSE_GANG_STATIC_EXPR (c
) = arg
;
2981 return nreverse (omp_clauses
);
/* Like gfc_trans_code, but force creation of a BIND_EXPR around it.  */

static tree
gfc_trans_omp_code (gfc_code *code, bool force_empty)
{
  tree stmt;

  pushlevel ();
  stmt = gfc_trans_code (code);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt) || force_empty)
        {
          tree block = poplevel (1, 0);
          stmt = build3_v (BIND_EXPR, NULL, stmt, block);
        }
      else
        poplevel (0, 0);
    }
  else
    poplevel (0, 0);

  return stmt;
}
/* Trans OpenACC directives.  */
/* parallel, kernels, data and host_data.  */
static tree
gfc_trans_oacc_construct (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, oacc_clauses;
  enum tree_code construct_code;

  switch (code->op)
    {
    case EXEC_OACC_PARALLEL:
      construct_code = OACC_PARALLEL;
      break;
    case EXEC_OACC_KERNELS:
      construct_code = OACC_KERNELS;
      break;
    case EXEC_OACC_DATA:
      construct_code = OACC_DATA;
      break;
    case EXEC_OACC_HOST_DATA:
      construct_code = OACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }

  gfc_start_block (&block);
  oacc_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                        code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, construct_code, void_type_node, stmt,
                     oacc_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
/* update, enter_data, exit_data, cache.  */
static tree
gfc_trans_oacc_executable_directive (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, oacc_clauses;
  enum tree_code construct_code;

  switch (code->op)
    {
    case EXEC_OACC_UPDATE:
      construct_code = OACC_UPDATE;
      break;
    case EXEC_OACC_ENTER_DATA:
      construct_code = OACC_ENTER_DATA;
      break;
    case EXEC_OACC_EXIT_DATA:
      construct_code = OACC_EXIT_DATA;
      break;
    case EXEC_OACC_CACHE:
      construct_code = OACC_CACHE;
      break;
    default:
      gcc_unreachable ();
    }

  gfc_start_block (&block);
  oacc_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                        code->loc);
  stmt = build1_loc (input_location, construct_code, void_type_node,
                     oacc_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
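
/* The following routine lowers !$acc wait into a call to the
   BUILT_IN_GOACC_WAIT builtin.  As an illustration, a directive such as
   "!$acc wait (1, 2) async (3)" should end up roughly as
   GOACC_wait (3, 2, 1, 2): the async queue (or -2 when no ASYNC clause
   is present), the number of wait arguments, then the arguments.  */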
static tree
gfc_trans_oacc_wait_directive (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, t;
  vec<tree, va_gc> *args;
  int nparms = 0;
  gfc_expr_list *el;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;
  location_t loc = input_location;

  for (el = clauses->wait_list; el; el = el->next)
    nparms++;

  vec_alloc (args, nparms + 2);
  stmt = builtin_decl_explicit (BUILT_IN_GOACC_WAIT);

  gfc_start_block (&block);

  if (clauses->async_expr)
    t = gfc_convert_expr_to_tree (&block, clauses->async_expr);
  else
    t = build_int_cst (integer_type_node, -2);

  args->quick_push (t);
  args->quick_push (build_int_cst (integer_type_node, nparms));

  for (el = clauses->wait_list; el; el = el->next)
    args->quick_push (gfc_convert_expr_to_tree (&block, el->expr));

  stmt = build_call_expr_loc_vec (loc, stmt, args);
  gfc_add_expr_to_block (&block, stmt);

  vec_free (args);

  return gfc_finish_block (&block);
}
static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
3122 gfc_trans_omp_atomic (gfc_code
*code
)
3124 gfc_code
*atomic_code
= code
;
3128 gfc_expr
*expr2
, *e
;
3131 tree lhsaddr
, type
, rhs
, x
;
3132 enum tree_code op
= ERROR_MARK
;
3133 enum tree_code aop
= OMP_ATOMIC
;
3134 bool var_on_left
= false;
3135 bool seq_cst
= (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SEQ_CST
) != 0;
3137 code
= code
->block
->next
;
3138 gcc_assert (code
->op
== EXEC_ASSIGN
);
3139 var
= code
->expr1
->symtree
->n
.sym
;
3141 gfc_init_se (&lse
, NULL
);
3142 gfc_init_se (&rse
, NULL
);
3143 gfc_init_se (&vse
, NULL
);
3144 gfc_start_block (&block
);
3146 expr2
= code
->expr2
;
3147 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3148 != GFC_OMP_ATOMIC_WRITE
)
3149 && (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
) == 0
3150 && expr2
->expr_type
== EXPR_FUNCTION
3151 && expr2
->value
.function
.isym
3152 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3153 expr2
= expr2
->value
.function
.actual
->expr
;
3155 switch (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3157 case GFC_OMP_ATOMIC_READ
:
3158 gfc_conv_expr (&vse
, code
->expr1
);
3159 gfc_add_block_to_block (&block
, &vse
.pre
);
3161 gfc_conv_expr (&lse
, expr2
);
3162 gfc_add_block_to_block (&block
, &lse
.pre
);
3163 type
= TREE_TYPE (lse
.expr
);
3164 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
3166 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
3167 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3168 x
= convert (TREE_TYPE (vse
.expr
), x
);
3169 gfc_add_modify (&block
, vse
.expr
, x
);
3171 gfc_add_block_to_block (&block
, &lse
.pre
);
3172 gfc_add_block_to_block (&block
, &rse
.pre
);
3174 return gfc_finish_block (&block
);
3175 case GFC_OMP_ATOMIC_CAPTURE
:
3176 aop
= OMP_ATOMIC_CAPTURE_NEW
;
3177 if (expr2
->expr_type
== EXPR_VARIABLE
)
3179 aop
= OMP_ATOMIC_CAPTURE_OLD
;
3180 gfc_conv_expr (&vse
, code
->expr1
);
3181 gfc_add_block_to_block (&block
, &vse
.pre
);
3183 gfc_conv_expr (&lse
, expr2
);
3184 gfc_add_block_to_block (&block
, &lse
.pre
);
3185 gfc_init_se (&lse
, NULL
);
3187 var
= code
->expr1
->symtree
->n
.sym
;
3188 expr2
= code
->expr2
;
3189 if (expr2
->expr_type
== EXPR_FUNCTION
3190 && expr2
->value
.function
.isym
3191 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3192 expr2
= expr2
->value
.function
.actual
->expr
;
3199 gfc_conv_expr (&lse
, code
->expr1
);
3200 gfc_add_block_to_block (&block
, &lse
.pre
);
3201 type
= TREE_TYPE (lse
.expr
);
3202 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
3204 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3205 == GFC_OMP_ATOMIC_WRITE
)
3206 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
3208 gfc_conv_expr (&rse
, expr2
);
3209 gfc_add_block_to_block (&block
, &rse
.pre
);
3211 else if (expr2
->expr_type
== EXPR_OP
)
3214 switch (expr2
->value
.op
.op
)
3216 case INTRINSIC_PLUS
:
3219 case INTRINSIC_TIMES
:
3222 case INTRINSIC_MINUS
:
3225 case INTRINSIC_DIVIDE
:
3226 if (expr2
->ts
.type
== BT_INTEGER
)
3227 op
= TRUNC_DIV_EXPR
;
3232 op
= TRUTH_ANDIF_EXPR
;
3235 op
= TRUTH_ORIF_EXPR
;
3240 case INTRINSIC_NEQV
:
3246 e
= expr2
->value
.op
.op1
;
3247 if (e
->expr_type
== EXPR_FUNCTION
3248 && e
->value
.function
.isym
3249 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3250 e
= e
->value
.function
.actual
->expr
;
3251 if (e
->expr_type
== EXPR_VARIABLE
3252 && e
->symtree
!= NULL
3253 && e
->symtree
->n
.sym
== var
)
3255 expr2
= expr2
->value
.op
.op2
;
3260 e
= expr2
->value
.op
.op2
;
3261 if (e
->expr_type
== EXPR_FUNCTION
3262 && e
->value
.function
.isym
3263 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3264 e
= e
->value
.function
.actual
->expr
;
3265 gcc_assert (e
->expr_type
== EXPR_VARIABLE
3266 && e
->symtree
!= NULL
3267 && e
->symtree
->n
.sym
== var
);
3268 expr2
= expr2
->value
.op
.op1
;
3269 var_on_left
= false;
3271 gfc_conv_expr (&rse
, expr2
);
3272 gfc_add_block_to_block (&block
, &rse
.pre
);
3276 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
3277 switch (expr2
->value
.function
.isym
->id
)
3297 e
= expr2
->value
.function
.actual
->expr
;
3298 gcc_assert (e
->expr_type
== EXPR_VARIABLE
3299 && e
->symtree
!= NULL
3300 && e
->symtree
->n
.sym
== var
);
3302 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
3303 gfc_add_block_to_block (&block
, &rse
.pre
);
3304 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
3306 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
3307 gfc_actual_arglist
*arg
;
3309 gfc_add_modify (&block
, accum
, rse
.expr
);
3310 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
3313 gfc_init_block (&rse
.pre
);
3314 gfc_conv_expr (&rse
, arg
->expr
);
3315 gfc_add_block_to_block (&block
, &rse
.pre
);
3316 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
3318 gfc_add_modify (&block
, accum
, x
);
3324 expr2
= expr2
->value
.function
.actual
->next
->expr
;
3327 lhsaddr
= save_expr (lhsaddr
);
3328 if (TREE_CODE (lhsaddr
) != SAVE_EXPR
3329 && (TREE_CODE (lhsaddr
) != ADDR_EXPR
3330 || !VAR_P (TREE_OPERAND (lhsaddr
, 0))))
3332 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
3333 it even after unsharing function body. */
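     Illustration: an LHS whose address is not already a SAVE_EXPR or a
     plain ADDR_EXPR of a VAR_DECL (for instance an array element) is
     given a TARGET_EXPR temporary below so that the address keeps a
     single, recognizable form.  */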
3334 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
3335 DECL_CONTEXT (var
) = current_function_decl
;
3336 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
,
3337 NULL_TREE
, NULL_TREE
);
3340 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
3342 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3343 == GFC_OMP_ATOMIC_WRITE
)
3344 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
3348 x
= convert (TREE_TYPE (rhs
),
3349 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
3351 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
3353 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
3356 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
3357 && TREE_CODE (type
) != COMPLEX_TYPE
)
3358 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
3359 TREE_TYPE (TREE_TYPE (rhs
)), x
);
3361 gfc_add_block_to_block (&block
, &lse
.pre
);
3362 gfc_add_block_to_block (&block
, &rse
.pre
);
3364 if (aop
== OMP_ATOMIC
)
3366 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
3367 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3368 gfc_add_expr_to_block (&block
, x
);
3372 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
3375 expr2
= code
->expr2
;
3376 if (expr2
->expr_type
== EXPR_FUNCTION
3377 && expr2
->value
.function
.isym
3378 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3379 expr2
= expr2
->value
.function
.actual
->expr
;
3381 gcc_assert (expr2
->expr_type
== EXPR_VARIABLE
);
3382 gfc_conv_expr (&vse
, code
->expr1
);
3383 gfc_add_block_to_block (&block
, &vse
.pre
);
3385 gfc_init_se (&lse
, NULL
);
3386 gfc_conv_expr (&lse
, expr2
);
3387 gfc_add_block_to_block (&block
, &lse
.pre
);
3389 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
3390 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3391 x
= convert (TREE_TYPE (vse
.expr
), x
);
3392 gfc_add_modify (&block
, vse
.expr
, x
);
3395 return gfc_finish_block (&block
);
static tree
gfc_trans_omp_barrier (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
  return build_call_expr_loc (input_location, decl, 0);
}
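
/* !$omp cancel is lowered below to a call to the GOMP_cancel builtin with a
   mask that identifies the construct kind (1 parallel, 2 do, 4 sections,
   8 taskgroup) and, as second argument, the evaluated IF condition, or true
   when no IF clause was given.  */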
static tree
gfc_trans_omp_cancel (gfc_code *code)
{
  int mask = 0;
  tree ifc = boolean_true_node;
  stmtblock_t block;
  switch (code->ext.omp_clauses->cancel)
    {
    case OMP_CANCEL_PARALLEL: mask = 1; break;
    case OMP_CANCEL_DO: mask = 2; break;
    case OMP_CANCEL_SECTIONS: mask = 4; break;
    case OMP_CANCEL_TASKGROUP: mask = 8; break;
    default: gcc_unreachable ();
    }
  gfc_start_block (&block);
  if (code->ext.omp_clauses->if_expr)
    {
      gfc_se se;
      tree if_var;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, code->ext.omp_clauses->if_expr);
      gfc_add_block_to_block (&block, &se.pre);
      if_var = gfc_evaluate_now (se.expr, &block);
      gfc_add_block_to_block (&block, &se.post);
      tree type = TREE_TYPE (if_var);
      ifc = fold_build2_loc (input_location, NE_EXPR,
                             boolean_type_node, if_var,
                             build_zero_cst (type));
    }
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
  tree c_bool_type = TREE_TYPE (TREE_TYPE (decl));
  ifc = fold_convert (c_bool_type, ifc);
  gfc_add_expr_to_block (&block,
                         build_call_expr_loc (input_location, decl, 2,
                                              build_int_cst (integer_type_node,
                                                             mask), ifc));
  return gfc_finish_block (&block);
}
static tree
gfc_trans_omp_cancellation_point (gfc_code *code)
{
  int mask = 0;
  switch (code->ext.omp_clauses->cancel)
    {
    case OMP_CANCEL_PARALLEL: mask = 1; break;
    case OMP_CANCEL_DO: mask = 2; break;
    case OMP_CANCEL_SECTIONS: mask = 4; break;
    case OMP_CANCEL_TASKGROUP: mask = 8; break;
    default: gcc_unreachable ();
    }
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT);
  return build_call_expr_loc (input_location, decl, 1,
                              build_int_cst (integer_type_node, mask));
}
static tree
gfc_trans_omp_critical (gfc_code *code)
{
  tree name = NULL_TREE, stmt;
  if (code->ext.omp_clauses != NULL)
    name = get_identifier (code->ext.omp_clauses->critical_name);
  stmt = gfc_trans_code (code->block->next);
  return build3_loc (input_location, OMP_CRITICAL, void_type_node, stmt,
                     NULL_TREE, name);
}
typedef struct dovar_init_d {
  tree var;
  tree init;
} dovar_init;
3480 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
3481 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
3484 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
, orig_decls
;
3485 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
3488 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
3489 int i
, collapse
= clauses
->collapse
;
3490 vec
<dovar_init
> inits
= vNULL
;
3493 vec
<tree
, va_heap
, vl_embed
> *saved_doacross_steps
= doacross_steps
;
3494 gfc_expr_list
*tile
= do_clauses
? do_clauses
->tile_list
: clauses
->tile_list
;
3496 /* Both collapsed and tiled loops are lowered the same way. In
3497 OpenACC, those clauses are not compatible, so prioritize the tile
3498 clause, if present. */
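  /* For instance, an OpenACC "!$acc loop tile(2, 2)" over two nested DO
     loops is counted here just as collapse(2) would be: the number of
     tile arguments determines how many nested loops are consumed.  */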
3502 for (gfc_expr_list
*el
= tile
; el
; el
= el
->next
)
3506 doacross_steps
= NULL
;
3507 if (clauses
->orderedc
)
3508 collapse
= clauses
->orderedc
;
3512 code
= code
->block
->next
;
3513 gcc_assert (code
->op
== EXEC_DO
);
3515 init
= make_tree_vec (collapse
);
3516 cond
= make_tree_vec (collapse
);
3517 incr
= make_tree_vec (collapse
);
3518 orig_decls
= clauses
->orderedc
? make_tree_vec (collapse
) : NULL_TREE
;
3522 gfc_start_block (&block
);
3526 /* simd schedule modifier is only useful for composite do simd and other
3527 constructs including that, where gfc_trans_omp_do is only called
3528 on the simd construct and DO's clauses are translated elsewhere. */
3529 do_clauses
->sched_simd
= false;
3531 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
3533 for (i
= 0; i
< collapse
; i
++)
3536 int dovar_found
= 0;
3541 gfc_omp_namelist
*n
= NULL
;
3542 if (op
!= EXEC_OMP_DISTRIBUTE
)
3543 for (n
= clauses
->lists
[(op
== EXEC_OMP_SIMD
&& collapse
== 1)
3544 ? OMP_LIST_LINEAR
: OMP_LIST_LASTPRIVATE
];
3545 n
!= NULL
; n
= n
->next
)
3546 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3550 else if (n
== NULL
&& op
!= EXEC_OMP_SIMD
)
3551 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
3552 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3558 /* Evaluate all the expressions in the iterator. */
3559 gfc_init_se (&se
, NULL
);
3560 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
3561 gfc_add_block_to_block (pblock
, &se
.pre
);
3563 type
= TREE_TYPE (dovar
);
3564 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
3566 gfc_init_se (&se
, NULL
);
3567 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
3568 gfc_add_block_to_block (pblock
, &se
.pre
);
3569 from
= gfc_evaluate_now (se
.expr
, pblock
);
3571 gfc_init_se (&se
, NULL
);
3572 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
3573 gfc_add_block_to_block (pblock
, &se
.pre
);
3574 to
= gfc_evaluate_now (se
.expr
, pblock
);
3576 gfc_init_se (&se
, NULL
);
3577 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
3578 gfc_add_block_to_block (pblock
, &se
.pre
);
3579 step
= gfc_evaluate_now (se
.expr
, pblock
);
3582 /* Special case simple loops. */
3585 if (integer_onep (step
))
3587 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
3592 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
3598 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
3599 /* The condition should not be folded. */
3600 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
3601 ? LE_EXPR
: GE_EXPR
,
3602 boolean_type_node
, dovar
, to
);
3603 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3605 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3608 TREE_VEC_ELT (incr
, i
));
          /* STEP is not 1 or -1.  Use:
             for (count = 0; count < (to + step - from) / step; count++)
               dovar = from + count * step;  */
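          /* E.g. for "DO i = 1, 10, 3" this gives (10 + 3 - 1) / 3 = 4
             iterations, with i recomputed as 1 + count * 3,
             i.e. 1, 4, 7, 10.  */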
3619 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
3620 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
3621 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
3623 tmp
= gfc_evaluate_now (tmp
, pblock
);
3624 count
= gfc_create_var (type
, "count");
3625 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
3626 build_int_cst (type
, 0));
3627 /* The condition should not be folded. */
3628 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
3631 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3633 build_int_cst (type
, 1));
3634 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3635 MODIFY_EXPR
, type
, count
,
3636 TREE_VEC_ELT (incr
, i
));
3638 /* Initialize DOVAR. */
3639 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
3640 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
3641 dovar_init e
= {dovar
, tmp
};
3642 inits
.safe_push (e
);
3643 if (clauses
->orderedc
)
3645 if (doacross_steps
== NULL
)
3646 vec_safe_grow_cleared (doacross_steps
, clauses
->orderedc
);
3647 (*doacross_steps
)[i
] = step
;
3651 TREE_VEC_ELT (orig_decls
, i
) = dovar_decl
;
3653 if (dovar_found
== 2
3654 && op
== EXEC_OMP_SIMD
3658 for (tmp
= omp_clauses
; tmp
; tmp
= OMP_CLAUSE_CHAIN (tmp
))
3659 if (OMP_CLAUSE_CODE (tmp
) == OMP_CLAUSE_LINEAR
3660 && OMP_CLAUSE_DECL (tmp
) == dovar
)
3662 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3668 if (op
== EXEC_OMP_SIMD
)
3672 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3673 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
3674 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3677 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3682 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3683 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
3684 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3686 if (dovar_found
== 2)
3693 /* If dovar is lastprivate, but different counter is used,
3694 dovar += step needs to be added to
3695 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
3696 will have the value on entry of the last loop, rather
3697 than value after iterator increment. */
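              /* E.g. after "DO i = 1, 10, 3" with lastprivate(i) the Fortran
                 value of i must be 13 (the value after the final increment),
                 while the body last assigns 10, hence the extra statement
                 attached to the clause below.  */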
3698 if (clauses
->orderedc
)
3700 if (clauses
->collapse
<= 1 || i
>= clauses
->collapse
)
3703 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3704 type
, count
, build_one_cst (type
));
3705 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
,
3707 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
3712 tmp
= gfc_evaluate_now (step
, pblock
);
3713 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
3716 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
3718 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3719 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3720 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3722 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
3725 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
3726 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3728 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
3732 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
3734 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3735 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3736 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3738 tree l
= build_omp_clause (input_location
,
3739 OMP_CLAUSE_LASTPRIVATE
);
3740 OMP_CLAUSE_DECL (l
) = dovar_decl
;
3741 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
3742 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
3744 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
3748 gcc_assert (simple
|| c
!= NULL
);
3752 if (op
!= EXEC_OMP_SIMD
)
3753 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3754 else if (collapse
== 1)
3756 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3757 OMP_CLAUSE_LINEAR_STEP (tmp
) = build_int_cst (type
, 1);
3758 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3759 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
3762 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3763 OMP_CLAUSE_DECL (tmp
) = count
;
3764 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3767 if (i
+ 1 < collapse
)
3768 code
= code
->block
->next
;
3771 if (pblock
!= &block
)
3774 gfc_start_block (&block
);
3777 gfc_start_block (&body
);
3779 FOR_EACH_VEC_ELT (inits
, ix
, di
)
3780 gfc_add_modify (&body
, di
->var
, di
->init
);
3783 /* Cycle statement is implemented with a goto. Exit statement must not be
3784 present for this loop. */
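  /* That is, a Fortran CYCLE inside the body becomes a goto to cycle_label,
     which is emitted just before the end of the loop body below; EXIT would
     need an exit_label, which is left as NULL_TREE here.  */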
3785 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3787 /* Put these labels where they can be found later. */
3789 code
->cycle_label
= cycle_label
;
3790 code
->exit_label
= NULL_TREE
;
3792 /* Main loop body. */
3793 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
3794 gfc_add_expr_to_block (&body
, tmp
);
3796 /* Label for cycle statements (if needed). */
3797 if (TREE_USED (cycle_label
))
3799 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3800 gfc_add_expr_to_block (&body
, tmp
);
3803 /* End of loop body. */
3806 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
3807 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
3808 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
3809 case EXEC_OMP_TASKLOOP
: stmt
= make_node (OMP_TASKLOOP
); break;
3810 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
3811 default: gcc_unreachable ();
3814 TREE_TYPE (stmt
) = void_type_node
;
3815 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
3816 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
3817 OMP_FOR_INIT (stmt
) = init
;
3818 OMP_FOR_COND (stmt
) = cond
;
3819 OMP_FOR_INCR (stmt
) = incr
;
3821 OMP_FOR_ORIG_DECLS (stmt
) = orig_decls
;
3822 gfc_add_expr_to_block (&block
, stmt
);
3824 vec_free (doacross_steps
);
3825 doacross_steps
= saved_doacross_steps
;
3827 return gfc_finish_block (&block
);
3830 /* parallel loop and kernels loop. */
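/* For example "!$acc parallel loop gang vector" is emitted as an
   OACC_PARALLEL node whose body is the OACC_LOOP produced by
   gfc_trans_omp_do below; the gang/worker/vector/tile and related
   loop-only clauses are split off onto the inner loop, while the
   remaining clauses stay on the outer construct.  */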
3832 gfc_trans_oacc_combined_directive (gfc_code
*code
)
3834 stmtblock_t block
, *pblock
= NULL
;
3835 gfc_omp_clauses construct_clauses
, loop_clauses
;
3836 tree stmt
, oacc_clauses
= NULL_TREE
;
3837 enum tree_code construct_code
;
3841 case EXEC_OACC_PARALLEL_LOOP
:
3842 construct_code
= OACC_PARALLEL
;
3844 case EXEC_OACC_KERNELS_LOOP
:
3845 construct_code
= OACC_KERNELS
;
3851 gfc_start_block (&block
);
3853 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
3854 if (code
->ext
.omp_clauses
!= NULL
)
3856 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
3857 sizeof (construct_clauses
));
3858 loop_clauses
.collapse
= construct_clauses
.collapse
;
3859 loop_clauses
.gang
= construct_clauses
.gang
;
3860 loop_clauses
.gang_static
= construct_clauses
.gang_static
;
3861 loop_clauses
.gang_num_expr
= construct_clauses
.gang_num_expr
;
3862 loop_clauses
.gang_static_expr
= construct_clauses
.gang_static_expr
;
3863 loop_clauses
.vector
= construct_clauses
.vector
;
3864 loop_clauses
.vector_expr
= construct_clauses
.vector_expr
;
3865 loop_clauses
.worker
= construct_clauses
.worker
;
3866 loop_clauses
.worker_expr
= construct_clauses
.worker_expr
;
3867 loop_clauses
.seq
= construct_clauses
.seq
;
3868 loop_clauses
.par_auto
= construct_clauses
.par_auto
;
3869 loop_clauses
.independent
= construct_clauses
.independent
;
3870 loop_clauses
.tile_list
= construct_clauses
.tile_list
;
3871 loop_clauses
.lists
[OMP_LIST_PRIVATE
]
3872 = construct_clauses
.lists
[OMP_LIST_PRIVATE
];
3873 loop_clauses
.lists
[OMP_LIST_REDUCTION
]
3874 = construct_clauses
.lists
[OMP_LIST_REDUCTION
];
3875 construct_clauses
.gang
= false;
3876 construct_clauses
.gang_static
= false;
3877 construct_clauses
.gang_num_expr
= NULL
;
3878 construct_clauses
.gang_static_expr
= NULL
;
3879 construct_clauses
.vector
= false;
3880 construct_clauses
.vector_expr
= NULL
;
3881 construct_clauses
.worker
= false;
3882 construct_clauses
.worker_expr
= NULL
;
3883 construct_clauses
.seq
= false;
3884 construct_clauses
.par_auto
= false;
3885 construct_clauses
.independent
= false;
3886 construct_clauses
.independent
= false;
3887 construct_clauses
.tile_list
= NULL
;
3888 construct_clauses
.lists
[OMP_LIST_PRIVATE
] = NULL
;
3889 if (construct_code
== OACC_KERNELS
)
3890 construct_clauses
.lists
[OMP_LIST_REDUCTION
] = NULL
;
3891 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
3894 if (!loop_clauses
.seq
)
3898 stmt
= gfc_trans_omp_do (code
, EXEC_OACC_LOOP
, pblock
, &loop_clauses
, NULL
);
3899 if (TREE_CODE (stmt
) != BIND_EXPR
)
3900 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3903 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
3905 gfc_add_expr_to_block (&block
, stmt
);
3906 return gfc_finish_block (&block
);
static tree
gfc_trans_omp_flush (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
  return build_call_expr_loc (input_location, decl, 0);
}
static tree
gfc_trans_omp_master (gfc_code *code)
{
  tree stmt = gfc_trans_code (code->block->next);
  if (IS_EMPTY_STMT (stmt))
    return stmt;
  return build1_v (OMP_MASTER, stmt);
}
static tree
gfc_trans_omp_ordered (gfc_code *code)
{
  tree omp_clauses = gfc_trans_omp_clauses (NULL, code->ext.omp_clauses,
                                            code->loc);
  return build2_loc (input_location, OMP_ORDERED, void_type_node,
                     code->block ? gfc_trans_code (code->block->next)
                     : NULL_TREE, omp_clauses);
}
static tree
gfc_trans_omp_parallel (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel ();
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
enum
{
  GFC_OMP_SPLIT_SIMD,
  GFC_OMP_SPLIT_DO,
  GFC_OMP_SPLIT_PARALLEL,
  GFC_OMP_SPLIT_DISTRIBUTE,
  GFC_OMP_SPLIT_TEAMS,
  GFC_OMP_SPLIT_TARGET,
  GFC_OMP_SPLIT_TASKLOOP,
  GFC_OMP_SPLIT_NUM
};

enum
{
  GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
  GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
  GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
  GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
  GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
  GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET),
  GFC_OMP_MASK_TASKLOOP = (1 << GFC_OMP_SPLIT_TASKLOOP)
};
static void
gfc_split_omp_clauses (gfc_code *code,
                       gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM])
{
  int mask = 0, innermost = 0;
  memset (clausesa, 0, GFC_OMP_SPLIT_NUM * sizeof (gfc_omp_clauses));
  switch (code->op)
    {
    case EXEC_OMP_DISTRIBUTE:
      innermost = GFC_OMP_SPLIT_DISTRIBUTE;
      break;
    case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
      mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
      innermost = GFC_OMP_SPLIT_DO;
      break;
    case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
      mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_PARALLEL
             | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
      innermost = GFC_OMP_SPLIT_SIMD;
      break;
    case EXEC_OMP_DISTRIBUTE_SIMD:
      mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
      innermost = GFC_OMP_SPLIT_SIMD;
      break;
    case EXEC_OMP_DO:
      innermost = GFC_OMP_SPLIT_DO;
      break;
    case EXEC_OMP_DO_SIMD:
      mask = GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
      innermost = GFC_OMP_SPLIT_SIMD;
      break;
    case EXEC_OMP_PARALLEL:
      innermost = GFC_OMP_SPLIT_PARALLEL;
      break;
    case EXEC_OMP_PARALLEL_DO:
      mask = GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
      innermost = GFC_OMP_SPLIT_DO;
      break;
    case EXEC_OMP_PARALLEL_DO_SIMD:
      mask = GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
      innermost = GFC_OMP_SPLIT_SIMD;
      break;
    case EXEC_OMP_SIMD:
      innermost = GFC_OMP_SPLIT_SIMD;
      break;
    case EXEC_OMP_TARGET:
      innermost = GFC_OMP_SPLIT_TARGET;
      break;
    case EXEC_OMP_TARGET_PARALLEL:
      mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_PARALLEL;
      innermost = GFC_OMP_SPLIT_PARALLEL;
      break;
    case EXEC_OMP_TARGET_PARALLEL_DO:
      mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
      innermost = GFC_OMP_SPLIT_DO;
      break;
    case EXEC_OMP_TARGET_PARALLEL_DO_SIMD:
      mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO
             | GFC_OMP_MASK_SIMD;
      innermost = GFC_OMP_SPLIT_SIMD;
      break;
    case EXEC_OMP_TARGET_SIMD:
      mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_SIMD;
      innermost = GFC_OMP_SPLIT_SIMD;
      break;
    case EXEC_OMP_TARGET_TEAMS:
      mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS;
      innermost = GFC_OMP_SPLIT_TEAMS;
      break;
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
      mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS
             | GFC_OMP_MASK_DISTRIBUTE;
      innermost = GFC_OMP_SPLIT_DISTRIBUTE;
      break;
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
      mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
             | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
      innermost = GFC_OMP_SPLIT_DO;
      break;
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
      mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
             | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
      innermost = GFC_OMP_SPLIT_SIMD;
      break;
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
      mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS
             | GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
      innermost = GFC_OMP_SPLIT_SIMD;
      break;
    case EXEC_OMP_TASKLOOP:
      innermost = GFC_OMP_SPLIT_TASKLOOP;
      break;
    case EXEC_OMP_TASKLOOP_SIMD:
      mask = GFC_OMP_MASK_TASKLOOP | GFC_OMP_MASK_SIMD;
      innermost = GFC_OMP_SPLIT_SIMD;
      break;
    case EXEC_OMP_TEAMS:
      innermost = GFC_OMP_SPLIT_TEAMS;
      break;
    case EXEC_OMP_TEAMS_DISTRIBUTE:
      mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE;
      innermost = GFC_OMP_SPLIT_DISTRIBUTE;
      break;
    case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
      mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
             | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
      innermost = GFC_OMP_SPLIT_DO;
      break;
    case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
      mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
             | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
      innermost = GFC_OMP_SPLIT_SIMD;
      break;
    case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
      mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
      innermost = GFC_OMP_SPLIT_SIMD;
      break;
    default:
      gcc_unreachable ();
    }
  if (mask == 0)
    {
      clausesa[innermost] = *code->ext.omp_clauses;
      return;
    }
  if (code->ext.omp_clauses != NULL)
    {
      if (mask & GFC_OMP_MASK_TARGET)
        {
          /* First the clauses that are unique to some constructs.  */
          clausesa[GFC_OMP_SPLIT_TARGET].lists[OMP_LIST_MAP]
            = code->ext.omp_clauses->lists[OMP_LIST_MAP];
          clausesa[GFC_OMP_SPLIT_TARGET].lists[OMP_LIST_IS_DEVICE_PTR]
            = code->ext.omp_clauses->lists[OMP_LIST_IS_DEVICE_PTR];
          clausesa[GFC_OMP_SPLIT_TARGET].device
            = code->ext.omp_clauses->device;
          clausesa[GFC_OMP_SPLIT_TARGET].defaultmap
            = code->ext.omp_clauses->defaultmap;
          clausesa[GFC_OMP_SPLIT_TARGET].if_exprs[OMP_IF_TARGET]
            = code->ext.omp_clauses->if_exprs[OMP_IF_TARGET];
          /* And this is copied to all.  */
          clausesa[GFC_OMP_SPLIT_PARALLEL].if_expr
            = code->ext.omp_clauses->if_expr;
        }
      if (mask & GFC_OMP_MASK_TEAMS)
        {
          /* First the clauses that are unique to some constructs.  */
          clausesa[GFC_OMP_SPLIT_TEAMS].num_teams
            = code->ext.omp_clauses->num_teams;
          clausesa[GFC_OMP_SPLIT_TEAMS].thread_limit
            = code->ext.omp_clauses->thread_limit;
          /* Shared and default clauses are allowed on parallel, teams
             and taskloop.  */
          clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_SHARED]
            = code->ext.omp_clauses->lists[OMP_LIST_SHARED];
          clausesa[GFC_OMP_SPLIT_TEAMS].default_sharing
            = code->ext.omp_clauses->default_sharing;
        }
      if (mask & GFC_OMP_MASK_DISTRIBUTE)
        {
          /* First the clauses that are unique to some constructs.  */
          clausesa[GFC_OMP_SPLIT_DISTRIBUTE].dist_sched_kind
            = code->ext.omp_clauses->dist_sched_kind;
          clausesa[GFC_OMP_SPLIT_DISTRIBUTE].dist_chunk_size
            = code->ext.omp_clauses->dist_chunk_size;
          /* Duplicate collapse.  */
          clausesa[GFC_OMP_SPLIT_DISTRIBUTE].collapse
            = code->ext.omp_clauses->collapse;
        }
      if (mask & GFC_OMP_MASK_PARALLEL)
        {
          /* First the clauses that are unique to some constructs.  */
          clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_COPYIN]
            = code->ext.omp_clauses->lists[OMP_LIST_COPYIN];
          clausesa[GFC_OMP_SPLIT_PARALLEL].num_threads
            = code->ext.omp_clauses->num_threads;
          clausesa[GFC_OMP_SPLIT_PARALLEL].proc_bind
            = code->ext.omp_clauses->proc_bind;
          /* Shared and default clauses are allowed on parallel, teams
             and taskloop.  */
          clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_SHARED]
            = code->ext.omp_clauses->lists[OMP_LIST_SHARED];
          clausesa[GFC_OMP_SPLIT_PARALLEL].default_sharing
            = code->ext.omp_clauses->default_sharing;
          clausesa[GFC_OMP_SPLIT_PARALLEL].if_exprs[OMP_IF_PARALLEL]
            = code->ext.omp_clauses->if_exprs[OMP_IF_PARALLEL];
          /* And this is copied to all.  */
          clausesa[GFC_OMP_SPLIT_PARALLEL].if_expr
            = code->ext.omp_clauses->if_expr;
        }
      if (mask & GFC_OMP_MASK_DO)
        {
          /* First the clauses that are unique to some constructs.  */
          clausesa[GFC_OMP_SPLIT_DO].ordered
            = code->ext.omp_clauses->ordered;
          clausesa[GFC_OMP_SPLIT_DO].orderedc
            = code->ext.omp_clauses->orderedc;
          clausesa[GFC_OMP_SPLIT_DO].sched_kind
            = code->ext.omp_clauses->sched_kind;
          if (innermost == GFC_OMP_SPLIT_SIMD)
            clausesa[GFC_OMP_SPLIT_DO].sched_simd
              = code->ext.omp_clauses->sched_simd;
          clausesa[GFC_OMP_SPLIT_DO].sched_monotonic
            = code->ext.omp_clauses->sched_monotonic;
          clausesa[GFC_OMP_SPLIT_DO].sched_nonmonotonic
            = code->ext.omp_clauses->sched_nonmonotonic;
          clausesa[GFC_OMP_SPLIT_DO].chunk_size
            = code->ext.omp_clauses->chunk_size;
          clausesa[GFC_OMP_SPLIT_DO].nowait
            = code->ext.omp_clauses->nowait;
          /* Duplicate collapse.  */
          clausesa[GFC_OMP_SPLIT_DO].collapse
            = code->ext.omp_clauses->collapse;
        }
      if (mask & GFC_OMP_MASK_SIMD)
        {
          clausesa[GFC_OMP_SPLIT_SIMD].safelen_expr
            = code->ext.omp_clauses->safelen_expr;
          clausesa[GFC_OMP_SPLIT_SIMD].simdlen_expr
            = code->ext.omp_clauses->simdlen_expr;
          clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_ALIGNED]
            = code->ext.omp_clauses->lists[OMP_LIST_ALIGNED];
          /* Duplicate collapse.  */
          clausesa[GFC_OMP_SPLIT_SIMD].collapse
            = code->ext.omp_clauses->collapse;
        }
      if (mask & GFC_OMP_MASK_TASKLOOP)
        {
          /* First the clauses that are unique to some constructs.  */
          clausesa[GFC_OMP_SPLIT_TASKLOOP].nogroup
            = code->ext.omp_clauses->nogroup;
          clausesa[GFC_OMP_SPLIT_TASKLOOP].grainsize
            = code->ext.omp_clauses->grainsize;
          clausesa[GFC_OMP_SPLIT_TASKLOOP].num_tasks
            = code->ext.omp_clauses->num_tasks;
          clausesa[GFC_OMP_SPLIT_TASKLOOP].priority
            = code->ext.omp_clauses->priority;
          clausesa[GFC_OMP_SPLIT_TASKLOOP].final_expr
            = code->ext.omp_clauses->final_expr;
          clausesa[GFC_OMP_SPLIT_TASKLOOP].untied
            = code->ext.omp_clauses->untied;
          clausesa[GFC_OMP_SPLIT_TASKLOOP].mergeable
            = code->ext.omp_clauses->mergeable;
          clausesa[GFC_OMP_SPLIT_TASKLOOP].if_exprs[OMP_IF_TASKLOOP]
            = code->ext.omp_clauses->if_exprs[OMP_IF_TASKLOOP];
          /* And this is copied to all.  */
          clausesa[GFC_OMP_SPLIT_TASKLOOP].if_expr
            = code->ext.omp_clauses->if_expr;
          /* Shared and default clauses are allowed on parallel, teams
             and taskloop.  */
          clausesa[GFC_OMP_SPLIT_TASKLOOP].lists[OMP_LIST_SHARED]
            = code->ext.omp_clauses->lists[OMP_LIST_SHARED];
          clausesa[GFC_OMP_SPLIT_TASKLOOP].default_sharing
            = code->ext.omp_clauses->default_sharing;
          /* Duplicate collapse.  */
          clausesa[GFC_OMP_SPLIT_TASKLOOP].collapse
            = code->ext.omp_clauses->collapse;
        }
      /* Private clause is supported on all constructs,
         it is enough to put it on the innermost one.  For
         !$ omp parallel do put it on parallel though,
         as that's what we did for OpenMP 3.1.  */
      clausesa[innermost == GFC_OMP_SPLIT_DO
               ? (int) GFC_OMP_SPLIT_PARALLEL
               : innermost].lists[OMP_LIST_PRIVATE]
        = code->ext.omp_clauses->lists[OMP_LIST_PRIVATE];
      /* Firstprivate clause is supported on all constructs but
         simd.  Put it on the outermost of those and duplicate
         on parallel and teams.  */
      if (mask & GFC_OMP_MASK_TARGET)
        clausesa[GFC_OMP_SPLIT_TARGET].lists[OMP_LIST_FIRSTPRIVATE]
          = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
      if (mask & GFC_OMP_MASK_TEAMS)
        clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_FIRSTPRIVATE]
          = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
      else if (mask & GFC_OMP_MASK_DISTRIBUTE)
        clausesa[GFC_OMP_SPLIT_DISTRIBUTE].lists[OMP_LIST_FIRSTPRIVATE]
          = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
      if (mask & GFC_OMP_MASK_PARALLEL)
        clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_FIRSTPRIVATE]
          = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
      else if (mask & GFC_OMP_MASK_DO)
        clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_FIRSTPRIVATE]
          = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
      /* Lastprivate is allowed on distribute, do and simd.
         In parallel do{, simd} we actually want to put it on
         parallel rather than do.  */
      if (mask & GFC_OMP_MASK_DISTRIBUTE)
        clausesa[GFC_OMP_SPLIT_DISTRIBUTE].lists[OMP_LIST_LASTPRIVATE]
          = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
      if (mask & GFC_OMP_MASK_PARALLEL)
        clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_LASTPRIVATE]
          = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
      else if (mask & GFC_OMP_MASK_DO)
        clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_LASTPRIVATE]
          = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
      if (mask & GFC_OMP_MASK_SIMD)
        clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_LASTPRIVATE]
          = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
      /* Reduction is allowed on simd, do, parallel and teams.
         Duplicate it on all of them, but omit on do if
         parallel is present.  */
      if (mask & GFC_OMP_MASK_TEAMS)
        clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_REDUCTION]
          = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
      if (mask & GFC_OMP_MASK_PARALLEL)
        clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_REDUCTION]
          = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
      else if (mask & GFC_OMP_MASK_DO)
        clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_REDUCTION]
          = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
      if (mask & GFC_OMP_MASK_SIMD)
        clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_REDUCTION]
          = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
      /* Linear clause is supported on do and simd,
         put it on the innermost one.  */
      clausesa[innermost].lists[OMP_LIST_LINEAR]
        = code->ext.omp_clauses->lists[OMP_LIST_LINEAR];
    }
  if ((mask & (GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO))
      == (GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO))
    clausesa[GFC_OMP_SPLIT_DO].nowait = true;
}
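
/* Rough worked example of the routine above: for

     !$omp parallel do private(i) reduction(+:s) schedule(dynamic)

   num_threads/proc_bind/shared/default go to the PARALLEL part, the
   schedule and collapse information to the DO part, private(i) is put
   on PARALLEL (the innermost leaf is DO), reduction(+:s) is duplicated
   on PARALLEL only (it is omitted on DO because PARALLEL is present),
   and the inner DO is finally marked nowait because the construct
   combines PARALLEL with DO.  */
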
static tree
gfc_trans_omp_do_simd (gfc_code *code, stmtblock_t *pblock,
                       gfc_omp_clauses *clausesa, tree omp_clauses)
{
  stmtblock_t block;
  gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
  tree stmt, body, omp_do_clauses = NULL_TREE;

  if (pblock == NULL)
    gfc_start_block (&block);
  else
    gfc_init_block (&block);

  if (clausesa == NULL)
    {
      clausesa = clausesa_buf;
      gfc_split_omp_clauses (code, clausesa);
    }
  if (flag_openmp)
    omp_do_clauses
      = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_DO], code->loc);
  body = gfc_trans_omp_do (code, EXEC_OMP_SIMD, pblock ? pblock : &block,
                           &clausesa[GFC_OMP_SPLIT_SIMD], omp_clauses);
  if (pblock == NULL)
    {
      if (TREE_CODE (body) != BIND_EXPR)
        body = build3_v (BIND_EXPR, NULL, body, poplevel (1, 0));
      else
        poplevel (0, 0);
    }
  else if (TREE_CODE (body) != BIND_EXPR)
    body = build3_v (BIND_EXPR, NULL, body, NULL_TREE);
  if (flag_openmp)
    {
      stmt = make_node (OMP_FOR);
      TREE_TYPE (stmt) = void_type_node;
      OMP_FOR_BODY (stmt) = body;
      OMP_FOR_CLAUSES (stmt) = omp_do_clauses;
    }
  else
    stmt = body;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
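
/* A sketch of the result: for !$omp do simd the SIMD leaf is produced by
   gfc_trans_omp_do above and then wrapped in an OMP_FOR node carrying the
   clauses that were split onto the DO leaf, i.e. the usual
   worksharing-loop-with-SIMD-body nesting that the middle end expects.  */
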
static tree
gfc_trans_omp_parallel_do (gfc_code *code, stmtblock_t *pblock,
                           gfc_omp_clauses *clausesa)
{
  stmtblock_t block, *new_pblock = pblock;
  gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
  tree stmt, omp_clauses = NULL_TREE;

  if (pblock == NULL)
    gfc_start_block (&block);
  else
    gfc_init_block (&block);

  if (clausesa == NULL)
    {
      clausesa = clausesa_buf;
      gfc_split_omp_clauses (code, clausesa);
    }
  omp_clauses
    = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
                             code->loc);
  if (pblock == NULL)
    {
      if (!clausesa[GFC_OMP_SPLIT_DO].ordered
          && clausesa[GFC_OMP_SPLIT_DO].sched_kind != OMP_SCHED_STATIC)
        new_pblock = &block;
      else
        pushlevel ();
    }
  stmt = gfc_trans_omp_do (code, EXEC_OMP_DO, new_pblock,
                           &clausesa[GFC_OMP_SPLIT_DO], omp_clauses);
  if (pblock == NULL)
    {
      if (TREE_CODE (stmt) != BIND_EXPR)
        stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
      else
        poplevel (0, 0);
    }
  else if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
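
/* In short: the worksharing loop is built first (into NEW_PBLOCK or its
   own binding level, depending on the ordered/schedule clauses checked
   above) and is then wrapped in an OMP_PARALLEL marked
   OMP_PARALLEL_COMBINED, which is how e.g. !$omp parallel do is
   represented from here on.  */
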
static tree
gfc_trans_omp_parallel_do_simd (gfc_code *code, stmtblock_t *pblock,
                                gfc_omp_clauses *clausesa)
{
  stmtblock_t block;
  gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
  tree stmt, omp_clauses = NULL_TREE;

  if (pblock == NULL)
    gfc_start_block (&block);
  else
    gfc_init_block (&block);

  if (clausesa == NULL)
    {
      clausesa = clausesa_buf;
      gfc_split_omp_clauses (code, clausesa);
    }
  if (flag_openmp)
    omp_clauses
      = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
                               code->loc);
  if (pblock == NULL)
    pushlevel ();
  stmt = gfc_trans_omp_do_simd (code, pblock, clausesa, omp_clauses);
  if (pblock == NULL)
    {
      if (TREE_CODE (stmt) != BIND_EXPR)
        stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
      else
        poplevel (0, 0);
    }
  else if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
  if (flag_openmp)
    {
      stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                         omp_clauses);
      OMP_PARALLEL_COMBINED (stmt) = 1;
    }
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
static tree
gfc_trans_omp_parallel_sections (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses section_clauses;
  tree stmt, omp_clauses;

  memset (&section_clauses, 0, sizeof (section_clauses));
  section_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel ();
  stmt = gfc_trans_omp_sections (code, &section_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  else
    poplevel (0, 0);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
static tree
gfc_trans_omp_parallel_workshare (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses workshare_clauses;
  tree stmt, omp_clauses;

  memset (&workshare_clauses, 0, sizeof (workshare_clauses));
  workshare_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel ();
  stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
  stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
static tree
gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
{
  stmtblock_t block, body;
  tree omp_clauses, stmt;
  bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;

  gfc_start_block (&block);

  omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);

  gfc_init_block (&body);
  for (code = code->block; code; code = code->block)
    {
      /* Last section is special because of lastprivate, so even if it
         is empty, chain it in.  */
      stmt = gfc_trans_omp_code (code->next,
                                 has_lastprivate && code->block == NULL);
      if (! IS_EMPTY_STMT (stmt))
        {
          stmt = build1_v (OMP_SECTION, stmt);
          gfc_add_expr_to_block (&body, stmt);
        }
    }
  stmt = gfc_finish_block (&body);

  stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}
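
/* Note on the loop above: each SECTION block becomes an OMP_SECTION
   statement inside a single OMP_SECTIONS; only the last section is forced
   to be emitted even when empty, because a lastprivate clause still has to
   take effect there.  */
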
static tree
gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
  tree stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
                     omp_clauses);
  return stmt;
}
static tree
gfc_trans_omp_task (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel ();
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
static tree
gfc_trans_omp_taskgroup (gfc_code *code)
{
  tree stmt = gfc_trans_code (code->block->next);
  return build1_loc (input_location, OMP_TASKGROUP, void_type_node, stmt);
}
static tree
gfc_trans_omp_taskwait (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
  return build_call_expr_loc (input_location, decl, 0);
}
static tree
gfc_trans_omp_taskyield (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
  return build_call_expr_loc (input_location, decl, 0);
}
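
/* Both helpers above lower the directive to a bare call of the
   corresponding libgomp builtin fetched via builtin_decl_explicit; there
   is no dedicated tree node and no clauses to translate for these
   directives.  */
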
static tree
gfc_trans_omp_distribute (gfc_code *code, gfc_omp_clauses *clausesa)
{
  stmtblock_t block;
  gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
  tree stmt, omp_clauses = NULL_TREE;

  gfc_start_block (&block);
  if (clausesa == NULL)
    {
      clausesa = clausesa_buf;
      gfc_split_omp_clauses (code, clausesa);
    }
  if (flag_openmp)
    omp_clauses
      = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_DISTRIBUTE],
                               code->loc);
  switch (code->op)
    {
    case EXEC_OMP_DISTRIBUTE:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
    case EXEC_OMP_TEAMS_DISTRIBUTE:
      /* This is handled in gfc_trans_omp_do.  */
      gcc_unreachable ();
      break;
    case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
    case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
      stmt = gfc_trans_omp_parallel_do (code, &block, clausesa);
      if (TREE_CODE (stmt) != BIND_EXPR)
        stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
      else
        poplevel (0, 0);
      break;
    case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
    case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
      stmt = gfc_trans_omp_parallel_do_simd (code, &block, clausesa);
      if (TREE_CODE (stmt) != BIND_EXPR)
        stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
      else
        poplevel (0, 0);
      break;
    case EXEC_OMP_DISTRIBUTE_SIMD:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
    case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
      stmt = gfc_trans_omp_do (code, EXEC_OMP_SIMD, &block,
                               &clausesa[GFC_OMP_SPLIT_SIMD], NULL_TREE);
      if (TREE_CODE (stmt) != BIND_EXPR)
        stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
      else
        poplevel (0, 0);
      break;
    default:
      gcc_unreachable ();
    }
  if (flag_openmp)
    {
      tree distribute = make_node (OMP_DISTRIBUTE);
      TREE_TYPE (distribute) = void_type_node;
      OMP_FOR_BODY (distribute) = stmt;
      OMP_FOR_CLAUSES (distribute) = omp_clauses;
      stmt = distribute;
    }
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
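
/* Roughly: the parallel do / simd part of a combined distribute construct
   is generated by the helpers above and, when OpenMP proper is enabled,
   becomes the body of an OMP_DISTRIBUTE node carrying the clauses that
   were split onto the DISTRIBUTE leaf.  */
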
static tree
gfc_trans_omp_teams (gfc_code *code, gfc_omp_clauses *clausesa,
                     tree omp_clauses)
{
  stmtblock_t block;
  gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
  tree stmt;
  bool combined = true;

  gfc_start_block (&block);
  if (clausesa == NULL)
    {
      clausesa = clausesa_buf;
      gfc_split_omp_clauses (code, clausesa);
      combined = false;
    }
  omp_clauses
    = chainon (omp_clauses,
               gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TEAMS],
                                      code->loc));
  switch (code->op)
    {
    case EXEC_OMP_TARGET_TEAMS:
    case EXEC_OMP_TEAMS:
      stmt = gfc_trans_omp_code (code->block->next, true);
      combined = false;
      break;
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
    case EXEC_OMP_TEAMS_DISTRIBUTE:
      stmt = gfc_trans_omp_do (code, EXEC_OMP_DISTRIBUTE, NULL,
                               &clausesa[GFC_OMP_SPLIT_DISTRIBUTE],
                               NULL);
      break;
    default:
      stmt = gfc_trans_omp_distribute (code, clausesa);
      break;
    }
  stmt = build2_loc (input_location, OMP_TEAMS, void_type_node, stmt,
                     omp_clauses);
  if (combined)
    OMP_TEAMS_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
static tree
gfc_trans_omp_target (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM];
  tree stmt, omp_clauses = NULL_TREE;

  gfc_start_block (&block);
  gfc_split_omp_clauses (code, clausesa);
  if (flag_openmp)
    omp_clauses
      = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TARGET],
                               code->loc);
  switch (code->op)
    {
    case EXEC_OMP_TARGET:
      pushlevel ();
      stmt = gfc_trans_omp_code (code->block->next, true);
      stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
      break;
    case EXEC_OMP_TARGET_PARALLEL:
      {
        stmtblock_t iblock;

        pushlevel ();
        gfc_start_block (&iblock);
        tree inner_clauses
          = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
                                   code->loc);
        stmt = gfc_trans_omp_code (code->block->next, true);
        stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                           inner_clauses);
        gfc_add_expr_to_block (&iblock, stmt);
        stmt = gfc_finish_block (&iblock);
        if (TREE_CODE (stmt) != BIND_EXPR)
          stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
        else
          poplevel (0, 0);
      }
      break;
    case EXEC_OMP_TARGET_PARALLEL_DO:
    case EXEC_OMP_TARGET_PARALLEL_DO_SIMD:
      stmt = gfc_trans_omp_parallel_do (code, &block, clausesa);
      if (TREE_CODE (stmt) != BIND_EXPR)
        stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
      else
        poplevel (0, 0);
      break;
    case EXEC_OMP_TARGET_SIMD:
      stmt = gfc_trans_omp_do (code, EXEC_OMP_SIMD, &block,
                               &clausesa[GFC_OMP_SPLIT_SIMD], NULL_TREE);
      if (TREE_CODE (stmt) != BIND_EXPR)
        stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
      else
        poplevel (0, 0);
      break;
    default:
      if (flag_openmp
          && (clausesa[GFC_OMP_SPLIT_TEAMS].num_teams
              || clausesa[GFC_OMP_SPLIT_TEAMS].thread_limit))
        {
          gfc_omp_clauses clausesb;
          tree teams_clauses;
          /* For combined !$omp target teams, the num_teams and
             thread_limit clauses are evaluated before entering the
             target construct.  */
          memset (&clausesb, '\0', sizeof (clausesb));
          clausesb.num_teams = clausesa[GFC_OMP_SPLIT_TEAMS].num_teams;
          clausesb.thread_limit = clausesa[GFC_OMP_SPLIT_TEAMS].thread_limit;
          clausesa[GFC_OMP_SPLIT_TEAMS].num_teams = NULL;
          clausesa[GFC_OMP_SPLIT_TEAMS].thread_limit = NULL;
          teams_clauses
            = gfc_trans_omp_clauses (&block, &clausesb, code->loc);
          pushlevel ();
          stmt = gfc_trans_omp_teams (code, clausesa, teams_clauses);
        }
      else
        {
          pushlevel ();
          stmt = gfc_trans_omp_teams (code, clausesa, NULL_TREE);
        }
      if (TREE_CODE (stmt) != BIND_EXPR)
        stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
      else
        poplevel (0, 0);
      break;
    }
  if (flag_openmp)
    stmt = build2_loc (input_location, OMP_TARGET, void_type_node, stmt,
                       omp_clauses);
  if (code->op != EXEC_OMP_TARGET)
    OMP_TARGET_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
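
/* As the comment in the default case above says, for combined
   !$omp target teams the num_teams and thread_limit expressions are
   evaluated on the host before the OMP_TARGET region is entered; they are
   therefore moved into a separate clause set (clausesb) that is translated
   outside the target body and handed to gfc_trans_omp_teams.  */
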
static tree
gfc_trans_omp_taskloop (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM];
  tree stmt, omp_clauses = NULL_TREE;

  gfc_start_block (&block);
  gfc_split_omp_clauses (code, clausesa);
  if (flag_openmp)
    omp_clauses
      = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TASKLOOP],
                               code->loc);
  switch (code->op)
    {
    case EXEC_OMP_TASKLOOP:
      /* This is handled in gfc_trans_omp_do.  */
      gcc_unreachable ();
      break;
    case EXEC_OMP_TASKLOOP_SIMD:
      stmt = gfc_trans_omp_do (code, EXEC_OMP_SIMD, &block,
                               &clausesa[GFC_OMP_SPLIT_SIMD], NULL_TREE);
      if (TREE_CODE (stmt) != BIND_EXPR)
        stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
      else
        poplevel (0, 0);
      break;
    default:
      gcc_unreachable ();
    }
  if (flag_openmp)
    {
      tree taskloop = make_node (OMP_TASKLOOP);
      TREE_TYPE (taskloop) = void_type_node;
      OMP_FOR_BODY (taskloop) = stmt;
      OMP_FOR_CLAUSES (taskloop) = omp_clauses;
      stmt = taskloop;
    }
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
static tree
gfc_trans_omp_target_data (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_TARGET_DATA, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
static tree
gfc_trans_omp_target_enter_data (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = build1_loc (input_location, OMP_TARGET_ENTER_DATA, void_type_node,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
static tree
gfc_trans_omp_target_exit_data (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = build1_loc (input_location, OMP_TARGET_EXIT_DATA, void_type_node,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
static tree
gfc_trans_omp_target_update (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = build1_loc (input_location, OMP_TARGET_UPDATE, void_type_node,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
static tree
gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree res, tmp, stmt;
  stmtblock_t block, *pblock = NULL;
  stmtblock_t singleblock;
  int saved_ompws_flags;
  bool singleblock_in_progress = false;
  /* True if previous gfc_code in workshare construct is not workshared.  */
  bool prev_singleunit;

  code = code->block->next;

  pushlevel ();

  gfc_start_block (&block);
  pblock = &block;

  ompws_flags = OMPWS_WORKSHARE_FLAG;
  prev_singleunit = false;

  /* Translate statements one by one to trees until we reach
     the end of the workshare construct.  Adjacent gfc_codes that
     are a single unit of work are clustered and encapsulated in a
     single OMP_SINGLE construct.  */
  for (; code; code = code->next)
    {
      if (code->here != 0)
        {
          res = gfc_trans_label_here (code);
          gfc_add_expr_to_block (pblock, res);
        }

      /* No dependence analysis, use for clauses with wait.
         If this is the last gfc_code, use default omp_clauses.  */
      if (code->next == NULL && clauses->nowait)
        ompws_flags |= OMPWS_NOWAIT;

      /* By default, every gfc_code is a single unit of work.  */
      ompws_flags |= OMPWS_CURR_SINGLEUNIT;
      ompws_flags &= ~(OMPWS_SCALARIZER_WS | OMPWS_SCALARIZER_BODY);

      switch (code->op)
        {
        case EXEC_NOP:
          res = NULL_TREE;
          break;

        case EXEC_ASSIGN:
          res = gfc_trans_assign (code);
          break;

        case EXEC_POINTER_ASSIGN:
          res = gfc_trans_pointer_assign (code);
          break;

        case EXEC_INIT_ASSIGN:
          res = gfc_trans_init_assign (code);
          break;

        case EXEC_FORALL:
          res = gfc_trans_forall (code);
          break;

        case EXEC_WHERE:
          res = gfc_trans_where (code);
          break;

        case EXEC_OMP_ATOMIC:
          res = gfc_trans_omp_directive (code);
          break;

        case EXEC_OMP_PARALLEL:
        case EXEC_OMP_PARALLEL_DO:
        case EXEC_OMP_PARALLEL_SECTIONS:
        case EXEC_OMP_PARALLEL_WORKSHARE:
        case EXEC_OMP_CRITICAL:
          saved_ompws_flags = ompws_flags;
          ompws_flags = 0;
          res = gfc_trans_omp_directive (code);
          ompws_flags = saved_ompws_flags;
          break;

        default:
          gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
        }

      gfc_set_backend_locus (&code->loc);

      if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
        {
          if (prev_singleunit)
            {
              if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
                /* Add current gfc_code to single block.  */
                gfc_add_expr_to_block (&singleblock, res);
              else
                {
                  /* Finish single block and add it to pblock.  */
                  tmp = gfc_finish_block (&singleblock);
                  tmp = build2_loc (input_location, OMP_SINGLE,
                                    void_type_node, tmp, NULL_TREE);
                  gfc_add_expr_to_block (pblock, tmp);
                  /* Add current gfc_code to pblock.  */
                  gfc_add_expr_to_block (pblock, res);
                  singleblock_in_progress = false;
                }
            }
          else
            {
              if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
                {
                  /* Start single block.  */
                  gfc_init_block (&singleblock);
                  gfc_add_expr_to_block (&singleblock, res);
                  singleblock_in_progress = true;
                }
              else
                /* Add the new statement to the block.  */
                gfc_add_expr_to_block (pblock, res);
            }
          prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
        }
    }

  /* Finish remaining SINGLE block, if we were in the middle of one.  */
  if (singleblock_in_progress)
    {
      /* Finish single block and add it to pblock.  */
      tmp = gfc_finish_block (&singleblock);
      tmp = build2_loc (input_location, OMP_SINGLE, void_type_node, tmp,
                        clauses->nowait
                        ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
                        : NULL_TREE);
      gfc_add_expr_to_block (pblock, tmp);
    }

  stmt = gfc_finish_block (pblock);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt))
        {
          tree bindblock = poplevel (1, 0);
          stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
        }
      else
        poplevel (0, 0);
    }
  else
    poplevel (0, 0);

  if (IS_EMPTY_STMT (stmt) && !clauses->nowait)
    stmt = gfc_trans_omp_barrier ();

  ompws_flags = 0;
  return stmt;
}
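
/* Summary of the scheme above: statements inside !$omp workshare that the
   scalarizer cannot workshare (OMPWS_CURR_SINGLEUNIT stays set) are
   clustered into OMP_SINGLE regions, while workshared units are emitted
   directly; an empty construct without nowait degenerates into a plain
   barrier.  */
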
static tree
gfc_trans_oacc_declare (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, oacc_clauses;
  enum tree_code construct_code;

  construct_code = OACC_DATA;

  gfc_start_block (&block);

  oacc_clauses = gfc_trans_omp_clauses (&block, code->ext.oacc_declare->clauses,
                                        code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, construct_code, void_type_node, stmt,
                     oacc_clauses);
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}
tree
gfc_trans_oacc_directive (gfc_code *code)
{
  switch (code->op)
    {
    case EXEC_OACC_PARALLEL_LOOP:
    case EXEC_OACC_KERNELS_LOOP:
      return gfc_trans_oacc_combined_directive (code);
    case EXEC_OACC_PARALLEL:
    case EXEC_OACC_KERNELS:
    case EXEC_OACC_DATA:
    case EXEC_OACC_HOST_DATA:
      return gfc_trans_oacc_construct (code);
    case EXEC_OACC_LOOP:
      return gfc_trans_omp_do (code, code->op, NULL, code->ext.omp_clauses,
                               NULL);
    case EXEC_OACC_UPDATE:
    case EXEC_OACC_CACHE:
    case EXEC_OACC_ENTER_DATA:
    case EXEC_OACC_EXIT_DATA:
      return gfc_trans_oacc_executable_directive (code);
    case EXEC_OACC_WAIT:
      return gfc_trans_oacc_wait_directive (code);
    case EXEC_OACC_ATOMIC:
      return gfc_trans_omp_atomic (code);
    case EXEC_OACC_DECLARE:
      return gfc_trans_oacc_declare (code);
    default:
      gcc_unreachable ();
    }
}
tree
gfc_trans_omp_directive (gfc_code *code)
{
  switch (code->op)
    {
    case EXEC_OMP_ATOMIC:
      return gfc_trans_omp_atomic (code);
    case EXEC_OMP_BARRIER:
      return gfc_trans_omp_barrier ();
    case EXEC_OMP_CANCEL:
      return gfc_trans_omp_cancel (code);
    case EXEC_OMP_CANCELLATION_POINT:
      return gfc_trans_omp_cancellation_point (code);
    case EXEC_OMP_CRITICAL:
      return gfc_trans_omp_critical (code);
    case EXEC_OMP_DISTRIBUTE:
    case EXEC_OMP_DO:
    case EXEC_OMP_SIMD:
    case EXEC_OMP_TASKLOOP:
      return gfc_trans_omp_do (code, code->op, NULL, code->ext.omp_clauses,
                               NULL);
    case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
    case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
    case EXEC_OMP_DISTRIBUTE_SIMD:
      return gfc_trans_omp_distribute (code, NULL);
    case EXEC_OMP_DO_SIMD:
      return gfc_trans_omp_do_simd (code, NULL, NULL, NULL_TREE);
    case EXEC_OMP_FLUSH:
      return gfc_trans_omp_flush ();
    case EXEC_OMP_MASTER:
      return gfc_trans_omp_master (code);
    case EXEC_OMP_ORDERED:
      return gfc_trans_omp_ordered (code);
    case EXEC_OMP_PARALLEL:
      return gfc_trans_omp_parallel (code);
    case EXEC_OMP_PARALLEL_DO:
      return gfc_trans_omp_parallel_do (code, NULL, NULL);
    case EXEC_OMP_PARALLEL_DO_SIMD:
      return gfc_trans_omp_parallel_do_simd (code, NULL, NULL);
    case EXEC_OMP_PARALLEL_SECTIONS:
      return gfc_trans_omp_parallel_sections (code);
    case EXEC_OMP_PARALLEL_WORKSHARE:
      return gfc_trans_omp_parallel_workshare (code);
    case EXEC_OMP_SECTIONS:
      return gfc_trans_omp_sections (code, code->ext.omp_clauses);
    case EXEC_OMP_SINGLE:
      return gfc_trans_omp_single (code, code->ext.omp_clauses);
    case EXEC_OMP_TARGET:
    case EXEC_OMP_TARGET_PARALLEL:
    case EXEC_OMP_TARGET_PARALLEL_DO:
    case EXEC_OMP_TARGET_PARALLEL_DO_SIMD:
    case EXEC_OMP_TARGET_SIMD:
    case EXEC_OMP_TARGET_TEAMS:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
      return gfc_trans_omp_target (code);
    case EXEC_OMP_TARGET_DATA:
      return gfc_trans_omp_target_data (code);
    case EXEC_OMP_TARGET_ENTER_DATA:
      return gfc_trans_omp_target_enter_data (code);
    case EXEC_OMP_TARGET_EXIT_DATA:
      return gfc_trans_omp_target_exit_data (code);
    case EXEC_OMP_TARGET_UPDATE:
      return gfc_trans_omp_target_update (code);
    case EXEC_OMP_TASK:
      return gfc_trans_omp_task (code);
    case EXEC_OMP_TASKGROUP:
      return gfc_trans_omp_taskgroup (code);
    case EXEC_OMP_TASKLOOP_SIMD:
      return gfc_trans_omp_taskloop (code);
    case EXEC_OMP_TASKWAIT:
      return gfc_trans_omp_taskwait ();
    case EXEC_OMP_TASKYIELD:
      return gfc_trans_omp_taskyield ();
    case EXEC_OMP_TEAMS:
    case EXEC_OMP_TEAMS_DISTRIBUTE:
    case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
    case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
    case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
      return gfc_trans_omp_teams (code, NULL, NULL_TREE);
    case EXEC_OMP_WORKSHARE:
      return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
    default:
      gcc_unreachable ();
    }
}
void
gfc_trans_omp_declare_simd (gfc_namespace *ns)
{
  if (ns->entries)
    return;

  gfc_omp_declare_simd *ods;
  for (ods = ns->omp_declare_simd; ods; ods = ods->next)
    {
      tree c = gfc_trans_omp_clauses (NULL, ods->clauses, ods->where, true);
      tree fndecl = ns->proc_name->backend_decl;
      if (c != NULL_TREE)
        c = tree_cons (NULL_TREE, c, NULL_TREE);
      c = build_tree_list (get_identifier ("omp declare simd"), c);
      TREE_CHAIN (c) = DECL_ATTRIBUTES (fndecl);
      DECL_ATTRIBUTES (fndecl) = c;