1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2018 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "gimple-expr.h"
30 #include "stringpool.h"
31 #include "fold-const.h"
32 #include "gimplify.h" /* For create_tmp_var_raw. */
33 #include "trans-stmt.h"
34 #include "trans-types.h"
35 #include "trans-array.h"
36 #include "trans-const.h"
38 #include "gomp-constants.h"
39 #include "omp-general.h"
42 #define GCC_DIAG_STYLE __gcc_tdiag__
43 #include "diagnostic-core.h"
45 #define GCC_DIAG_STYLE __gcc_gfc__
/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.

   NOTE(review): this chunk is a corrupted extraction — statements are
   split mid-expression, the upstream file's line numbers are fused into
   the text, and interior lines (return statements, braces) are missing.
   Do not edit by hand; restore from upstream gcc/fortran/trans-openmp.c.
   Visible logic: REFERENCE_TYPE decls (non-artificial, or PARM_DECLs)
   and certain POINTER_TYPE decls are tested; the missing lines
   presumably hold the return true/false results — TODO confirm against
   upstream.  */
53 gfc_omp_privatize_by_reference (const_tree decl
)
55 tree type
= TREE_TYPE (decl
);
57 if (TREE_CODE (type
) == REFERENCE_TYPE
58 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
61 if (TREE_CODE (type
) == POINTER_TYPE
)
63 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
64 that have POINTER_TYPE type and aren't scalar pointers, scalar
65 allocatables, Cray pointees or C pointers are supposed to be
66 privatized by reference. */
67 if (GFC_DECL_GET_SCALAR_POINTER (decl
)
68 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
69 || GFC_DECL_CRAY_POINTEE (decl
)
70 || GFC_DECL_ASSOCIATE_VAR_P (decl
)
71 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
74 if (!DECL_ARTIFICIAL (decl
)
75 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
78 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
   (comment truncated in extraction).  */
80 if (DECL_LANG_SPECIFIC (decl
)
81 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
88 /* True if OpenMP sharing attribute of DECL is predetermined.

   NOTE(review): corrupted extraction — statements split across lines,
   upstream line numbers fused in, interior braces missing.  Restore
   from upstream gcc/fortran/trans-openmp.c.  The visible returns map
   decl categories to OMP_CLAUSE_DEFAULT_* kinds.  */
90 enum omp_clause_default_kind
91 gfc_omp_predetermined_sharing (tree decl
)
93 /* Associate names preserve the association established during ASSOCIATE.
94 As they are implemented either as pointers to the selector or array
95 descriptor and shouldn't really change in the ASSOCIATE region,
96 this decl can be either shared or firstprivate. If it is a pointer,
97 use firstprivate, as it is cheaper that way, otherwise make it shared. */
98 if (GFC_DECL_ASSOCIATE_VAR_P (decl
))
100 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
101 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
103 return OMP_CLAUSE_DEFAULT_SHARED
;
106 if (DECL_ARTIFICIAL (decl
)
107 && ! GFC_DECL_RESULT (decl
)
108 && ! (DECL_LANG_SPECIFIC (decl
)
109 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
110 return OMP_CLAUSE_DEFAULT_SHARED
;
112 /* Cray pointees shouldn't be listed in any clauses and should be
113 gimplified to dereference of the corresponding Cray pointer.
114 Make them all private, so that they are emitted in the debug
   info (comment truncated in extraction).  */
116 if (GFC_DECL_CRAY_POINTEE (decl
))
117 return OMP_CLAUSE_DEFAULT_PRIVATE
;
119 /* Assumed-size arrays are predetermined shared. */
120 if (TREE_CODE (decl
) == PARM_DECL
121 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
122 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
123 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
124 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
126 return OMP_CLAUSE_DEFAULT_SHARED
;
128 /* Dummy procedures aren't considered variables by OpenMP, thus are
129 disallowed in OpenMP clauses. They are represented as PARM_DECLs
130 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
131 to avoid complaining about their uses with default(none). */
132 if (TREE_CODE (decl
) == PARM_DECL
133 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
134 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
135 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
137 /* COMMON and EQUIVALENCE decls are shared. They
138 are only referenced through DECL_VALUE_EXPR of the variables
139 contained in them. If those are privatized, they will not be
140 gimplified to the COMMON or EQUIVALENCE decls. */
141 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
142 return OMP_CLAUSE_DEFAULT_SHARED
;
144 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
145 return OMP_CLAUSE_DEFAULT_SHARED
;
147 /* These are either array or derived parameters, or vtables.
148 In the former cases, the OpenMP standard doesn't consider them to be
149 variables at all (they can't be redefined), but they can nevertheless appear
150 in parallel/task regions and for default(none) purposes treat them as shared.
151 For vtables likely the same handling is desirable. */
152 if (VAR_P (decl
) && TREE_READONLY (decl
) && TREE_STATIC (decl
))
153 return OMP_CLAUSE_DEFAULT_SHARED
;
155 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
158 /* Return decl that should be used when reporting DEFAULT(NONE)
   diagnostics (comment truncated in extraction).

   NOTE(review): corrupted extraction — return type line and braces are
   missing; restore from upstream gcc/fortran/trans-openmp.c.  For an
   artificial decl carrying a saved array descriptor, the descriptor
   decl is reported instead; the fall-through return of DECL itself is
   among the missing lines — TODO confirm against upstream.  */
162 gfc_omp_report_decl (tree decl
)
164 if (DECL_ARTIFICIAL (decl
)
165 && DECL_LANG_SPECIFIC (decl
)
166 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
167 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
172 /* Return true if TYPE has any allocatable components.

   NOTE(review): corrupted extraction — declarations of `field`/`ftype`,
   braces and several return statements are missing; restore from
   upstream gcc/fortran/trans-openmp.c.  Visible logic: strip pointer /
   descriptor / array layers down to the element RECORD_TYPE, then walk
   its fields looking for scalar allocatables, allocatable-array
   descriptors, or (recursively) fields that themselves have
   allocatable components.  */
175 gfc_has_alloc_comps (tree type
, tree decl
)
179 if (POINTER_TYPE_P (type
))
181 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
182 type
= TREE_TYPE (type
);
183 else if (GFC_DECL_GET_SCALAR_POINTER (decl
))
187 if (GFC_DESCRIPTOR_TYPE_P (type
) || GFC_ARRAY_TYPE_P (type
))
188 type
= gfc_get_element_type (type
);
190 if (TREE_CODE (type
) != RECORD_TYPE
)
193 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
195 ftype
= TREE_TYPE (field
);
196 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
198 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
199 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
201 if (gfc_has_alloc_comps (ftype
, field
))
207 /* Return true if DECL in private clause needs
208 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause.

   NOTE(review): corrupted extraction — return type, braces and the
   `return true/false` lines are missing; restore from upstream
   gcc/fortran/trans-openmp.c.  Visible tests: allocatable array
   descriptors, scalar allocatables, and types with allocatable
   components (each presumably returning true upstream — TODO
   confirm).  */
210 gfc_omp_private_outer_ref (tree decl
)
212 tree type
= TREE_TYPE (decl
);
214 if (gfc_omp_privatize_by_reference (decl
))
215 type
= TREE_TYPE (type
);
217 if (GFC_DESCRIPTOR_TYPE_P (type
)
218 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
221 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
224 if (gfc_has_alloc_comps (type
, decl
))
230 /* Callback for gfc_omp_unshare_expr.

   NOTE(review): corrupted extraction — the return type, the `t`
   variable binding (presumably `tree t = *tp` upstream — TODO confirm),
   the *walk_subtrees updates and the return statements are missing;
   restore from upstream gcc/fortran/trans-openmp.c.  Visible logic:
   stop recursing at types/decls/constants (mirroring copy_tree_r) and
   unshare handled components and MEM_REFs in place.  */
233 gfc_omp_unshare_expr_r (tree
*tp
, int *walk_subtrees
, void *)
236 enum tree_code code
= TREE_CODE (t
);
238 /* Stop at types, decls, constants like copy_tree_r. */
239 if (TREE_CODE_CLASS (code
) == tcc_type
240 || TREE_CODE_CLASS (code
) == tcc_declaration
241 || TREE_CODE_CLASS (code
) == tcc_constant
244 else if (handled_component_p (t
)
245 || TREE_CODE (t
) == MEM_REF
)
247 *tp
= unshare_expr (t
);
254 /* Unshare in expr anything that the FE which normally doesn't
255 care much about tree sharing (because during gimplification
256 everything is unshared) could cause problems with tree sharing
257 at omp-low.c time.

   NOTE(review): corrupted extraction — the return type line, braces
   and `return expr;` are missing; restore from upstream
   gcc/fortran/trans-openmp.c.  Walks EXPR with the
   gfc_omp_unshare_expr_r callback above.  */
260 gfc_omp_unshare_expr (tree expr
)
262 walk_tree (&expr
, gfc_omp_unshare_expr_r
, NULL
, NULL
);
/* Operation selector for gfc_walk_alloc_comps: destroy, default-
   construct, or copy-construct the allocatable components of a decl.
   NOTE(review): corrupted extraction — the enum's braces are missing;
   restore from upstream gcc/fortran/trans-openmp.c.  */
266 enum walk_alloc_comps
268 WALK_ALLOC_COMPS_DTOR
,
269 WALK_ALLOC_COMPS_DEFAULT_CTOR
,
270 WALK_ALLOC_COMPS_COPY_CTOR
273 /* Handle allocatable components in OpenMP clauses.

   NOTE(review): corrupted extraction — statements are split across
   lines, upstream line numbers are fused in, and many interior lines
   (braces, switch header, loop setup, some operands) are missing.
   Do not edit by hand; restore from upstream
   gcc/fortran/trans-openmp.c.

   Visible structure: for array DECLs, build a scalarized loop over the
   elements and recurse on each element pair (DECL element, DEST
   element); for scalar allocatables, dereference and recurse on the
   pointed-to record; otherwise walk the RECORD_TYPE's fields and,
   per KIND (dtor / default ctor / copy ctor), emit deallocation,
   gfc_duplicate_allocatable_nocopy, or gfc_duplicate_allocatable calls
   guarded by a non-NULL data-pointer check.  */
276 gfc_walk_alloc_comps (tree decl
, tree dest
, tree var
,
277 enum walk_alloc_comps kind
)
279 stmtblock_t block
, tmpblock
;
280 tree type
= TREE_TYPE (decl
), then_b
, tem
, field
;
281 gfc_init_block (&block
);
283 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
285 if (GFC_DESCRIPTOR_TYPE_P (type
))
287 gfc_init_block (&tmpblock
);
288 tem
= gfc_full_array_size (&tmpblock
, decl
,
289 GFC_TYPE_ARRAY_RANK (type
));
290 then_b
= gfc_finish_block (&tmpblock
);
291 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (then_b
));
292 tem
= gfc_omp_unshare_expr (tem
);
293 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
294 gfc_array_index_type
, tem
,
/* NOTE(review): second MINUS_EXPR operand lost in extraction.  */
299 if (!TYPE_DOMAIN (type
)
300 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
301 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
302 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
304 tem
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
305 TYPE_SIZE_UNIT (type
),
306 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
307 tem
= size_binop (MINUS_EXPR
, tem
, size_one_node
);
310 tem
= array_type_nelts (type
);
311 tem
= fold_convert (gfc_array_index_type
, tem
);
314 tree nelems
= gfc_evaluate_now (tem
, &block
);
315 tree index
= gfc_create_var (gfc_array_index_type
, "S");
317 gfc_init_block (&tmpblock
);
318 tem
= gfc_conv_array_data (decl
);
319 tree declvar
= build_fold_indirect_ref_loc (input_location
, tem
);
320 tree declvref
= gfc_build_array_ref (declvar
, index
, NULL
);
321 tree destvar
, destvref
= NULL_TREE
;
324 tem
= gfc_conv_array_data (dest
);
325 destvar
= build_fold_indirect_ref_loc (input_location
, tem
);
326 destvref
= gfc_build_array_ref (destvar
, index
, NULL
);
328 gfc_add_expr_to_block (&tmpblock
,
329 gfc_walk_alloc_comps (declvref
, destvref
,
/* NOTE(review): var/kind arguments and loop declaration lost in
   extraction.  */
333 gfc_init_loopinfo (&loop
);
335 loop
.from
[0] = gfc_index_zero_node
;
336 loop
.loopvar
[0] = index
;
338 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
339 gfc_add_block_to_block (&block
, &loop
.pre
);
340 return gfc_finish_block (&block
);
342 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var
))
344 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
346 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
347 type
= TREE_TYPE (decl
);
350 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
);
351 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
353 tree ftype
= TREE_TYPE (field
);
354 tree declf
, destf
= NULL_TREE
;
355 bool has_alloc_comps
= gfc_has_alloc_comps (ftype
, field
);
356 if ((!GFC_DESCRIPTOR_TYPE_P (ftype
)
357 || GFC_TYPE_ARRAY_AKIND (ftype
) != GFC_ARRAY_ALLOCATABLE
)
358 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field
)
361 declf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
362 decl
, field
, NULL_TREE
);
364 destf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
365 dest
, field
, NULL_TREE
);
/* NOTE(review): the switch (kind) header is missing here.  */
370 case WALK_ALLOC_COMPS_DTOR
:
372 case WALK_ALLOC_COMPS_DEFAULT_CTOR
:
373 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
374 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
376 gfc_add_modify (&block
, unshare_expr (destf
),
377 unshare_expr (declf
));
378 tem
= gfc_duplicate_allocatable_nocopy
379 (destf
, declf
, ftype
,
380 GFC_TYPE_ARRAY_RANK (ftype
));
382 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
383 tem
= gfc_duplicate_allocatable_nocopy (destf
, declf
, ftype
, 0);
385 case WALK_ALLOC_COMPS_COPY_CTOR
:
386 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
387 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
388 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
,
389 GFC_TYPE_ARRAY_RANK (ftype
),
391 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
392 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
, 0,
397 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
400 gfc_init_block (&tmpblock
);
401 gfc_add_expr_to_block (&tmpblock
,
402 gfc_walk_alloc_comps (declf
, destf
,
404 then_b
= gfc_finish_block (&tmpblock
);
405 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
406 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
407 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
408 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
409 tem
= unshare_expr (declf
);
414 tem
= fold_convert (pvoid_type_node
, tem
);
415 tem
= fold_build2_loc (input_location
, NE_EXPR
,
416 logical_type_node
, tem
,
418 then_b
= build3_loc (input_location
, COND_EXPR
, void_type_node
,
420 build_empty_stmt (input_location
));
422 gfc_add_expr_to_block (&block
, then_b
);
424 if (kind
== WALK_ALLOC_COMPS_DTOR
)
426 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
427 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
429 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
430 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
,
431 NULL_TREE
, NULL_TREE
, true,
433 GFC_CAF_COARRAY_NOCOARRAY
);
434 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
436 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
438 tem
= gfc_call_free (unshare_expr (declf
));
439 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
444 return gfc_finish_block (&block
);
447 /* Return code to initialize DECL with its default constructor, or
448 NULL if there's nothing to do.

   NOTE(review): corrupted extraction — statements split across lines,
   upstream line numbers fused in, interior lines (braces, `return
   NULL;`, some operands) missing.  Restore from upstream
   gcc/fortran/trans-openmp.c.

   Visible structure: for non-allocatable types, only allocatable
   components (if any) are default-constructed.  For allocatable
   arrays/scalars: compute the outer's size, malloc a matching buffer
   into the private copy, default-construct allocatable components,
   and — except for REDUCTION, which requires an allocated outer —
   guard the whole thing on the outer's data pointer being non-NULL,
   nulling the private copy otherwise.  */
451 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
453 tree type
= TREE_TYPE (decl
), size
, ptr
, cond
, then_b
, else_b
;
454 stmtblock_t block
, cond_block
;
456 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_PRIVATE
457 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LASTPRIVATE
458 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
459 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_REDUCTION
);
461 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
462 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
463 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
465 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
468 gfc_start_block (&block
);
469 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
470 OMP_CLAUSE_DECL (clause
),
471 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
472 gfc_add_expr_to_block (&block
, tem
);
473 return gfc_finish_block (&block
);
478 gcc_assert (outer
!= NULL_TREE
);
480 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
481 "not currently allocated" allocation status if outer
482 array is "not currently allocated", otherwise should be allocated. */
483 gfc_start_block (&block
);
485 gfc_init_block (&cond_block
);
487 if (GFC_DESCRIPTOR_TYPE_P (type
))
489 gfc_add_modify (&cond_block
, decl
, outer
);
490 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
491 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
492 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
494 gfc_conv_descriptor_lbound_get (decl
, rank
));
495 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
496 size
, gfc_index_one_node
);
497 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
498 size
= fold_build2_loc (input_location
, MULT_EXPR
,
499 gfc_array_index_type
, size
,
500 gfc_conv_descriptor_stride_get (decl
, rank
));
501 tree esize
= fold_convert (gfc_array_index_type
,
502 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
503 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
505 size
= unshare_expr (size
);
506 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
510 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
511 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
512 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
513 if (GFC_DESCRIPTOR_TYPE_P (type
))
514 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
), ptr
);
516 gfc_add_modify (&cond_block
, unshare_expr (decl
),
517 fold_convert (TREE_TYPE (decl
), ptr
));
518 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
520 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
521 OMP_CLAUSE_DECL (clause
),
522 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
523 gfc_add_expr_to_block (&cond_block
, tem
);
525 then_b
= gfc_finish_block (&cond_block
);
527 /* Reduction clause requires allocated ALLOCATABLE. */
528 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_REDUCTION
)
530 gfc_init_block (&cond_block
);
531 if (GFC_DESCRIPTOR_TYPE_P (type
))
532 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
),
535 gfc_add_modify (&cond_block
, unshare_expr (decl
),
536 build_zero_cst (TREE_TYPE (decl
)));
537 else_b
= gfc_finish_block (&cond_block
);
539 tree tem
= fold_convert (pvoid_type_node
,
540 GFC_DESCRIPTOR_TYPE_P (type
)
541 ? gfc_conv_descriptor_data_get (outer
) : outer
);
542 tem
= unshare_expr (tem
);
543 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
544 tem
, null_pointer_node
);
545 gfc_add_expr_to_block (&block
,
546 build3_loc (input_location
, COND_EXPR
,
547 void_type_node
, cond
, then_b
,
551 gfc_add_expr_to_block (&block
, then_b
);
553 return gfc_finish_block (&block
);
556 /* Build and return code for a copy constructor from SRC to DEST.

   NOTE(review): corrupted extraction — statements split across lines,
   upstream line numbers fused in, interior lines missing.  Restore
   from upstream gcc/fortran/trans-openmp.c.

   Visible structure: non-allocatable types are a plain MODIFY_EXPR
   (plus component copy-ctor walk if they have allocatable
   components).  Allocatable arrays/scalars: copy the descriptor,
   compute SRC's byte size, malloc DEST's data, memcpy from SRC,
   copy-construct allocatable components — all guarded on SRC's data
   pointer being non-NULL, nulling DEST otherwise.  */
559 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
561 tree type
= TREE_TYPE (dest
), ptr
, size
, call
;
562 tree cond
, then_b
, else_b
;
563 stmtblock_t block
, cond_block
;
565 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
566 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
568 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
569 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
570 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
572 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
574 gfc_start_block (&block
);
575 gfc_add_modify (&block
, dest
, src
);
576 tree tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
577 WALK_ALLOC_COMPS_COPY_CTOR
);
578 gfc_add_expr_to_block (&block
, tem
);
579 return gfc_finish_block (&block
);
582 return build2_v (MODIFY_EXPR
, dest
, src
);
585 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
586 and copied from SRC. */
587 gfc_start_block (&block
);
589 gfc_init_block (&cond_block
);
591 gfc_add_modify (&cond_block
, dest
, src
);
592 if (GFC_DESCRIPTOR_TYPE_P (type
))
594 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
595 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
596 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
598 gfc_conv_descriptor_lbound_get (dest
, rank
));
599 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
600 size
, gfc_index_one_node
);
601 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
602 size
= fold_build2_loc (input_location
, MULT_EXPR
,
603 gfc_array_index_type
, size
,
604 gfc_conv_descriptor_stride_get (dest
, rank
));
605 tree esize
= fold_convert (gfc_array_index_type
,
606 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
607 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
609 size
= unshare_expr (size
);
610 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
614 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
615 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
616 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
617 if (GFC_DESCRIPTOR_TYPE_P (type
))
618 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
), ptr
);
620 gfc_add_modify (&cond_block
, unshare_expr (dest
),
621 fold_convert (TREE_TYPE (dest
), ptr
));
623 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
624 ? gfc_conv_descriptor_data_get (src
) : src
;
625 srcptr
= unshare_expr (srcptr
);
626 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
627 call
= build_call_expr_loc (input_location
,
628 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
630 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
631 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
633 tree tem
= gfc_walk_alloc_comps (src
, dest
,
634 OMP_CLAUSE_DECL (clause
),
635 WALK_ALLOC_COMPS_COPY_CTOR
);
636 gfc_add_expr_to_block (&cond_block
, tem
);
638 then_b
= gfc_finish_block (&cond_block
);
640 gfc_init_block (&cond_block
);
641 if (GFC_DESCRIPTOR_TYPE_P (type
))
642 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
),
645 gfc_add_modify (&cond_block
, unshare_expr (dest
),
646 build_zero_cst (TREE_TYPE (dest
)));
647 else_b
= gfc_finish_block (&cond_block
);
649 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
650 unshare_expr (srcptr
), null_pointer_node
);
651 gfc_add_expr_to_block (&block
,
652 build3_loc (input_location
, COND_EXPR
,
653 void_type_node
, cond
, then_b
, else_b
));
655 return gfc_finish_block (&block
);
658 /* Similarly, except use an intrinsic or pointer assignment operator
   instead (comment truncated in extraction).

   NOTE(review): corrupted extraction — statements split across lines,
   upstream line numbers fused in, many interior lines missing.
   Restore from upstream gcc/fortran/trans-openmp.c.

   Visible structure: for non-allocatable types, destroy DEST's
   allocatable components, assign, then copy-construct components.
   For allocatable arrays/scalars: if SRC's data pointer is non-NULL,
   (re)allocate DEST to SRC's size — realloc when DEST was already
   allocated and bounds match, malloc otherwise — then memcpy and
   copy-construct components; if SRC is NULL, deallocate/free DEST
   and zero it.  */
662 gfc_omp_clause_assign_op (tree clause
, tree dest
, tree src
)
664 tree type
= TREE_TYPE (dest
), ptr
, size
, call
, nonalloc
;
665 tree cond
, then_b
, else_b
;
666 stmtblock_t block
, cond_block
, cond_block2
, inner_block
;
668 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
669 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
670 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
672 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
674 gfc_start_block (&block
);
675 /* First dealloc any allocatable components in DEST. */
676 tree tem
= gfc_walk_alloc_comps (dest
, NULL_TREE
,
677 OMP_CLAUSE_DECL (clause
),
678 WALK_ALLOC_COMPS_DTOR
);
679 gfc_add_expr_to_block (&block
, tem
);
680 /* Then copy over toplevel data. */
681 gfc_add_modify (&block
, dest
, src
);
682 /* Finally allocate any allocatable components and copy. */
683 tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
684 WALK_ALLOC_COMPS_COPY_CTOR
);
685 gfc_add_expr_to_block (&block
, tem
);
686 return gfc_finish_block (&block
);
689 return build2_v (MODIFY_EXPR
, dest
, src
);
692 gfc_start_block (&block
);
694 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
696 then_b
= gfc_walk_alloc_comps (dest
, NULL_TREE
, OMP_CLAUSE_DECL (clause
),
697 WALK_ALLOC_COMPS_DTOR
);
698 tree tem
= fold_convert (pvoid_type_node
,
699 GFC_DESCRIPTOR_TYPE_P (type
)
700 ? gfc_conv_descriptor_data_get (dest
) : dest
);
701 tem
= unshare_expr (tem
);
702 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
703 tem
, null_pointer_node
);
704 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
705 then_b
, build_empty_stmt (input_location
));
706 gfc_add_expr_to_block (&block
, tem
);
709 gfc_init_block (&cond_block
);
711 if (GFC_DESCRIPTOR_TYPE_P (type
))
713 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
714 size
= gfc_conv_descriptor_ubound_get (src
, rank
);
715 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
717 gfc_conv_descriptor_lbound_get (src
, rank
));
718 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
719 size
, gfc_index_one_node
);
720 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
721 size
= fold_build2_loc (input_location
, MULT_EXPR
,
722 gfc_array_index_type
, size
,
723 gfc_conv_descriptor_stride_get (src
, rank
));
724 tree esize
= fold_convert (gfc_array_index_type
,
725 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
726 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
728 size
= unshare_expr (size
);
729 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
733 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
734 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
736 tree destptr
= GFC_DESCRIPTOR_TYPE_P (type
)
737 ? gfc_conv_descriptor_data_get (dest
) : dest
;
738 destptr
= unshare_expr (destptr
);
739 destptr
= fold_convert (pvoid_type_node
, destptr
);
740 gfc_add_modify (&cond_block
, ptr
, destptr
);
742 nonalloc
= fold_build2_loc (input_location
, EQ_EXPR
, logical_type_node
,
743 destptr
, null_pointer_node
);
745 if (GFC_DESCRIPTOR_TYPE_P (type
))
748 for (i
= 0; i
< GFC_TYPE_ARRAY_RANK (type
); i
++)
750 tree rank
= gfc_rank_cst
[i
];
751 tree tem
= gfc_conv_descriptor_ubound_get (src
, rank
);
752 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
753 gfc_array_index_type
, tem
,
754 gfc_conv_descriptor_lbound_get (src
, rank
));
755 tem
= fold_build2_loc (input_location
, PLUS_EXPR
,
756 gfc_array_index_type
, tem
,
757 gfc_conv_descriptor_lbound_get (dest
, rank
));
758 tem
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
759 tem
, gfc_conv_descriptor_ubound_get (dest
,
761 cond
= fold_build2_loc (input_location
, TRUTH_ORIF_EXPR
,
762 logical_type_node
, cond
, tem
);
766 gfc_init_block (&cond_block2
);
768 if (GFC_DESCRIPTOR_TYPE_P (type
))
770 gfc_init_block (&inner_block
);
771 gfc_allocate_using_malloc (&inner_block
, ptr
, size
, NULL_TREE
);
772 then_b
= gfc_finish_block (&inner_block
);
774 gfc_init_block (&inner_block
);
775 gfc_add_modify (&inner_block
, ptr
,
776 gfc_call_realloc (&inner_block
, ptr
, size
));
777 else_b
= gfc_finish_block (&inner_block
);
779 gfc_add_expr_to_block (&cond_block2
,
780 build3_loc (input_location
, COND_EXPR
,
782 unshare_expr (nonalloc
),
784 gfc_add_modify (&cond_block2
, dest
, src
);
785 gfc_conv_descriptor_data_set (&cond_block2
, unshare_expr (dest
), ptr
);
789 gfc_allocate_using_malloc (&cond_block2
, ptr
, size
, NULL_TREE
);
790 gfc_add_modify (&cond_block2
, unshare_expr (dest
),
791 fold_convert (type
, ptr
));
793 then_b
= gfc_finish_block (&cond_block2
);
794 else_b
= build_empty_stmt (input_location
);
796 gfc_add_expr_to_block (&cond_block
,
797 build3_loc (input_location
, COND_EXPR
,
798 void_type_node
, unshare_expr (cond
),
801 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
802 ? gfc_conv_descriptor_data_get (src
) : src
;
803 srcptr
= unshare_expr (srcptr
);
804 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
805 call
= build_call_expr_loc (input_location
,
806 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
808 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
809 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
811 tree tem
= gfc_walk_alloc_comps (src
, dest
,
812 OMP_CLAUSE_DECL (clause
),
813 WALK_ALLOC_COMPS_COPY_CTOR
);
814 gfc_add_expr_to_block (&cond_block
, tem
);
816 then_b
= gfc_finish_block (&cond_block
);
818 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_COPYIN
)
820 gfc_init_block (&cond_block
);
821 if (GFC_DESCRIPTOR_TYPE_P (type
))
823 tree tmp
= gfc_conv_descriptor_data_get (unshare_expr (dest
));
824 tmp
= gfc_deallocate_with_status (tmp
, NULL_TREE
, NULL_TREE
,
825 NULL_TREE
, NULL_TREE
, true, NULL
,
826 GFC_CAF_COARRAY_NOCOARRAY
);
827 gfc_add_expr_to_block (&cond_block
, tmp
);
831 destptr
= gfc_evaluate_now (destptr
, &cond_block
);
832 gfc_add_expr_to_block (&cond_block
, gfc_call_free (destptr
));
833 gfc_add_modify (&cond_block
, unshare_expr (dest
),
834 build_zero_cst (TREE_TYPE (dest
)));
836 else_b
= gfc_finish_block (&cond_block
);
838 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
839 unshare_expr (srcptr
), null_pointer_node
);
840 gfc_add_expr_to_block (&block
,
841 build3_loc (input_location
, COND_EXPR
,
842 void_type_node
, cond
,
846 gfc_add_expr_to_block (&block
, then_b
);
848 return gfc_finish_block (&block
);
/* Emit into BLOCK a scalarized loop over NELEMS elements setting
   DEST[i] = SRC[i] + ADD, where DEST/SRC are either ARRAY_TYPE
   variables or raw data pointers.

   NOTE(review): corrupted extraction — the return type line, the loop
   declaration, the POINTER_PLUS_EXPR offsets and the loop upper-bound
   setup are missing.  Restore from upstream
   gcc/fortran/trans-openmp.c.  */
852 gfc_omp_linear_clause_add_loop (stmtblock_t
*block
, tree dest
, tree src
,
853 tree add
, tree nelems
)
855 stmtblock_t tmpblock
;
856 tree desta
, srca
, index
= gfc_create_var (gfc_array_index_type
, "S");
857 nelems
= gfc_evaluate_now (nelems
, block
);
859 gfc_init_block (&tmpblock
);
860 if (TREE_CODE (TREE_TYPE (dest
)) == ARRAY_TYPE
)
862 desta
= gfc_build_array_ref (dest
, index
, NULL
);
863 srca
= gfc_build_array_ref (src
, index
, NULL
);
867 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest
)));
868 tree idx
= fold_build2 (MULT_EXPR
, sizetype
,
869 fold_convert (sizetype
, index
),
870 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest
))));
871 desta
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
872 TREE_TYPE (dest
), dest
,
874 srca
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
875 TREE_TYPE (src
), src
,
878 gfc_add_modify (&tmpblock
, desta
,
879 fold_build2 (PLUS_EXPR
, TREE_TYPE (desta
),
883 gfc_init_loopinfo (&loop
);
885 loop
.from
[0] = gfc_index_zero_node
;
886 loop
.loopvar
[0] = index
;
888 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
889 gfc_add_block_to_block (block
, &loop
.pre
);
892 /* Build and return code for a constructor of DEST that initializes
893 it to SRC plus ADD (ADD is scalar integer).

   NOTE(review): corrupted extraction — statements split across lines,
   upstream line numbers fused in, interior lines (block declaration,
   braces, some operands) missing.  Restore from upstream
   gcc/fortran/trans-openmp.c.

   Visible structure: for plain arrays, compute the element count and
   emit the element-wise add loop directly; for allocatable arrays,
   copy the descriptor, malloc DEST's data from SRC's extent, then run
   the add loop over the raw data pointers; for scalars, allocate and
   store *DEST = *SRC + ADD.  */
896 gfc_omp_clause_linear_ctor (tree clause
, tree dest
, tree src
, tree add
)
898 tree type
= TREE_TYPE (dest
), ptr
, size
, nelems
= NULL_TREE
;
901 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
903 gfc_start_block (&block
);
904 add
= gfc_evaluate_now (add
, &block
);
906 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
907 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
908 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
910 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
911 if (!TYPE_DOMAIN (type
)
912 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
913 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
914 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
916 nelems
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
917 TYPE_SIZE_UNIT (type
),
918 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
919 nelems
= size_binop (MINUS_EXPR
, nelems
, size_one_node
);
922 nelems
= array_type_nelts (type
);
923 nelems
= fold_convert (gfc_array_index_type
, nelems
);
925 gfc_omp_linear_clause_add_loop (&block
, dest
, src
, add
, nelems
);
926 return gfc_finish_block (&block
);
929 /* Allocatable arrays in LINEAR clauses need to be allocated
930 and copied from SRC. */
931 gfc_add_modify (&block
, dest
, src
);
932 if (GFC_DESCRIPTOR_TYPE_P (type
))
934 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
935 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
936 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
938 gfc_conv_descriptor_lbound_get (dest
, rank
));
939 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
940 size
, gfc_index_one_node
);
941 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
942 size
= fold_build2_loc (input_location
, MULT_EXPR
,
943 gfc_array_index_type
, size
,
944 gfc_conv_descriptor_stride_get (dest
, rank
));
945 tree esize
= fold_convert (gfc_array_index_type
,
946 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
947 nelems
= gfc_evaluate_now (unshare_expr (size
), &block
);
948 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
949 nelems
, unshare_expr (esize
));
950 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
952 nelems
= fold_build2_loc (input_location
, MINUS_EXPR
,
953 gfc_array_index_type
, nelems
,
957 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
958 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
959 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
960 if (GFC_DESCRIPTOR_TYPE_P (type
))
962 gfc_conv_descriptor_data_set (&block
, unshare_expr (dest
), ptr
);
963 tree etype
= gfc_get_element_type (type
);
964 ptr
= fold_convert (build_pointer_type (etype
), ptr
);
965 tree srcptr
= gfc_conv_descriptor_data_get (unshare_expr (src
));
966 srcptr
= fold_convert (build_pointer_type (etype
), srcptr
);
967 gfc_omp_linear_clause_add_loop (&block
, ptr
, srcptr
, add
, nelems
);
971 gfc_add_modify (&block
, unshare_expr (dest
),
972 fold_convert (TREE_TYPE (dest
), ptr
));
973 ptr
= fold_convert (TREE_TYPE (dest
), ptr
);
974 tree dstm
= build_fold_indirect_ref (ptr
);
975 tree srcm
= build_fold_indirect_ref (unshare_expr (src
));
976 gfc_add_modify (&block
, dstm
,
977 fold_build2 (PLUS_EXPR
, TREE_TYPE (add
), srcm
, add
));
979 return gfc_finish_block (&block
);
982 /* Build and return code destructing DECL. Return NULL if nothing
   to do (comment truncated in extraction).

   NOTE(review): corrupted extraction — statements split across lines,
   upstream line numbers fused in, interior lines (braces, block/then_b
   declarations, the final return) missing.  Restore from upstream
   gcc/fortran/trans-openmp.c.

   Visible structure: non-allocatable decls only need their
   allocatable components destroyed; allocatable array descriptors
   get gfc_deallocate_with_status, scalar allocatables a free() call,
   with the component-dtor walk wrapped in a non-NULL data-pointer
   guard when components exist.  */
986 gfc_omp_clause_dtor (tree clause
, tree decl
)
988 tree type
= TREE_TYPE (decl
), tem
;
990 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
991 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
992 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
994 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
995 return gfc_walk_alloc_comps (decl
, NULL_TREE
,
996 OMP_CLAUSE_DECL (clause
),
997 WALK_ALLOC_COMPS_DTOR
);
1001 if (GFC_DESCRIPTOR_TYPE_P (type
))
1003 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
1004 to be deallocated if they were allocated. */
1005 tem
= gfc_conv_descriptor_data_get (decl
);
1006 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
, NULL_TREE
,
1007 NULL_TREE
, true, NULL
,
1008 GFC_CAF_COARRAY_NOCOARRAY
);
1011 tem
= gfc_call_free (decl
);
1012 tem
= gfc_omp_unshare_expr (tem
);
1014 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1019 gfc_init_block (&block
);
1020 gfc_add_expr_to_block (&block
,
1021 gfc_walk_alloc_comps (decl
, NULL_TREE
,
1022 OMP_CLAUSE_DECL (clause
),
1023 WALK_ALLOC_COMPS_DTOR
));
1024 gfc_add_expr_to_block (&block
, tem
);
1025 then_b
= gfc_finish_block (&block
);
1027 tem
= fold_convert (pvoid_type_node
,
1028 GFC_DESCRIPTOR_TYPE_P (type
)
1029 ? gfc_conv_descriptor_data_get (decl
) : decl
);
1030 tem
= unshare_expr (tem
);
1031 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1032 tem
, null_pointer_node
);
1033 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1034 then_b
, build_empty_stmt (input_location
));
1041 gfc_omp_finish_clause (tree c
, gimple_seq
*pre_p
)
1043 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
1046 tree decl
= OMP_CLAUSE_DECL (c
);
1048 /* Assumed-size arrays can't be mapped implicitly, they have to be
1049 mapped explicitly using array sections. */
1050 if (TREE_CODE (decl
) == PARM_DECL
1051 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
1052 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
1053 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
1054 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
1057 error_at (OMP_CLAUSE_LOCATION (c
),
1058 "implicit mapping of assumed size array %qD", decl
);
1062 tree c2
= NULL_TREE
, c3
= NULL_TREE
, c4
= NULL_TREE
;
1063 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
1065 if (!gfc_omp_privatize_by_reference (decl
)
1066 && !GFC_DECL_GET_SCALAR_POINTER (decl
)
1067 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1068 && !GFC_DECL_CRAY_POINTEE (decl
)
1069 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
1071 tree orig_decl
= decl
;
1072 c4
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1073 OMP_CLAUSE_SET_MAP_KIND (c4
, GOMP_MAP_POINTER
);
1074 OMP_CLAUSE_DECL (c4
) = decl
;
1075 OMP_CLAUSE_SIZE (c4
) = size_int (0);
1076 decl
= build_fold_indirect_ref (decl
);
1077 OMP_CLAUSE_DECL (c
) = decl
;
1078 OMP_CLAUSE_SIZE (c
) = NULL_TREE
;
1079 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1080 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1081 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1083 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1084 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1085 OMP_CLAUSE_DECL (c3
) = unshare_expr (decl
);
1086 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1087 decl
= build_fold_indirect_ref (decl
);
1088 OMP_CLAUSE_DECL (c
) = decl
;
1091 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1094 gfc_start_block (&block
);
1095 tree type
= TREE_TYPE (decl
);
1096 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1097 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
1098 ptr
= build_fold_indirect_ref (ptr
);
1099 OMP_CLAUSE_DECL (c
) = ptr
;
1100 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1101 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_TO_PSET
);
1102 OMP_CLAUSE_DECL (c2
) = decl
;
1103 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (type
);
1104 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1105 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1106 OMP_CLAUSE_DECL (c3
) = gfc_conv_descriptor_data_get (decl
);
1107 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1108 tree size
= create_tmp_var (gfc_array_index_type
);
1109 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
1110 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
1111 if (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
1112 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
)
1114 stmtblock_t cond_block
;
1115 tree tem
, then_b
, else_b
, zero
, cond
;
1117 gfc_init_block (&cond_block
);
1118 tem
= gfc_full_array_size (&cond_block
, decl
,
1119 GFC_TYPE_ARRAY_RANK (type
));
1120 gfc_add_modify (&cond_block
, size
, tem
);
1121 gfc_add_modify (&cond_block
, size
,
1122 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1124 then_b
= gfc_finish_block (&cond_block
);
1125 gfc_init_block (&cond_block
);
1126 zero
= build_int_cst (gfc_array_index_type
, 0);
1127 gfc_add_modify (&cond_block
, size
, zero
);
1128 else_b
= gfc_finish_block (&cond_block
);
1129 tem
= gfc_conv_descriptor_data_get (decl
);
1130 tem
= fold_convert (pvoid_type_node
, tem
);
1131 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1132 logical_type_node
, tem
, null_pointer_node
);
1133 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1134 void_type_node
, cond
,
1139 gfc_add_modify (&block
, size
,
1140 gfc_full_array_size (&block
, decl
,
1141 GFC_TYPE_ARRAY_RANK (type
)));
1142 gfc_add_modify (&block
, size
,
1143 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1146 OMP_CLAUSE_SIZE (c
) = size
;
1147 tree stmt
= gfc_finish_block (&block
);
1148 gimplify_and_add (stmt
, pre_p
);
1151 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
1153 = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
1154 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1157 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (last
);
1158 OMP_CLAUSE_CHAIN (last
) = c2
;
1163 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (last
);
1164 OMP_CLAUSE_CHAIN (last
) = c3
;
1169 OMP_CLAUSE_CHAIN (c4
) = OMP_CLAUSE_CHAIN (last
);
1170 OMP_CLAUSE_CHAIN (last
) = c4
;
1176 /* Return true if DECL is a scalar variable (for the purpose of
1177 implicit firstprivatization). */
1180 gfc_omp_scalar_p (tree decl
)
1182 tree type
= TREE_TYPE (decl
);
1183 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1184 type
= TREE_TYPE (type
);
1185 if (TREE_CODE (type
) == POINTER_TYPE
)
1187 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1188 || GFC_DECL_GET_SCALAR_POINTER (decl
))
1189 type
= TREE_TYPE (type
);
1190 if (GFC_ARRAY_TYPE_P (type
)
1191 || GFC_CLASS_TYPE_P (type
))
1194 if (TYPE_STRING_FLAG (type
))
1196 if (INTEGRAL_TYPE_P (type
)
1197 || SCALAR_FLOAT_TYPE_P (type
)
1198 || COMPLEX_FLOAT_TYPE_P (type
))
1204 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1205 disregarded in OpenMP construct, because it is going to be
1206 remapped during OpenMP lowering. SHARED is true if DECL
1207 is going to be shared, false if it is going to be privatized. */
1210 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
1212 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1213 && DECL_HAS_VALUE_EXPR_P (decl
))
1215 tree value
= DECL_VALUE_EXPR (decl
);
1217 if (TREE_CODE (value
) == COMPONENT_REF
1218 && VAR_P (TREE_OPERAND (value
, 0))
1219 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1221 /* If variable in COMMON or EQUIVALENCE is privatized, return
1222 true, as just that variable is supposed to be privatized,
1223 not the whole COMMON or whole EQUIVALENCE.
1224 For shared variables in COMMON or EQUIVALENCE, let them be
1225 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1226 from the same COMMON or EQUIVALENCE just one sharing of the
1227 whole COMMON or EQUIVALENCE is enough. */
1232 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
1238 /* Return true if DECL that is shared iff SHARED is true should
1239 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1243 gfc_omp_private_debug_clause (tree decl
, bool shared
)
1245 if (GFC_DECL_CRAY_POINTEE (decl
))
1248 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1249 && DECL_HAS_VALUE_EXPR_P (decl
))
1251 tree value
= DECL_VALUE_EXPR (decl
);
1253 if (TREE_CODE (value
) == COMPONENT_REF
1254 && VAR_P (TREE_OPERAND (value
, 0))
1255 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1262 /* Register language specific type size variables as potentially OpenMP
1263 firstprivate variables. */
1266 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
1268 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
1272 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
1273 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
1275 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
1276 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
1277 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
1279 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
1280 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
1286 gfc_trans_add_clause (tree node
, tree tail
)
1288 OMP_CLAUSE_CHAIN (node
) = tail
;
1293 gfc_trans_omp_variable (gfc_symbol
*sym
, bool declare_simd
)
1298 gfc_symbol
*proc_sym
;
1299 gfc_formal_arglist
*f
;
1301 gcc_assert (sym
->attr
.dummy
);
1302 proc_sym
= sym
->ns
->proc_name
;
1303 if (proc_sym
->attr
.entry_master
)
1305 if (gfc_return_by_reference (proc_sym
))
1308 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1311 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1317 return build_int_cst (integer_type_node
, cnt
);
1320 tree t
= gfc_get_symbol_decl (sym
);
1324 bool alternate_entry
;
1327 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1328 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1329 && sym
->result
== sym
;
1330 entry_master
= sym
->attr
.result
1331 && sym
->ns
->proc_name
->attr
.entry_master
1332 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1333 parent_decl
= current_function_decl
1334 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1336 if ((t
== parent_decl
&& return_value
)
1337 || (sym
->ns
&& sym
->ns
->proc_name
1338 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1339 && (alternate_entry
|| entry_master
)))
1344 /* Special case for assigning the return value of a function.
1345 Self recursive functions must have an explicit return value. */
1346 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1347 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1349 /* Similarly for alternate entry points. */
1350 else if (alternate_entry
1351 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1354 gfc_entry_list
*el
= NULL
;
1356 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1359 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1364 else if (entry_master
1365 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1367 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1373 gfc_trans_omp_variable_list (enum omp_clause_code code
,
1374 gfc_omp_namelist
*namelist
, tree list
,
1377 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1378 if (namelist
->sym
->attr
.referenced
|| declare_simd
)
1380 tree t
= gfc_trans_omp_variable (namelist
->sym
, declare_simd
);
1381 if (t
!= error_mark_node
)
1383 tree node
= build_omp_clause (input_location
, code
);
1384 OMP_CLAUSE_DECL (node
) = t
;
1385 list
= gfc_trans_add_clause (node
, list
);
1391 struct omp_udr_find_orig_data
1393 gfc_omp_udr
*omp_udr
;
1398 omp_udr_find_orig (gfc_expr
**e
, int *walk_subtrees ATTRIBUTE_UNUSED
,
1401 struct omp_udr_find_orig_data
*cd
= (struct omp_udr_find_orig_data
*) data
;
1402 if ((*e
)->expr_type
== EXPR_VARIABLE
1403 && (*e
)->symtree
->n
.sym
== cd
->omp_udr
->omp_orig
)
1404 cd
->omp_orig_seen
= true;
1410 gfc_trans_omp_array_reduction_or_udr (tree c
, gfc_omp_namelist
*n
, locus where
)
1412 gfc_symbol
*sym
= n
->sym
;
1413 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
1414 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
1415 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
1416 gfc_symbol omp_var_copy
[4];
1417 gfc_expr
*e1
, *e2
, *e3
, *e4
;
1419 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
1420 locus old_loc
= gfc_current_locus
;
1423 gfc_omp_udr
*udr
= n
->udr
? n
->udr
->udr
: NULL
;
1425 decl
= OMP_CLAUSE_DECL (c
);
1426 gfc_current_locus
= where
;
1427 type
= TREE_TYPE (decl
);
1428 outer_decl
= create_tmp_var_raw (type
);
1429 if (TREE_CODE (decl
) == PARM_DECL
1430 && TREE_CODE (type
) == REFERENCE_TYPE
1431 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
1432 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
1434 decl
= build_fold_indirect_ref (decl
);
1435 type
= TREE_TYPE (type
);
1438 /* Create a fake symbol for init value. */
1439 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
1440 init_val_sym
.ns
= sym
->ns
;
1441 init_val_sym
.name
= sym
->name
;
1442 init_val_sym
.ts
= sym
->ts
;
1443 init_val_sym
.attr
.referenced
= 1;
1444 init_val_sym
.declared_at
= where
;
1445 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
1446 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1447 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
1448 else if (udr
->initializer_ns
)
1449 backend_decl
= NULL
;
1451 switch (sym
->ts
.type
)
1457 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
1460 backend_decl
= NULL_TREE
;
1463 init_val_sym
.backend_decl
= backend_decl
;
1465 /* Create a fake symbol for the outer array reference. */
1468 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
1469 outer_sym
.attr
.dummy
= 0;
1470 outer_sym
.attr
.result
= 0;
1471 outer_sym
.attr
.flavor
= FL_VARIABLE
;
1472 outer_sym
.backend_decl
= outer_decl
;
1473 if (decl
!= OMP_CLAUSE_DECL (c
))
1474 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
1476 /* Create fake symtrees for it. */
1477 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
1478 symtree1
->n
.sym
= sym
;
1479 gcc_assert (symtree1
== root1
);
1481 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
1482 symtree2
->n
.sym
= &init_val_sym
;
1483 gcc_assert (symtree2
== root2
);
1485 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
1486 symtree3
->n
.sym
= &outer_sym
;
1487 gcc_assert (symtree3
== root3
);
1489 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
1492 omp_var_copy
[0] = *udr
->omp_out
;
1493 omp_var_copy
[1] = *udr
->omp_in
;
1494 *udr
->omp_out
= outer_sym
;
1495 *udr
->omp_in
= *sym
;
1496 if (udr
->initializer_ns
)
1498 omp_var_copy
[2] = *udr
->omp_priv
;
1499 omp_var_copy
[3] = *udr
->omp_orig
;
1500 *udr
->omp_priv
= *sym
;
1501 *udr
->omp_orig
= outer_sym
;
1505 /* Create expressions. */
1506 e1
= gfc_get_expr ();
1507 e1
->expr_type
= EXPR_VARIABLE
;
1509 e1
->symtree
= symtree1
;
1511 if (sym
->attr
.dimension
)
1513 e1
->ref
= ref
= gfc_get_ref ();
1514 ref
->type
= REF_ARRAY
;
1515 ref
->u
.ar
.where
= where
;
1516 ref
->u
.ar
.as
= sym
->as
;
1517 ref
->u
.ar
.type
= AR_FULL
;
1518 ref
->u
.ar
.dimen
= 0;
1520 t
= gfc_resolve_expr (e1
);
1524 if (backend_decl
!= NULL_TREE
)
1526 e2
= gfc_get_expr ();
1527 e2
->expr_type
= EXPR_VARIABLE
;
1529 e2
->symtree
= symtree2
;
1531 t
= gfc_resolve_expr (e2
);
1534 else if (udr
->initializer_ns
== NULL
)
1536 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
1537 e2
= gfc_default_initializer (&sym
->ts
);
1539 t
= gfc_resolve_expr (e2
);
1542 else if (n
->udr
->initializer
->op
== EXEC_ASSIGN
)
1544 e2
= gfc_copy_expr (n
->udr
->initializer
->expr2
);
1545 t
= gfc_resolve_expr (e2
);
1548 if (udr
&& udr
->initializer_ns
)
1550 struct omp_udr_find_orig_data cd
;
1552 cd
.omp_orig_seen
= false;
1553 gfc_code_walker (&n
->udr
->initializer
,
1554 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
1555 if (cd
.omp_orig_seen
)
1556 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
1559 e3
= gfc_copy_expr (e1
);
1560 e3
->symtree
= symtree3
;
1561 t
= gfc_resolve_expr (e3
);
1566 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
1570 e4
= gfc_add (e3
, e1
);
1573 e4
= gfc_multiply (e3
, e1
);
1575 case TRUTH_ANDIF_EXPR
:
1576 e4
= gfc_and (e3
, e1
);
1578 case TRUTH_ORIF_EXPR
:
1579 e4
= gfc_or (e3
, e1
);
1582 e4
= gfc_eqv (e3
, e1
);
1585 e4
= gfc_neqv (e3
, e1
);
1603 if (n
->udr
->combiner
->op
== EXEC_ASSIGN
)
1606 e3
= gfc_copy_expr (n
->udr
->combiner
->expr1
);
1607 e4
= gfc_copy_expr (n
->udr
->combiner
->expr2
);
1608 t
= gfc_resolve_expr (e3
);
1610 t
= gfc_resolve_expr (e4
);
1619 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
1620 intrinsic_sym
.ns
= sym
->ns
;
1621 intrinsic_sym
.name
= iname
;
1622 intrinsic_sym
.ts
= sym
->ts
;
1623 intrinsic_sym
.attr
.referenced
= 1;
1624 intrinsic_sym
.attr
.intrinsic
= 1;
1625 intrinsic_sym
.attr
.function
= 1;
1626 intrinsic_sym
.attr
.implicit_type
= 1;
1627 intrinsic_sym
.result
= &intrinsic_sym
;
1628 intrinsic_sym
.declared_at
= where
;
1630 symtree4
= gfc_new_symtree (&root4
, iname
);
1631 symtree4
->n
.sym
= &intrinsic_sym
;
1632 gcc_assert (symtree4
== root4
);
1634 e4
= gfc_get_expr ();
1635 e4
->expr_type
= EXPR_FUNCTION
;
1637 e4
->symtree
= symtree4
;
1638 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
1639 e4
->value
.function
.actual
->expr
= e3
;
1640 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
1641 e4
->value
.function
.actual
->next
->expr
= e1
;
1643 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1645 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1646 e1
= gfc_copy_expr (e1
);
1647 e3
= gfc_copy_expr (e3
);
1648 t
= gfc_resolve_expr (e4
);
1652 /* Create the init statement list. */
1655 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
1657 stmt
= gfc_trans_call (n
->udr
->initializer
, false,
1658 NULL_TREE
, NULL_TREE
, false);
1659 if (TREE_CODE (stmt
) != BIND_EXPR
)
1660 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1663 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
1665 /* Create the merge statement list. */
1668 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
1670 stmt
= gfc_trans_call (n
->udr
->combiner
, false,
1671 NULL_TREE
, NULL_TREE
, false);
1672 if (TREE_CODE (stmt
) != BIND_EXPR
)
1673 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1676 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
1678 /* And stick the placeholder VAR_DECL into the clause as well. */
1679 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
1681 gfc_current_locus
= old_loc
;
1694 gfc_free_array_spec (outer_sym
.as
);
1698 *udr
->omp_out
= omp_var_copy
[0];
1699 *udr
->omp_in
= omp_var_copy
[1];
1700 if (udr
->initializer_ns
)
1702 *udr
->omp_priv
= omp_var_copy
[2];
1703 *udr
->omp_orig
= omp_var_copy
[3];
1709 gfc_trans_omp_reduction_list (gfc_omp_namelist
*namelist
, tree list
,
1710 locus where
, bool mark_addressable
)
1712 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1713 if (namelist
->sym
->attr
.referenced
)
1715 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
1716 if (t
!= error_mark_node
)
1718 tree node
= build_omp_clause (where
.lb
->location
,
1719 OMP_CLAUSE_REDUCTION
);
1720 OMP_CLAUSE_DECL (node
) = t
;
1721 if (mark_addressable
)
1722 TREE_ADDRESSABLE (t
) = 1;
1723 switch (namelist
->u
.reduction_op
)
1725 case OMP_REDUCTION_PLUS
:
1726 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
1728 case OMP_REDUCTION_MINUS
:
1729 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
1731 case OMP_REDUCTION_TIMES
:
1732 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
1734 case OMP_REDUCTION_AND
:
1735 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
1737 case OMP_REDUCTION_OR
:
1738 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
1740 case OMP_REDUCTION_EQV
:
1741 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
1743 case OMP_REDUCTION_NEQV
:
1744 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
1746 case OMP_REDUCTION_MAX
:
1747 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
1749 case OMP_REDUCTION_MIN
:
1750 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
1752 case OMP_REDUCTION_IAND
:
1753 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
1755 case OMP_REDUCTION_IOR
:
1756 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
1758 case OMP_REDUCTION_IEOR
:
1759 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
1761 case OMP_REDUCTION_USER
:
1762 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
1767 if (namelist
->sym
->attr
.dimension
1768 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
1769 || namelist
->sym
->attr
.allocatable
)
1770 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
1771 list
= gfc_trans_add_clause (node
, list
);
1778 gfc_convert_expr_to_tree (stmtblock_t
*block
, gfc_expr
*expr
)
1783 gfc_init_se (&se
, NULL
);
1784 gfc_conv_expr (&se
, expr
);
1785 gfc_add_block_to_block (block
, &se
.pre
);
1786 result
= gfc_evaluate_now (se
.expr
, block
);
1787 gfc_add_block_to_block (block
, &se
.post
);
1792 static vec
<tree
, va_heap
, vl_embed
> *doacross_steps
;
1795 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
1796 locus where
, bool declare_simd
= false)
1798 tree omp_clauses
= NULL_TREE
, chunk_size
, c
;
1800 enum omp_clause_code clause_code
;
1803 if (clauses
== NULL
)
1806 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
1808 gfc_omp_namelist
*n
= clauses
->lists
[list
];
1814 case OMP_LIST_REDUCTION
:
1815 /* An OpenACC async clause indicates the need to set reduction
1816 arguments addressable, to allow asynchronous copy-out. */
1817 omp_clauses
= gfc_trans_omp_reduction_list (n
, omp_clauses
, where
,
1820 case OMP_LIST_PRIVATE
:
1821 clause_code
= OMP_CLAUSE_PRIVATE
;
1823 case OMP_LIST_SHARED
:
1824 clause_code
= OMP_CLAUSE_SHARED
;
1826 case OMP_LIST_FIRSTPRIVATE
:
1827 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
1829 case OMP_LIST_LASTPRIVATE
:
1830 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
1832 case OMP_LIST_COPYIN
:
1833 clause_code
= OMP_CLAUSE_COPYIN
;
1835 case OMP_LIST_COPYPRIVATE
:
1836 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
1838 case OMP_LIST_UNIFORM
:
1839 clause_code
= OMP_CLAUSE_UNIFORM
;
1841 case OMP_LIST_USE_DEVICE
:
1842 case OMP_LIST_USE_DEVICE_PTR
:
1843 clause_code
= OMP_CLAUSE_USE_DEVICE_PTR
;
1845 case OMP_LIST_IS_DEVICE_PTR
:
1846 clause_code
= OMP_CLAUSE_IS_DEVICE_PTR
;
1851 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
1854 case OMP_LIST_ALIGNED
:
1855 for (; n
!= NULL
; n
= n
->next
)
1856 if (n
->sym
->attr
.referenced
|| declare_simd
)
1858 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1859 if (t
!= error_mark_node
)
1861 tree node
= build_omp_clause (input_location
,
1862 OMP_CLAUSE_ALIGNED
);
1863 OMP_CLAUSE_DECL (node
) = t
;
1869 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
1872 gfc_init_se (&se
, NULL
);
1873 gfc_conv_expr (&se
, n
->expr
);
1874 gfc_add_block_to_block (block
, &se
.pre
);
1875 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
1876 gfc_add_block_to_block (block
, &se
.post
);
1878 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
1880 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1884 case OMP_LIST_LINEAR
:
1886 gfc_expr
*last_step_expr
= NULL
;
1887 tree last_step
= NULL_TREE
;
1888 bool last_step_parm
= false;
1890 for (; n
!= NULL
; n
= n
->next
)
1894 last_step_expr
= n
->expr
;
1895 last_step
= NULL_TREE
;
1896 last_step_parm
= false;
1898 if (n
->sym
->attr
.referenced
|| declare_simd
)
1900 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1901 if (t
!= error_mark_node
)
1903 tree node
= build_omp_clause (input_location
,
1905 OMP_CLAUSE_DECL (node
) = t
;
1906 omp_clause_linear_kind kind
;
1907 switch (n
->u
.linear_op
)
1909 case OMP_LINEAR_DEFAULT
:
1910 kind
= OMP_CLAUSE_LINEAR_DEFAULT
;
1912 case OMP_LINEAR_REF
:
1913 kind
= OMP_CLAUSE_LINEAR_REF
;
1915 case OMP_LINEAR_VAL
:
1916 kind
= OMP_CLAUSE_LINEAR_VAL
;
1918 case OMP_LINEAR_UVAL
:
1919 kind
= OMP_CLAUSE_LINEAR_UVAL
;
1924 OMP_CLAUSE_LINEAR_KIND (node
) = kind
;
1925 if (last_step_expr
&& last_step
== NULL_TREE
)
1929 gfc_init_se (&se
, NULL
);
1930 gfc_conv_expr (&se
, last_step_expr
);
1931 gfc_add_block_to_block (block
, &se
.pre
);
1932 last_step
= gfc_evaluate_now (se
.expr
, block
);
1933 gfc_add_block_to_block (block
, &se
.post
);
1935 else if (last_step_expr
->expr_type
== EXPR_VARIABLE
)
1937 gfc_symbol
*s
= last_step_expr
->symtree
->n
.sym
;
1938 last_step
= gfc_trans_omp_variable (s
, true);
1939 last_step_parm
= true;
1943 = gfc_conv_constant_to_tree (last_step_expr
);
1947 OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (node
) = 1;
1948 OMP_CLAUSE_LINEAR_STEP (node
) = last_step
;
1952 if (kind
== OMP_CLAUSE_LINEAR_REF
)
1955 if (n
->sym
->attr
.flavor
== FL_PROCEDURE
)
1957 type
= gfc_get_function_type (n
->sym
);
1958 type
= build_pointer_type (type
);
1961 type
= gfc_sym_type (n
->sym
);
1962 if (POINTER_TYPE_P (type
))
1963 type
= TREE_TYPE (type
);
1964 /* Otherwise to be determined what exactly
1966 tree t
= fold_convert (sizetype
, last_step
);
1967 t
= size_binop (MULT_EXPR
, t
,
1968 TYPE_SIZE_UNIT (type
));
1969 OMP_CLAUSE_LINEAR_STEP (node
) = t
;
1974 = gfc_typenode_for_spec (&n
->sym
->ts
);
1975 OMP_CLAUSE_LINEAR_STEP (node
)
1976 = fold_convert (type
, last_step
);
1979 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
1980 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
1981 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1987 case OMP_LIST_DEPEND
:
1988 for (; n
!= NULL
; n
= n
->next
)
1990 if (n
->u
.depend_op
== OMP_DEPEND_SINK_FIRST
)
1992 tree vec
= NULL_TREE
;
1996 tree addend
= integer_zero_node
, t
;
2000 addend
= gfc_conv_constant_to_tree (n
->expr
);
2001 if (TREE_CODE (addend
) == INTEGER_CST
2002 && tree_int_cst_sgn (addend
) == -1)
2005 addend
= const_unop (NEGATE_EXPR
,
2006 TREE_TYPE (addend
), addend
);
2009 t
= gfc_trans_omp_variable (n
->sym
, false);
2010 if (t
!= error_mark_node
)
2012 if (i
< vec_safe_length (doacross_steps
)
2013 && !integer_zerop (addend
)
2014 && (*doacross_steps
)[i
])
2016 tree step
= (*doacross_steps
)[i
];
2017 addend
= fold_convert (TREE_TYPE (step
), addend
);
2018 addend
= build2 (TRUNC_DIV_EXPR
,
2019 TREE_TYPE (step
), addend
, step
);
2021 vec
= tree_cons (addend
, t
, vec
);
2023 OMP_CLAUSE_DEPEND_SINK_NEGATIVE (vec
) = 1;
2026 || n
->next
->u
.depend_op
!= OMP_DEPEND_SINK
)
2030 if (vec
== NULL_TREE
)
2033 tree node
= build_omp_clause (input_location
,
2035 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_SINK
;
2036 OMP_CLAUSE_DECL (node
) = nreverse (vec
);
2037 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2041 if (!n
->sym
->attr
.referenced
)
2044 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_DEPEND
);
2045 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2047 tree decl
= gfc_get_symbol_decl (n
->sym
);
2048 if (gfc_omp_privatize_by_reference (decl
))
2049 decl
= build_fold_indirect_ref (decl
);
2050 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2052 decl
= gfc_conv_descriptor_data_get (decl
);
2053 decl
= fold_convert (build_pointer_type (char_type_node
),
2055 decl
= build_fold_indirect_ref (decl
);
2057 else if (DECL_P (decl
))
2058 TREE_ADDRESSABLE (decl
) = 1;
2059 OMP_CLAUSE_DECL (node
) = decl
;
2064 gfc_init_se (&se
, NULL
);
2065 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2067 gfc_conv_expr_reference (&se
, n
->expr
);
2072 gfc_conv_expr_descriptor (&se
, n
->expr
);
2073 ptr
= gfc_conv_array_data (se
.expr
);
2075 gfc_add_block_to_block (block
, &se
.pre
);
2076 gfc_add_block_to_block (block
, &se
.post
);
2077 ptr
= fold_convert (build_pointer_type (char_type_node
),
2079 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2081 switch (n
->u
.depend_op
)
2084 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
2086 case OMP_DEPEND_OUT
:
2087 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
2089 case OMP_DEPEND_INOUT
:
2090 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
2095 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2099 for (; n
!= NULL
; n
= n
->next
)
2101 if (!n
->sym
->attr
.referenced
)
2104 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2105 tree node2
= NULL_TREE
;
2106 tree node3
= NULL_TREE
;
2107 tree node4
= NULL_TREE
;
2108 tree decl
= gfc_get_symbol_decl (n
->sym
);
2110 TREE_ADDRESSABLE (decl
) = 1;
2111 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2113 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2114 && (gfc_omp_privatize_by_reference (decl
)
2115 || GFC_DECL_GET_SCALAR_POINTER (decl
)
2116 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
2117 || GFC_DECL_CRAY_POINTEE (decl
)
2118 || GFC_DESCRIPTOR_TYPE_P
2119 (TREE_TYPE (TREE_TYPE (decl
)))))
2121 tree orig_decl
= decl
;
2122 node4
= build_omp_clause (input_location
,
2124 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2125 OMP_CLAUSE_DECL (node4
) = decl
;
2126 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2127 decl
= build_fold_indirect_ref (decl
);
2128 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
2129 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
2130 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
2132 node3
= build_omp_clause (input_location
,
2134 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2135 OMP_CLAUSE_DECL (node3
) = decl
;
2136 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2137 decl
= build_fold_indirect_ref (decl
);
2140 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2142 tree type
= TREE_TYPE (decl
);
2143 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2144 ptr
= fold_convert (build_pointer_type (char_type_node
),
2146 ptr
= build_fold_indirect_ref (ptr
);
2147 OMP_CLAUSE_DECL (node
) = ptr
;
2148 node2
= build_omp_clause (input_location
,
2150 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2151 OMP_CLAUSE_DECL (node2
) = decl
;
2152 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2153 node3
= build_omp_clause (input_location
,
2155 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2156 OMP_CLAUSE_DECL (node3
)
2157 = gfc_conv_descriptor_data_get (decl
);
2158 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2160 /* We have to check for n->sym->attr.dimension because
2161 of scalar coarrays. */
2162 if (n
->sym
->attr
.pointer
&& n
->sym
->attr
.dimension
)
2164 stmtblock_t cond_block
;
2166 = gfc_create_var (gfc_array_index_type
, NULL
);
2167 tree tem
, then_b
, else_b
, zero
, cond
;
2169 gfc_init_block (&cond_block
);
2171 = gfc_full_array_size (&cond_block
, decl
,
2172 GFC_TYPE_ARRAY_RANK (type
));
2173 gfc_add_modify (&cond_block
, size
, tem
);
2174 then_b
= gfc_finish_block (&cond_block
);
2175 gfc_init_block (&cond_block
);
2176 zero
= build_int_cst (gfc_array_index_type
, 0);
2177 gfc_add_modify (&cond_block
, size
, zero
);
2178 else_b
= gfc_finish_block (&cond_block
);
2179 tem
= gfc_conv_descriptor_data_get (decl
);
2180 tem
= fold_convert (pvoid_type_node
, tem
);
2181 cond
= fold_build2_loc (input_location
, NE_EXPR
,
2183 tem
, null_pointer_node
);
2184 gfc_add_expr_to_block (block
,
2185 build3_loc (input_location
,
2190 OMP_CLAUSE_SIZE (node
) = size
;
2192 else if (n
->sym
->attr
.dimension
)
2193 OMP_CLAUSE_SIZE (node
)
2194 = gfc_full_array_size (block
, decl
,
2195 GFC_TYPE_ARRAY_RANK (type
));
2196 if (n
->sym
->attr
.dimension
)
2199 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2200 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2201 OMP_CLAUSE_SIZE (node
)
2202 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2203 OMP_CLAUSE_SIZE (node
), elemsz
);
2207 OMP_CLAUSE_DECL (node
) = decl
;
2212 gfc_init_se (&se
, NULL
);
2213 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2215 gfc_conv_expr_reference (&se
, n
->expr
);
2216 gfc_add_block_to_block (block
, &se
.pre
);
2218 OMP_CLAUSE_SIZE (node
)
2219 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2223 gfc_conv_expr_descriptor (&se
, n
->expr
);
2224 ptr
= gfc_conv_array_data (se
.expr
);
2225 tree type
= TREE_TYPE (se
.expr
);
2226 gfc_add_block_to_block (block
, &se
.pre
);
2227 OMP_CLAUSE_SIZE (node
)
2228 = gfc_full_array_size (block
, se
.expr
,
2229 GFC_TYPE_ARRAY_RANK (type
));
2231 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2232 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2233 OMP_CLAUSE_SIZE (node
)
2234 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2235 OMP_CLAUSE_SIZE (node
), elemsz
);
2237 gfc_add_block_to_block (block
, &se
.post
);
2238 ptr
= fold_convert (build_pointer_type (char_type_node
),
2240 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2242 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2243 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
2245 node4
= build_omp_clause (input_location
,
2247 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2248 OMP_CLAUSE_DECL (node4
) = decl
;
2249 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2250 decl
= build_fold_indirect_ref (decl
);
2252 ptr
= fold_convert (sizetype
, ptr
);
2253 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2255 tree type
= TREE_TYPE (decl
);
2256 ptr2
= gfc_conv_descriptor_data_get (decl
);
2257 node2
= build_omp_clause (input_location
,
2259 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2260 OMP_CLAUSE_DECL (node2
) = decl
;
2261 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2262 node3
= build_omp_clause (input_location
,
2264 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2265 OMP_CLAUSE_DECL (node3
)
2266 = gfc_conv_descriptor_data_get (decl
);
2270 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2271 ptr2
= build_fold_addr_expr (decl
);
2274 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2277 node3
= build_omp_clause (input_location
,
2279 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2280 OMP_CLAUSE_DECL (node3
) = decl
;
2282 ptr2
= fold_convert (sizetype
, ptr2
);
2283 OMP_CLAUSE_SIZE (node3
)
2284 = fold_build2 (MINUS_EXPR
, sizetype
, ptr
, ptr2
);
2286 switch (n
->u
.map_op
)
2289 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALLOC
);
2292 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TO
);
2295 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FROM
);
2297 case OMP_MAP_TOFROM
:
2298 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TOFROM
);
2300 case OMP_MAP_ALWAYS_TO
:
2301 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TO
);
2303 case OMP_MAP_ALWAYS_FROM
:
2304 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_FROM
);
2306 case OMP_MAP_ALWAYS_TOFROM
:
2307 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TOFROM
);
2309 case OMP_MAP_RELEASE
:
2310 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_RELEASE
);
2312 case OMP_MAP_DELETE
:
2313 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DELETE
);
2315 case OMP_MAP_FORCE_ALLOC
:
2316 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_ALLOC
);
2318 case OMP_MAP_FORCE_TO
:
2319 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TO
);
2321 case OMP_MAP_FORCE_FROM
:
2322 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_FROM
);
2324 case OMP_MAP_FORCE_TOFROM
:
2325 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TOFROM
);
2327 case OMP_MAP_FORCE_PRESENT
:
2328 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_PRESENT
);
2330 case OMP_MAP_FORCE_DEVICEPTR
:
2331 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEVICEPTR
);
2336 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2338 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
2340 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
2342 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
2347 case OMP_LIST_CACHE
:
2348 for (; n
!= NULL
; n
= n
->next
)
2350 if (!n
->sym
->attr
.referenced
)
2356 clause_code
= OMP_CLAUSE_TO
;
2359 clause_code
= OMP_CLAUSE_FROM
;
2361 case OMP_LIST_CACHE
:
2362 clause_code
= OMP_CLAUSE__CACHE_
;
2367 tree node
= build_omp_clause (input_location
, clause_code
);
2368 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2370 tree decl
= gfc_get_symbol_decl (n
->sym
);
2371 if (gfc_omp_privatize_by_reference (decl
))
2372 decl
= build_fold_indirect_ref (decl
);
2373 else if (DECL_P (decl
))
2374 TREE_ADDRESSABLE (decl
) = 1;
2375 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2377 tree type
= TREE_TYPE (decl
);
2378 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2379 ptr
= fold_convert (build_pointer_type (char_type_node
),
2381 ptr
= build_fold_indirect_ref (ptr
);
2382 OMP_CLAUSE_DECL (node
) = ptr
;
2383 OMP_CLAUSE_SIZE (node
)
2384 = gfc_full_array_size (block
, decl
,
2385 GFC_TYPE_ARRAY_RANK (type
));
2387 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2388 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2389 OMP_CLAUSE_SIZE (node
)
2390 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2391 OMP_CLAUSE_SIZE (node
), elemsz
);
2394 OMP_CLAUSE_DECL (node
) = decl
;
2399 gfc_init_se (&se
, NULL
);
2400 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2402 gfc_conv_expr_reference (&se
, n
->expr
);
2404 gfc_add_block_to_block (block
, &se
.pre
);
2405 OMP_CLAUSE_SIZE (node
)
2406 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2410 gfc_conv_expr_descriptor (&se
, n
->expr
);
2411 ptr
= gfc_conv_array_data (se
.expr
);
2412 tree type
= TREE_TYPE (se
.expr
);
2413 gfc_add_block_to_block (block
, &se
.pre
);
2414 OMP_CLAUSE_SIZE (node
)
2415 = gfc_full_array_size (block
, se
.expr
,
2416 GFC_TYPE_ARRAY_RANK (type
));
2418 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2419 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2420 OMP_CLAUSE_SIZE (node
)
2421 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2422 OMP_CLAUSE_SIZE (node
), elemsz
);
2424 gfc_add_block_to_block (block
, &se
.post
);
2425 ptr
= fold_convert (build_pointer_type (char_type_node
),
2427 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2429 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2437 if (clauses
->if_expr
)
2441 gfc_init_se (&se
, NULL
);
2442 gfc_conv_expr (&se
, clauses
->if_expr
);
2443 gfc_add_block_to_block (block
, &se
.pre
);
2444 if_var
= gfc_evaluate_now (se
.expr
, block
);
2445 gfc_add_block_to_block (block
, &se
.post
);
2447 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
2448 OMP_CLAUSE_IF_MODIFIER (c
) = ERROR_MARK
;
2449 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2450 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2452 for (ifc
= 0; ifc
< OMP_IF_LAST
; ifc
++)
2453 if (clauses
->if_exprs
[ifc
])
2457 gfc_init_se (&se
, NULL
);
2458 gfc_conv_expr (&se
, clauses
->if_exprs
[ifc
]);
2459 gfc_add_block_to_block (block
, &se
.pre
);
2460 if_var
= gfc_evaluate_now (se
.expr
, block
);
2461 gfc_add_block_to_block (block
, &se
.post
);
2463 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
2466 case OMP_IF_PARALLEL
:
2467 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_PARALLEL
;
2470 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASK
;
2472 case OMP_IF_TASKLOOP
:
2473 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASKLOOP
;
2476 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET
;
2478 case OMP_IF_TARGET_DATA
:
2479 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_DATA
;
2481 case OMP_IF_TARGET_UPDATE
:
2482 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_UPDATE
;
2484 case OMP_IF_TARGET_ENTER_DATA
:
2485 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_ENTER_DATA
;
2487 case OMP_IF_TARGET_EXIT_DATA
:
2488 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_EXIT_DATA
;
2493 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2494 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2497 if (clauses
->final_expr
)
2501 gfc_init_se (&se
, NULL
);
2502 gfc_conv_expr (&se
, clauses
->final_expr
);
2503 gfc_add_block_to_block (block
, &se
.pre
);
2504 final_var
= gfc_evaluate_now (se
.expr
, block
);
2505 gfc_add_block_to_block (block
, &se
.post
);
2507 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FINAL
);
2508 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
2509 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2512 if (clauses
->num_threads
)
2516 gfc_init_se (&se
, NULL
);
2517 gfc_conv_expr (&se
, clauses
->num_threads
);
2518 gfc_add_block_to_block (block
, &se
.pre
);
2519 num_threads
= gfc_evaluate_now (se
.expr
, block
);
2520 gfc_add_block_to_block (block
, &se
.post
);
2522 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_THREADS
);
2523 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
2524 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2527 chunk_size
= NULL_TREE
;
2528 if (clauses
->chunk_size
)
2530 gfc_init_se (&se
, NULL
);
2531 gfc_conv_expr (&se
, clauses
->chunk_size
);
2532 gfc_add_block_to_block (block
, &se
.pre
);
2533 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2534 gfc_add_block_to_block (block
, &se
.post
);
2537 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
2539 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SCHEDULE
);
2540 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2541 switch (clauses
->sched_kind
)
2543 case OMP_SCHED_STATIC
:
2544 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
2546 case OMP_SCHED_DYNAMIC
:
2547 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
2549 case OMP_SCHED_GUIDED
:
2550 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
2552 case OMP_SCHED_RUNTIME
:
2553 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
2555 case OMP_SCHED_AUTO
:
2556 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
2561 if (clauses
->sched_monotonic
)
2562 OMP_CLAUSE_SCHEDULE_KIND (c
)
2563 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
2564 | OMP_CLAUSE_SCHEDULE_MONOTONIC
);
2565 else if (clauses
->sched_nonmonotonic
)
2566 OMP_CLAUSE_SCHEDULE_KIND (c
)
2567 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
2568 | OMP_CLAUSE_SCHEDULE_NONMONOTONIC
);
2569 if (clauses
->sched_simd
)
2570 OMP_CLAUSE_SCHEDULE_SIMD (c
) = 1;
2571 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2574 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
2576 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULT
);
2577 switch (clauses
->default_sharing
)
2579 case OMP_DEFAULT_NONE
:
2580 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
2582 case OMP_DEFAULT_SHARED
:
2583 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
2585 case OMP_DEFAULT_PRIVATE
:
2586 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
2588 case OMP_DEFAULT_FIRSTPRIVATE
:
2589 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
2591 case OMP_DEFAULT_PRESENT
:
2592 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRESENT
;
2597 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2600 if (clauses
->nowait
)
2602 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOWAIT
);
2603 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2606 if (clauses
->ordered
)
2608 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ORDERED
);
2609 OMP_CLAUSE_ORDERED_EXPR (c
)
2610 = clauses
->orderedc
? build_int_cst (integer_type_node
,
2611 clauses
->orderedc
) : NULL_TREE
;
2612 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2615 if (clauses
->untied
)
2617 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_UNTIED
);
2618 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2621 if (clauses
->mergeable
)
2623 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_MERGEABLE
);
2624 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2627 if (clauses
->collapse
)
2629 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_COLLAPSE
);
2630 OMP_CLAUSE_COLLAPSE_EXPR (c
)
2631 = build_int_cst (integer_type_node
, clauses
->collapse
);
2632 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2635 if (clauses
->inbranch
)
2637 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INBRANCH
);
2638 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2641 if (clauses
->notinbranch
)
2643 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOTINBRANCH
);
2644 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2647 switch (clauses
->cancel
)
2649 case OMP_CANCEL_UNKNOWN
:
2651 case OMP_CANCEL_PARALLEL
:
2652 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PARALLEL
);
2653 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2655 case OMP_CANCEL_SECTIONS
:
2656 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SECTIONS
);
2657 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2660 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FOR
);
2661 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2663 case OMP_CANCEL_TASKGROUP
:
2664 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TASKGROUP
);
2665 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2669 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
2671 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PROC_BIND
);
2672 switch (clauses
->proc_bind
)
2674 case OMP_PROC_BIND_MASTER
:
2675 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
2677 case OMP_PROC_BIND_SPREAD
:
2678 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
2680 case OMP_PROC_BIND_CLOSE
:
2681 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
2686 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2689 if (clauses
->safelen_expr
)
2693 gfc_init_se (&se
, NULL
);
2694 gfc_conv_expr (&se
, clauses
->safelen_expr
);
2695 gfc_add_block_to_block (block
, &se
.pre
);
2696 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
2697 gfc_add_block_to_block (block
, &se
.post
);
2699 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SAFELEN
);
2700 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
2701 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2704 if (clauses
->simdlen_expr
)
2708 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2709 OMP_CLAUSE_SIMDLEN_EXPR (c
)
2710 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
2711 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2717 gfc_init_se (&se
, NULL
);
2718 gfc_conv_expr (&se
, clauses
->simdlen_expr
);
2719 gfc_add_block_to_block (block
, &se
.pre
);
2720 simdlen_var
= gfc_evaluate_now (se
.expr
, block
);
2721 gfc_add_block_to_block (block
, &se
.post
);
2723 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2724 OMP_CLAUSE_SIMDLEN_EXPR (c
) = simdlen_var
;
2725 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2729 if (clauses
->num_teams
)
2733 gfc_init_se (&se
, NULL
);
2734 gfc_conv_expr (&se
, clauses
->num_teams
);
2735 gfc_add_block_to_block (block
, &se
.pre
);
2736 num_teams
= gfc_evaluate_now (se
.expr
, block
);
2737 gfc_add_block_to_block (block
, &se
.post
);
2739 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TEAMS
);
2740 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
2741 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2744 if (clauses
->device
)
2748 gfc_init_se (&se
, NULL
);
2749 gfc_conv_expr (&se
, clauses
->device
);
2750 gfc_add_block_to_block (block
, &se
.pre
);
2751 device
= gfc_evaluate_now (se
.expr
, block
);
2752 gfc_add_block_to_block (block
, &se
.post
);
2754 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEVICE
);
2755 OMP_CLAUSE_DEVICE_ID (c
) = device
;
2756 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2759 if (clauses
->thread_limit
)
2763 gfc_init_se (&se
, NULL
);
2764 gfc_conv_expr (&se
, clauses
->thread_limit
);
2765 gfc_add_block_to_block (block
, &se
.pre
);
2766 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
2767 gfc_add_block_to_block (block
, &se
.post
);
2769 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREAD_LIMIT
);
2770 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
2771 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2774 chunk_size
= NULL_TREE
;
2775 if (clauses
->dist_chunk_size
)
2777 gfc_init_se (&se
, NULL
);
2778 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
2779 gfc_add_block_to_block (block
, &se
.pre
);
2780 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2781 gfc_add_block_to_block (block
, &se
.post
);
2784 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
2786 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DIST_SCHEDULE
);
2787 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2788 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2791 if (clauses
->grainsize
)
2795 gfc_init_se (&se
, NULL
);
2796 gfc_conv_expr (&se
, clauses
->grainsize
);
2797 gfc_add_block_to_block (block
, &se
.pre
);
2798 grainsize
= gfc_evaluate_now (se
.expr
, block
);
2799 gfc_add_block_to_block (block
, &se
.post
);
2801 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GRAINSIZE
);
2802 OMP_CLAUSE_GRAINSIZE_EXPR (c
) = grainsize
;
2803 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2806 if (clauses
->num_tasks
)
2810 gfc_init_se (&se
, NULL
);
2811 gfc_conv_expr (&se
, clauses
->num_tasks
);
2812 gfc_add_block_to_block (block
, &se
.pre
);
2813 num_tasks
= gfc_evaluate_now (se
.expr
, block
);
2814 gfc_add_block_to_block (block
, &se
.post
);
2816 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TASKS
);
2817 OMP_CLAUSE_NUM_TASKS_EXPR (c
) = num_tasks
;
2818 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2821 if (clauses
->priority
)
2825 gfc_init_se (&se
, NULL
);
2826 gfc_conv_expr (&se
, clauses
->priority
);
2827 gfc_add_block_to_block (block
, &se
.pre
);
2828 priority
= gfc_evaluate_now (se
.expr
, block
);
2829 gfc_add_block_to_block (block
, &se
.post
);
2831 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PRIORITY
);
2832 OMP_CLAUSE_PRIORITY_EXPR (c
) = priority
;
2833 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2840 gfc_init_se (&se
, NULL
);
2841 gfc_conv_expr (&se
, clauses
->hint
);
2842 gfc_add_block_to_block (block
, &se
.pre
);
2843 hint
= gfc_evaluate_now (se
.expr
, block
);
2844 gfc_add_block_to_block (block
, &se
.post
);
2846 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_HINT
);
2847 OMP_CLAUSE_HINT_EXPR (c
) = hint
;
2848 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2853 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMD
);
2854 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2856 if (clauses
->threads
)
2858 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREADS
);
2859 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2861 if (clauses
->nogroup
)
2863 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOGROUP
);
2864 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2866 if (clauses
->defaultmap
)
2868 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULTMAP
);
2869 OMP_CLAUSE_DEFAULTMAP_SET_KIND (c
, OMP_CLAUSE_DEFAULTMAP_TOFROM
,
2870 OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
);
2871 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2873 if (clauses
->depend_source
)
2875 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEPEND
);
2876 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_SOURCE
;
2877 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2882 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ASYNC
);
2883 if (clauses
->async_expr
)
2884 OMP_CLAUSE_ASYNC_EXPR (c
)
2885 = gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
2887 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
2888 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2892 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SEQ
);
2893 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2895 if (clauses
->par_auto
)
2897 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_AUTO
);
2898 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2900 if (clauses
->if_present
)
2902 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF_PRESENT
);
2903 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2905 if (clauses
->finalize
)
2907 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FINALIZE
);
2908 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2910 if (clauses
->independent
)
2912 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INDEPENDENT
);
2913 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2915 if (clauses
->wait_list
)
2919 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2921 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WAIT
);
2922 OMP_CLAUSE_DECL (c
) = gfc_convert_expr_to_tree (block
, el
->expr
);
2923 OMP_CLAUSE_CHAIN (c
) = omp_clauses
;
2927 if (clauses
->num_gangs_expr
)
2930 = gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
2931 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_GANGS
);
2932 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
2933 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2935 if (clauses
->num_workers_expr
)
2937 tree num_workers_var
2938 = gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
2939 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_WORKERS
);
2940 OMP_CLAUSE_NUM_WORKERS_EXPR (c
) = num_workers_var
;
2941 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2943 if (clauses
->vector_length_expr
)
2945 tree vector_length_var
2946 = gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
2947 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR_LENGTH
);
2948 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
) = vector_length_var
;
2949 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2951 if (clauses
->tile_list
)
2953 vec
<tree
, va_gc
> *tvec
;
2956 vec_alloc (tvec
, 4);
2958 for (el
= clauses
->tile_list
; el
; el
= el
->next
)
2959 vec_safe_push (tvec
, gfc_convert_expr_to_tree (block
, el
->expr
));
2961 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TILE
);
2962 OMP_CLAUSE_TILE_LIST (c
) = build_tree_list_vec (tvec
);
2963 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2966 if (clauses
->vector
)
2968 if (clauses
->vector_expr
)
2971 = gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
2972 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2973 OMP_CLAUSE_VECTOR_EXPR (c
) = vector_var
;
2974 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2978 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2979 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2982 if (clauses
->worker
)
2984 if (clauses
->worker_expr
)
2987 = gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
2988 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2989 OMP_CLAUSE_WORKER_EXPR (c
) = worker_var
;
2990 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2994 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2995 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3001 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GANG
);
3002 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3003 if (clauses
->gang_num_expr
)
3005 arg
= gfc_convert_expr_to_tree (block
, clauses
->gang_num_expr
);
3006 OMP_CLAUSE_GANG_EXPR (c
) = arg
;
3008 if (clauses
->gang_static
)
3010 arg
= clauses
->gang_static_expr
3011 ? gfc_convert_expr_to_tree (block
, clauses
->gang_static_expr
)
3012 : integer_minus_one_node
;
3013 OMP_CLAUSE_GANG_STATIC_EXPR (c
) = arg
;
3017 return nreverse (omp_clauses
);
3020 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
3023 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
3028 stmt
= gfc_trans_code (code
);
3029 if (TREE_CODE (stmt
) != BIND_EXPR
)
3031 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
3033 tree block
= poplevel (1, 0);
3034 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
3044 /* Trans OpenACC directives. */
3045 /* parallel, kernels, data and host_data. */
3047 gfc_trans_oacc_construct (gfc_code
*code
)
3050 tree stmt
, oacc_clauses
;
3051 enum tree_code construct_code
;
3055 case EXEC_OACC_PARALLEL
:
3056 construct_code
= OACC_PARALLEL
;
3058 case EXEC_OACC_KERNELS
:
3059 construct_code
= OACC_KERNELS
;
3061 case EXEC_OACC_DATA
:
3062 construct_code
= OACC_DATA
;
3064 case EXEC_OACC_HOST_DATA
:
3065 construct_code
= OACC_HOST_DATA
;
3071 gfc_start_block (&block
);
3072 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3074 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3075 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
3077 gfc_add_expr_to_block (&block
, stmt
);
3078 return gfc_finish_block (&block
);
3081 /* update, enter_data, exit_data, cache. */
3083 gfc_trans_oacc_executable_directive (gfc_code
*code
)
3086 tree stmt
, oacc_clauses
;
3087 enum tree_code construct_code
;
3091 case EXEC_OACC_UPDATE
:
3092 construct_code
= OACC_UPDATE
;
3094 case EXEC_OACC_ENTER_DATA
:
3095 construct_code
= OACC_ENTER_DATA
;
3097 case EXEC_OACC_EXIT_DATA
:
3098 construct_code
= OACC_EXIT_DATA
;
3100 case EXEC_OACC_CACHE
:
3101 construct_code
= OACC_CACHE
;
3107 gfc_start_block (&block
);
3108 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3110 stmt
= build1_loc (input_location
, construct_code
, void_type_node
,
3112 gfc_add_expr_to_block (&block
, stmt
);
3113 return gfc_finish_block (&block
);
3117 gfc_trans_oacc_wait_directive (gfc_code
*code
)
3121 vec
<tree
, va_gc
> *args
;
3124 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
3125 location_t loc
= input_location
;
3127 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
3130 vec_alloc (args
, nparms
+ 2);
3131 stmt
= builtin_decl_explicit (BUILT_IN_GOACC_WAIT
);
3133 gfc_start_block (&block
);
3135 if (clauses
->async_expr
)
3136 t
= gfc_convert_expr_to_tree (&block
, clauses
->async_expr
);
3138 t
= build_int_cst (integer_type_node
, -2);
3140 args
->quick_push (t
);
3141 args
->quick_push (build_int_cst (integer_type_node
, nparms
));
3143 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
3144 args
->quick_push (gfc_convert_expr_to_tree (&block
, el
->expr
));
3146 stmt
= build_call_expr_loc_vec (loc
, stmt
, args
);
3147 gfc_add_expr_to_block (&block
, stmt
);
3151 return gfc_finish_block (&block
);
3154 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
3155 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
3158 gfc_trans_omp_atomic (gfc_code
*code
)
3160 gfc_code
*atomic_code
= code
;
3164 gfc_expr
*expr2
, *e
;
3167 tree lhsaddr
, type
, rhs
, x
;
3168 enum tree_code op
= ERROR_MARK
;
3169 enum tree_code aop
= OMP_ATOMIC
;
3170 bool var_on_left
= false;
3171 enum omp_memory_order mo
3172 = ((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SEQ_CST
)
3173 ? OMP_MEMORY_ORDER_SEQ_CST
: OMP_MEMORY_ORDER_RELAXED
);
3175 code
= code
->block
->next
;
3176 gcc_assert (code
->op
== EXEC_ASSIGN
);
3177 var
= code
->expr1
->symtree
->n
.sym
;
3179 gfc_init_se (&lse
, NULL
);
3180 gfc_init_se (&rse
, NULL
);
3181 gfc_init_se (&vse
, NULL
);
3182 gfc_start_block (&block
);
3184 expr2
= code
->expr2
;
3185 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3186 != GFC_OMP_ATOMIC_WRITE
)
3187 && (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
) == 0
3188 && expr2
->expr_type
== EXPR_FUNCTION
3189 && expr2
->value
.function
.isym
3190 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3191 expr2
= expr2
->value
.function
.actual
->expr
;
3193 switch (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3195 case GFC_OMP_ATOMIC_READ
:
3196 gfc_conv_expr (&vse
, code
->expr1
);
3197 gfc_add_block_to_block (&block
, &vse
.pre
);
3199 gfc_conv_expr (&lse
, expr2
);
3200 gfc_add_block_to_block (&block
, &lse
.pre
);
3201 type
= TREE_TYPE (lse
.expr
);
3202 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
3204 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
3205 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
3206 x
= convert (TREE_TYPE (vse
.expr
), x
);
3207 gfc_add_modify (&block
, vse
.expr
, x
);
3209 gfc_add_block_to_block (&block
, &lse
.pre
);
3210 gfc_add_block_to_block (&block
, &rse
.pre
);
3212 return gfc_finish_block (&block
);
3213 case GFC_OMP_ATOMIC_CAPTURE
:
3214 aop
= OMP_ATOMIC_CAPTURE_NEW
;
3215 if (expr2
->expr_type
== EXPR_VARIABLE
)
3217 aop
= OMP_ATOMIC_CAPTURE_OLD
;
3218 gfc_conv_expr (&vse
, code
->expr1
);
3219 gfc_add_block_to_block (&block
, &vse
.pre
);
3221 gfc_conv_expr (&lse
, expr2
);
3222 gfc_add_block_to_block (&block
, &lse
.pre
);
3223 gfc_init_se (&lse
, NULL
);
3225 var
= code
->expr1
->symtree
->n
.sym
;
3226 expr2
= code
->expr2
;
3227 if (expr2
->expr_type
== EXPR_FUNCTION
3228 && expr2
->value
.function
.isym
3229 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3230 expr2
= expr2
->value
.function
.actual
->expr
;
3237 gfc_conv_expr (&lse
, code
->expr1
);
3238 gfc_add_block_to_block (&block
, &lse
.pre
);
3239 type
= TREE_TYPE (lse
.expr
);
3240 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
3242 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3243 == GFC_OMP_ATOMIC_WRITE
)
3244 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
3246 gfc_conv_expr (&rse
, expr2
);
3247 gfc_add_block_to_block (&block
, &rse
.pre
);
3249 else if (expr2
->expr_type
== EXPR_OP
)
3252 switch (expr2
->value
.op
.op
)
3254 case INTRINSIC_PLUS
:
3257 case INTRINSIC_TIMES
:
3260 case INTRINSIC_MINUS
:
3263 case INTRINSIC_DIVIDE
:
3264 if (expr2
->ts
.type
== BT_INTEGER
)
3265 op
= TRUNC_DIV_EXPR
;
3270 op
= TRUTH_ANDIF_EXPR
;
3273 op
= TRUTH_ORIF_EXPR
;
3278 case INTRINSIC_NEQV
:
3284 e
= expr2
->value
.op
.op1
;
3285 if (e
->expr_type
== EXPR_FUNCTION
3286 && e
->value
.function
.isym
3287 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3288 e
= e
->value
.function
.actual
->expr
;
3289 if (e
->expr_type
== EXPR_VARIABLE
3290 && e
->symtree
!= NULL
3291 && e
->symtree
->n
.sym
== var
)
3293 expr2
= expr2
->value
.op
.op2
;
3298 e
= expr2
->value
.op
.op2
;
3299 if (e
->expr_type
== EXPR_FUNCTION
3300 && e
->value
.function
.isym
3301 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3302 e
= e
->value
.function
.actual
->expr
;
3303 gcc_assert (e
->expr_type
== EXPR_VARIABLE
3304 && e
->symtree
!= NULL
3305 && e
->symtree
->n
.sym
== var
);
3306 expr2
= expr2
->value
.op
.op1
;
3307 var_on_left
= false;
3309 gfc_conv_expr (&rse
, expr2
);
3310 gfc_add_block_to_block (&block
, &rse
.pre
);
3314 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
3315 switch (expr2
->value
.function
.isym
->id
)
3335 e
= expr2
->value
.function
.actual
->expr
;
3336 gcc_assert (e
->expr_type
== EXPR_VARIABLE
3337 && e
->symtree
!= NULL
3338 && e
->symtree
->n
.sym
== var
);
3340 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
3341 gfc_add_block_to_block (&block
, &rse
.pre
);
3342 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
3344 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
3345 gfc_actual_arglist
*arg
;
3347 gfc_add_modify (&block
, accum
, rse
.expr
);
3348 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
3351 gfc_init_block (&rse
.pre
);
3352 gfc_conv_expr (&rse
, arg
->expr
);
3353 gfc_add_block_to_block (&block
, &rse
.pre
);
3354 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
3356 gfc_add_modify (&block
, accum
, x
);
3362 expr2
= expr2
->value
.function
.actual
->next
->expr
;
3365 lhsaddr
= save_expr (lhsaddr
);
3366 if (TREE_CODE (lhsaddr
) != SAVE_EXPR
3367 && (TREE_CODE (lhsaddr
) != ADDR_EXPR
3368 || !VAR_P (TREE_OPERAND (lhsaddr
, 0))))
3370 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
3371 it even after unsharing function body. */
3372 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
3373 DECL_CONTEXT (var
) = current_function_decl
;
3374 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
,
3375 NULL_TREE
, NULL_TREE
);
3378 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
3380 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3381 == GFC_OMP_ATOMIC_WRITE
)
3382 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
3386 x
= convert (TREE_TYPE (rhs
),
3387 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
3389 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
3391 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
3394 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
3395 && TREE_CODE (type
) != COMPLEX_TYPE
)
3396 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
3397 TREE_TYPE (TREE_TYPE (rhs
)), x
);
3399 gfc_add_block_to_block (&block
, &lse
.pre
);
3400 gfc_add_block_to_block (&block
, &rse
.pre
);
3402 if (aop
== OMP_ATOMIC
)
3404 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
3405 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
3406 gfc_add_expr_to_block (&block
, x
);
3410 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
3413 expr2
= code
->expr2
;
3414 if (expr2
->expr_type
== EXPR_FUNCTION
3415 && expr2
->value
.function
.isym
3416 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3417 expr2
= expr2
->value
.function
.actual
->expr
;
3419 gcc_assert (expr2
->expr_type
== EXPR_VARIABLE
);
3420 gfc_conv_expr (&vse
, code
->expr1
);
3421 gfc_add_block_to_block (&block
, &vse
.pre
);
3423 gfc_init_se (&lse
, NULL
);
3424 gfc_conv_expr (&lse
, expr2
);
3425 gfc_add_block_to_block (&block
, &lse
.pre
);
3427 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
3428 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
3429 x
= convert (TREE_TYPE (vse
.expr
), x
);
3430 gfc_add_modify (&block
, vse
.expr
, x
);
3433 return gfc_finish_block (&block
);
3437 gfc_trans_omp_barrier (void)
3439 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
3440 return build_call_expr_loc (input_location
, decl
, 0);
3444 gfc_trans_omp_cancel (gfc_code
*code
)
3447 tree ifc
= boolean_true_node
;
3449 switch (code
->ext
.omp_clauses
->cancel
)
3451 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3452 case OMP_CANCEL_DO
: mask
= 2; break;
3453 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3454 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3455 default: gcc_unreachable ();
3457 gfc_start_block (&block
);
3458 if (code
->ext
.omp_clauses
->if_expr
)
3463 gfc_init_se (&se
, NULL
);
3464 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->if_expr
);
3465 gfc_add_block_to_block (&block
, &se
.pre
);
3466 if_var
= gfc_evaluate_now (se
.expr
, &block
);
3467 gfc_add_block_to_block (&block
, &se
.post
);
3468 tree type
= TREE_TYPE (if_var
);
3469 ifc
= fold_build2_loc (input_location
, NE_EXPR
,
3470 boolean_type_node
, if_var
,
3471 build_zero_cst (type
));
3473 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
3474 tree c_bool_type
= TREE_TYPE (TREE_TYPE (decl
));
3475 ifc
= fold_convert (c_bool_type
, ifc
);
3476 gfc_add_expr_to_block (&block
,
3477 build_call_expr_loc (input_location
, decl
, 2,
3478 build_int_cst (integer_type_node
,
3480 return gfc_finish_block (&block
);
3484 gfc_trans_omp_cancellation_point (gfc_code
*code
)
3487 switch (code
->ext
.omp_clauses
->cancel
)
3489 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3490 case OMP_CANCEL_DO
: mask
= 2; break;
3491 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3492 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3493 default: gcc_unreachable ();
3495 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT
);
3496 return build_call_expr_loc (input_location
, decl
, 1,
3497 build_int_cst (integer_type_node
, mask
));
3501 gfc_trans_omp_critical (gfc_code
*code
)
3503 tree name
= NULL_TREE
, stmt
;
3504 if (code
->ext
.omp_clauses
!= NULL
)
3505 name
= get_identifier (code
->ext
.omp_clauses
->critical_name
);
3506 stmt
= gfc_trans_code (code
->block
->next
);
3507 return build3_loc (input_location
, OMP_CRITICAL
, void_type_node
, stmt
,
3511 typedef struct dovar_init_d
{
3518 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
3519 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
3522 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
, orig_decls
;
3523 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
3526 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
3527 int i
, collapse
= clauses
->collapse
;
3528 vec
<dovar_init
> inits
= vNULL
;
3531 vec
<tree
, va_heap
, vl_embed
> *saved_doacross_steps
= doacross_steps
;
3532 gfc_expr_list
*tile
= do_clauses
? do_clauses
->tile_list
: clauses
->tile_list
;
3534 /* Both collapsed and tiled loops are lowered the same way. In
3535 OpenACC, those clauses are not compatible, so prioritize the tile
3536 clause, if present. */
3540 for (gfc_expr_list
*el
= tile
; el
; el
= el
->next
)
3544 doacross_steps
= NULL
;
3545 if (clauses
->orderedc
)
3546 collapse
= clauses
->orderedc
;
3550 code
= code
->block
->next
;
3551 gcc_assert (code
->op
== EXEC_DO
);
3553 init
= make_tree_vec (collapse
);
3554 cond
= make_tree_vec (collapse
);
3555 incr
= make_tree_vec (collapse
);
3556 orig_decls
= clauses
->orderedc
? make_tree_vec (collapse
) : NULL_TREE
;
3560 gfc_start_block (&block
);
3564 /* simd schedule modifier is only useful for composite do simd and other
3565 constructs including that, where gfc_trans_omp_do is only called
3566 on the simd construct and DO's clauses are translated elsewhere. */
3567 do_clauses
->sched_simd
= false;
3569 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
3571 for (i
= 0; i
< collapse
; i
++)
3574 int dovar_found
= 0;
3579 gfc_omp_namelist
*n
= NULL
;
3580 if (op
!= EXEC_OMP_DISTRIBUTE
)
3581 for (n
= clauses
->lists
[(op
== EXEC_OMP_SIMD
&& collapse
== 1)
3582 ? OMP_LIST_LINEAR
: OMP_LIST_LASTPRIVATE
];
3583 n
!= NULL
; n
= n
->next
)
3584 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3588 else if (n
== NULL
&& op
!= EXEC_OMP_SIMD
)
3589 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
3590 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3596 /* Evaluate all the expressions in the iterator. */
3597 gfc_init_se (&se
, NULL
);
3598 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
3599 gfc_add_block_to_block (pblock
, &se
.pre
);
3601 type
= TREE_TYPE (dovar
);
3602 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
3604 gfc_init_se (&se
, NULL
);
3605 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
3606 gfc_add_block_to_block (pblock
, &se
.pre
);
3607 from
= gfc_evaluate_now (se
.expr
, pblock
);
3609 gfc_init_se (&se
, NULL
);
3610 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
3611 gfc_add_block_to_block (pblock
, &se
.pre
);
3612 to
= gfc_evaluate_now (se
.expr
, pblock
);
3614 gfc_init_se (&se
, NULL
);
3615 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
3616 gfc_add_block_to_block (pblock
, &se
.pre
);
3617 step
= gfc_evaluate_now (se
.expr
, pblock
);
3620 /* Special case simple loops. */
3623 if (integer_onep (step
))
3625 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
3630 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
3636 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
3637 /* The condition should not be folded. */
3638 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
3639 ? LE_EXPR
: GE_EXPR
,
3640 logical_type_node
, dovar
, to
);
3641 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3643 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3646 TREE_VEC_ELT (incr
, i
));
3650 /* STEP is not 1 or -1. Use:
3651 for (count = 0; count < (to + step - from) / step; count++)
3653 dovar = from + count * step;
3657 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
3658 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
3659 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
3661 tmp
= gfc_evaluate_now (tmp
, pblock
);
3662 count
= gfc_create_var (type
, "count");
3663 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
3664 build_int_cst (type
, 0));
3665 /* The condition should not be folded. */
3666 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
3669 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3671 build_int_cst (type
, 1));
3672 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3673 MODIFY_EXPR
, type
, count
,
3674 TREE_VEC_ELT (incr
, i
));
3676 /* Initialize DOVAR. */
3677 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
3678 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
3679 dovar_init e
= {dovar
, tmp
};
3680 inits
.safe_push (e
);
3681 if (clauses
->orderedc
)
3683 if (doacross_steps
== NULL
)
3684 vec_safe_grow_cleared (doacross_steps
, clauses
->orderedc
);
3685 (*doacross_steps
)[i
] = step
;
3689 TREE_VEC_ELT (orig_decls
, i
) = dovar_decl
;
3691 if (dovar_found
== 2
3692 && op
== EXEC_OMP_SIMD
3696 for (tmp
= omp_clauses
; tmp
; tmp
= OMP_CLAUSE_CHAIN (tmp
))
3697 if (OMP_CLAUSE_CODE (tmp
) == OMP_CLAUSE_LINEAR
3698 && OMP_CLAUSE_DECL (tmp
) == dovar
)
3700 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3706 if (op
== EXEC_OMP_SIMD
)
3710 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3711 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
3712 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3715 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3720 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3721 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
3722 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3724 if (dovar_found
== 2)
3731 /* If dovar is lastprivate, but different counter is used,
3732 dovar += step needs to be added to
3733 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
3734 will have the value on entry of the last loop, rather
3735 than value after iterator increment. */
3736 if (clauses
->orderedc
)
3738 if (clauses
->collapse
<= 1 || i
>= clauses
->collapse
)
3741 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3742 type
, count
, build_one_cst (type
));
3743 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
,
3745 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
3750 tmp
= gfc_evaluate_now (step
, pblock
);
3751 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
3754 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
3756 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3757 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3758 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3760 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
3763 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
3764 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3766 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
3770 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
3772 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3773 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3774 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3776 tree l
= build_omp_clause (input_location
,
3777 OMP_CLAUSE_LASTPRIVATE
);
3778 OMP_CLAUSE_DECL (l
) = dovar_decl
;
3779 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
3780 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
3782 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
3786 gcc_assert (simple
|| c
!= NULL
);
3790 if (op
!= EXEC_OMP_SIMD
)
3791 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3792 else if (collapse
== 1)
3794 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3795 OMP_CLAUSE_LINEAR_STEP (tmp
) = build_int_cst (type
, 1);
3796 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3797 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
3800 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3801 OMP_CLAUSE_DECL (tmp
) = count
;
3802 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3805 if (i
+ 1 < collapse
)
3806 code
= code
->block
->next
;
3809 if (pblock
!= &block
)
3812 gfc_start_block (&block
);
3815 gfc_start_block (&body
);
3817 FOR_EACH_VEC_ELT (inits
, ix
, di
)
3818 gfc_add_modify (&body
, di
->var
, di
->init
);
3821 /* Cycle statement is implemented with a goto. Exit statement must not be
3822 present for this loop. */
3823 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3825 /* Put these labels where they can be found later. */
3827 code
->cycle_label
= cycle_label
;
3828 code
->exit_label
= NULL_TREE
;
3830 /* Main loop body. */
3831 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
3832 gfc_add_expr_to_block (&body
, tmp
);
3834 /* Label for cycle statements (if needed). */
3835 if (TREE_USED (cycle_label
))
3837 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3838 gfc_add_expr_to_block (&body
, tmp
);
3841 /* End of loop body. */
3844 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
3845 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
3846 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
3847 case EXEC_OMP_TASKLOOP
: stmt
= make_node (OMP_TASKLOOP
); break;
3848 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
3849 default: gcc_unreachable ();
3852 TREE_TYPE (stmt
) = void_type_node
;
3853 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
3854 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
3855 OMP_FOR_INIT (stmt
) = init
;
3856 OMP_FOR_COND (stmt
) = cond
;
3857 OMP_FOR_INCR (stmt
) = incr
;
3859 OMP_FOR_ORIG_DECLS (stmt
) = orig_decls
;
3860 gfc_add_expr_to_block (&block
, stmt
);
3862 vec_free (doacross_steps
);
3863 doacross_steps
= saved_doacross_steps
;
3865 return gfc_finish_block (&block
);
3868 /* parallel loop and kernels loop. */
3870 gfc_trans_oacc_combined_directive (gfc_code
*code
)
3872 stmtblock_t block
, *pblock
= NULL
;
3873 gfc_omp_clauses construct_clauses
, loop_clauses
;
3874 tree stmt
, oacc_clauses
= NULL_TREE
;
3875 enum tree_code construct_code
;
3879 case EXEC_OACC_PARALLEL_LOOP
:
3880 construct_code
= OACC_PARALLEL
;
3882 case EXEC_OACC_KERNELS_LOOP
:
3883 construct_code
= OACC_KERNELS
;
3889 gfc_start_block (&block
);
3891 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
3892 if (code
->ext
.omp_clauses
!= NULL
)
3894 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
3895 sizeof (construct_clauses
));
3896 loop_clauses
.collapse
= construct_clauses
.collapse
;
3897 loop_clauses
.gang
= construct_clauses
.gang
;
3898 loop_clauses
.gang_static
= construct_clauses
.gang_static
;
3899 loop_clauses
.gang_num_expr
= construct_clauses
.gang_num_expr
;
3900 loop_clauses
.gang_static_expr
= construct_clauses
.gang_static_expr
;
3901 loop_clauses
.vector
= construct_clauses
.vector
;
3902 loop_clauses
.vector_expr
= construct_clauses
.vector_expr
;
3903 loop_clauses
.worker
= construct_clauses
.worker
;
3904 loop_clauses
.worker_expr
= construct_clauses
.worker_expr
;
3905 loop_clauses
.seq
= construct_clauses
.seq
;
3906 loop_clauses
.par_auto
= construct_clauses
.par_auto
;
3907 loop_clauses
.independent
= construct_clauses
.independent
;
3908 loop_clauses
.tile_list
= construct_clauses
.tile_list
;
3909 loop_clauses
.lists
[OMP_LIST_PRIVATE
]
3910 = construct_clauses
.lists
[OMP_LIST_PRIVATE
];
3911 loop_clauses
.lists
[OMP_LIST_REDUCTION
]
3912 = construct_clauses
.lists
[OMP_LIST_REDUCTION
];
3913 construct_clauses
.gang
= false;
3914 construct_clauses
.gang_static
= false;
3915 construct_clauses
.gang_num_expr
= NULL
;
3916 construct_clauses
.gang_static_expr
= NULL
;
3917 construct_clauses
.vector
= false;
3918 construct_clauses
.vector_expr
= NULL
;
3919 construct_clauses
.worker
= false;
3920 construct_clauses
.worker_expr
= NULL
;
3921 construct_clauses
.seq
= false;
3922 construct_clauses
.par_auto
= false;
3923 construct_clauses
.independent
= false;
3924 construct_clauses
.independent
= false;
3925 construct_clauses
.tile_list
= NULL
;
3926 construct_clauses
.lists
[OMP_LIST_PRIVATE
] = NULL
;
3927 if (construct_code
== OACC_KERNELS
)
3928 construct_clauses
.lists
[OMP_LIST_REDUCTION
] = NULL
;
3929 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
3932 if (!loop_clauses
.seq
)
3936 stmt
= gfc_trans_omp_do (code
, EXEC_OACC_LOOP
, pblock
, &loop_clauses
, NULL
);
3937 if (TREE_CODE (stmt
) != BIND_EXPR
)
3938 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3941 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
3943 gfc_add_expr_to_block (&block
, stmt
);
3944 return gfc_finish_block (&block
);
3948 gfc_trans_omp_flush (void)
3950 tree decl
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
3951 return build_call_expr_loc (input_location
, decl
, 0);
3955 gfc_trans_omp_master (gfc_code
*code
)
3957 tree stmt
= gfc_trans_code (code
->block
->next
);
3958 if (IS_EMPTY_STMT (stmt
))
3960 return build1_v (OMP_MASTER
, stmt
);
3964 gfc_trans_omp_ordered (gfc_code
*code
)
3968 if (!code
->ext
.omp_clauses
->simd
)
3969 return gfc_trans_code (code
->block
? code
->block
->next
: NULL
);
3970 code
->ext
.omp_clauses
->threads
= 0;
3972 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, code
->ext
.omp_clauses
,
3974 return build2_loc (input_location
, OMP_ORDERED
, void_type_node
,
3975 code
->block
? gfc_trans_code (code
->block
->next
)
3976 : NULL_TREE
, omp_clauses
);
3980 gfc_trans_omp_parallel (gfc_code
*code
)
3983 tree stmt
, omp_clauses
;
3985 gfc_start_block (&block
);
3986 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3989 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3990 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3991 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3993 gfc_add_expr_to_block (&block
, stmt
);
3994 return gfc_finish_block (&block
);
/* Indices of the constituent constructs a combined OpenMP directive
   is split into by gfc_split_omp_clauses, and the corresponding bit
   masks describing which constituents a directive contains.  */

enum
  {
    GFC_OMP_SPLIT_SIMD,
    GFC_OMP_SPLIT_DO,
    GFC_OMP_SPLIT_PARALLEL,
    GFC_OMP_SPLIT_DISTRIBUTE,
    GFC_OMP_SPLIT_TEAMS,
    GFC_OMP_SPLIT_TARGET,
    GFC_OMP_SPLIT_TASKLOOP,
    GFC_OMP_SPLIT_NUM
  };

enum
  {
    GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
    GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
    GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
    GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
    GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
    GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET),
    GFC_OMP_MASK_TASKLOOP = (1 << GFC_OMP_SPLIT_TASKLOOP)
  };
4021 gfc_split_omp_clauses (gfc_code
*code
,
4022 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
4024 int mask
= 0, innermost
= 0;
4025 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
4028 case EXEC_OMP_DISTRIBUTE
:
4029 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
4031 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4032 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4033 innermost
= GFC_OMP_SPLIT_DO
;
4035 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4036 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
4037 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4038 innermost
= GFC_OMP_SPLIT_SIMD
;
4040 case EXEC_OMP_DISTRIBUTE_SIMD
:
4041 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
4042 innermost
= GFC_OMP_SPLIT_SIMD
;
4045 innermost
= GFC_OMP_SPLIT_DO
;
4047 case EXEC_OMP_DO_SIMD
:
4048 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4049 innermost
= GFC_OMP_SPLIT_SIMD
;
4051 case EXEC_OMP_PARALLEL
:
4052 innermost
= GFC_OMP_SPLIT_PARALLEL
;
4054 case EXEC_OMP_PARALLEL_DO
:
4055 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4056 innermost
= GFC_OMP_SPLIT_DO
;
4058 case EXEC_OMP_PARALLEL_DO_SIMD
:
4059 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4060 innermost
= GFC_OMP_SPLIT_SIMD
;
4063 innermost
= GFC_OMP_SPLIT_SIMD
;
4065 case EXEC_OMP_TARGET
:
4066 innermost
= GFC_OMP_SPLIT_TARGET
;
4068 case EXEC_OMP_TARGET_PARALLEL
:
4069 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
;
4070 innermost
= GFC_OMP_SPLIT_PARALLEL
;
4072 case EXEC_OMP_TARGET_PARALLEL_DO
:
4073 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4074 innermost
= GFC_OMP_SPLIT_DO
;
4076 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
4077 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
4078 | GFC_OMP_MASK_SIMD
;
4079 innermost
= GFC_OMP_SPLIT_SIMD
;
4081 case EXEC_OMP_TARGET_SIMD
:
4082 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_SIMD
;
4083 innermost
= GFC_OMP_SPLIT_SIMD
;
4085 case EXEC_OMP_TARGET_TEAMS
:
4086 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
4087 innermost
= GFC_OMP_SPLIT_TEAMS
;
4089 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4090 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
4091 | GFC_OMP_MASK_DISTRIBUTE
;
4092 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
4094 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4095 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4096 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4097 innermost
= GFC_OMP_SPLIT_DO
;
4099 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4100 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4101 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4102 innermost
= GFC_OMP_SPLIT_SIMD
;
4104 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4105 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
4106 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
4107 innermost
= GFC_OMP_SPLIT_SIMD
;
4109 case EXEC_OMP_TASKLOOP
:
4110 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
4112 case EXEC_OMP_TASKLOOP_SIMD
:
4113 mask
= GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
4114 innermost
= GFC_OMP_SPLIT_SIMD
;
4116 case EXEC_OMP_TEAMS
:
4117 innermost
= GFC_OMP_SPLIT_TEAMS
;
4119 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4120 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
4121 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
4123 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4124 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4125 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4126 innermost
= GFC_OMP_SPLIT_DO
;
4128 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4129 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4130 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4131 innermost
= GFC_OMP_SPLIT_SIMD
;
4133 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4134 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
4135 innermost
= GFC_OMP_SPLIT_SIMD
;
4142 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
4145 if (code
->ext
.omp_clauses
!= NULL
)
4147 if (mask
& GFC_OMP_MASK_TARGET
)
4149 /* First the clauses that are unique to some constructs. */
4150 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
4151 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
4152 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_IS_DEVICE_PTR
]
4153 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IS_DEVICE_PTR
];
4154 clausesa
[GFC_OMP_SPLIT_TARGET
].device
4155 = code
->ext
.omp_clauses
->device
;
4156 clausesa
[GFC_OMP_SPLIT_TARGET
].defaultmap
4157 = code
->ext
.omp_clauses
->defaultmap
;
4158 clausesa
[GFC_OMP_SPLIT_TARGET
].if_exprs
[OMP_IF_TARGET
]
4159 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TARGET
];
4160 /* And this is copied to all. */
4161 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
4162 = code
->ext
.omp_clauses
->if_expr
;
4164 if (mask
& GFC_OMP_MASK_TEAMS
)
4166 /* First the clauses that are unique to some constructs. */
4167 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
4168 = code
->ext
.omp_clauses
->num_teams
;
4169 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
4170 = code
->ext
.omp_clauses
->thread_limit
;
4171 /* Shared and default clauses are allowed on parallel, teams
4173 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
4174 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4175 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
4176 = code
->ext
.omp_clauses
->default_sharing
;
4178 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4180 /* First the clauses that are unique to some constructs. */
4181 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
4182 = code
->ext
.omp_clauses
->dist_sched_kind
;
4183 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
4184 = code
->ext
.omp_clauses
->dist_chunk_size
;
4185 /* Duplicate collapse. */
4186 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
4187 = code
->ext
.omp_clauses
->collapse
;
4189 if (mask
& GFC_OMP_MASK_PARALLEL
)
4191 /* First the clauses that are unique to some constructs. */
4192 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
4193 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
4194 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
4195 = code
->ext
.omp_clauses
->num_threads
;
4196 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
4197 = code
->ext
.omp_clauses
->proc_bind
;
4198 /* Shared and default clauses are allowed on parallel, teams
4200 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
4201 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4202 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
4203 = code
->ext
.omp_clauses
->default_sharing
;
4204 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_exprs
[OMP_IF_PARALLEL
]
4205 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_PARALLEL
];
4206 /* And this is copied to all. */
4207 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
4208 = code
->ext
.omp_clauses
->if_expr
;
4210 if (mask
& GFC_OMP_MASK_DO
)
4212 /* First the clauses that are unique to some constructs. */
4213 clausesa
[GFC_OMP_SPLIT_DO
].ordered
4214 = code
->ext
.omp_clauses
->ordered
;
4215 clausesa
[GFC_OMP_SPLIT_DO
].orderedc
4216 = code
->ext
.omp_clauses
->orderedc
;
4217 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
4218 = code
->ext
.omp_clauses
->sched_kind
;
4219 if (innermost
== GFC_OMP_SPLIT_SIMD
)
4220 clausesa
[GFC_OMP_SPLIT_DO
].sched_simd
4221 = code
->ext
.omp_clauses
->sched_simd
;
4222 clausesa
[GFC_OMP_SPLIT_DO
].sched_monotonic
4223 = code
->ext
.omp_clauses
->sched_monotonic
;
4224 clausesa
[GFC_OMP_SPLIT_DO
].sched_nonmonotonic
4225 = code
->ext
.omp_clauses
->sched_nonmonotonic
;
4226 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
4227 = code
->ext
.omp_clauses
->chunk_size
;
4228 clausesa
[GFC_OMP_SPLIT_DO
].nowait
4229 = code
->ext
.omp_clauses
->nowait
;
4230 /* Duplicate collapse. */
4231 clausesa
[GFC_OMP_SPLIT_DO
].collapse
4232 = code
->ext
.omp_clauses
->collapse
;
4234 if (mask
& GFC_OMP_MASK_SIMD
)
4236 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
4237 = code
->ext
.omp_clauses
->safelen_expr
;
4238 clausesa
[GFC_OMP_SPLIT_SIMD
].simdlen_expr
4239 = code
->ext
.omp_clauses
->simdlen_expr
;
4240 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
4241 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
4242 /* Duplicate collapse. */
4243 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
4244 = code
->ext
.omp_clauses
->collapse
;
4246 if (mask
& GFC_OMP_MASK_TASKLOOP
)
4248 /* First the clauses that are unique to some constructs. */
4249 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].nogroup
4250 = code
->ext
.omp_clauses
->nogroup
;
4251 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].grainsize
4252 = code
->ext
.omp_clauses
->grainsize
;
4253 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].num_tasks
4254 = code
->ext
.omp_clauses
->num_tasks
;
4255 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].priority
4256 = code
->ext
.omp_clauses
->priority
;
4257 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].final_expr
4258 = code
->ext
.omp_clauses
->final_expr
;
4259 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].untied
4260 = code
->ext
.omp_clauses
->untied
;
4261 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].mergeable
4262 = code
->ext
.omp_clauses
->mergeable
;
4263 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_exprs
[OMP_IF_TASKLOOP
]
4264 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TASKLOOP
];
4265 /* And this is copied to all. */
4266 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_expr
4267 = code
->ext
.omp_clauses
->if_expr
;
4268 /* Shared and default clauses are allowed on parallel, teams
4270 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_SHARED
]
4271 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4272 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].default_sharing
4273 = code
->ext
.omp_clauses
->default_sharing
;
4274 /* Duplicate collapse. */
4275 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].collapse
4276 = code
->ext
.omp_clauses
->collapse
;
4278 /* Private clause is supported on all constructs,
4279 it is enough to put it on the innermost one. For
4280 !$ omp parallel do put it on parallel though,
4281 as that's what we did for OpenMP 3.1. */
4282 clausesa
[innermost
== GFC_OMP_SPLIT_DO
4283 ? (int) GFC_OMP_SPLIT_PARALLEL
4284 : innermost
].lists
[OMP_LIST_PRIVATE
]
4285 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
4286 /* Firstprivate clause is supported on all constructs but
4287 simd. Put it on the outermost of those and duplicate
4288 on parallel and teams. */
4289 if (mask
& GFC_OMP_MASK_TARGET
)
4290 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_FIRSTPRIVATE
]
4291 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4292 if (mask
& GFC_OMP_MASK_TEAMS
)
4293 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
4294 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4295 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4296 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
4297 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4298 if (mask
& GFC_OMP_MASK_PARALLEL
)
4299 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
4300 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4301 else if (mask
& GFC_OMP_MASK_DO
)
4302 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
4303 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4304 /* Lastprivate is allowed on distribute, do and simd.
4305 In parallel do{, simd} we actually want to put it on
4306 parallel rather than do. */
4307 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4308 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_LASTPRIVATE
]
4309 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4310 if (mask
& GFC_OMP_MASK_PARALLEL
)
4311 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
4312 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4313 else if (mask
& GFC_OMP_MASK_DO
)
4314 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
4315 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4316 if (mask
& GFC_OMP_MASK_SIMD
)
4317 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
4318 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4319 /* Reduction is allowed on simd, do, parallel and teams.
4320 Duplicate it on all of them, but omit on do if
4321 parallel is present. */
4322 if (mask
& GFC_OMP_MASK_TEAMS
)
4323 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_REDUCTION
]
4324 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4325 if (mask
& GFC_OMP_MASK_PARALLEL
)
4326 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_REDUCTION
]
4327 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4328 else if (mask
& GFC_OMP_MASK_DO
)
4329 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_REDUCTION
]
4330 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4331 if (mask
& GFC_OMP_MASK_SIMD
)
4332 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_REDUCTION
]
4333 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4334 /* Linear clause is supported on do and simd,
4335 put it on the innermost one. */
4336 clausesa
[innermost
].lists
[OMP_LIST_LINEAR
]
4337 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
4339 if ((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
4340 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
4341 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
4345 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
4346 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
4349 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4350 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
4353 gfc_start_block (&block
);
4355 gfc_init_block (&block
);
4357 if (clausesa
== NULL
)
4359 clausesa
= clausesa_buf
;
4360 gfc_split_omp_clauses (code
, clausesa
);
4364 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
4365 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
4366 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
4369 if (TREE_CODE (body
) != BIND_EXPR
)
4370 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
4374 else if (TREE_CODE (body
) != BIND_EXPR
)
4375 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
4378 stmt
= make_node (OMP_FOR
);
4379 TREE_TYPE (stmt
) = void_type_node
;
4380 OMP_FOR_BODY (stmt
) = body
;
4381 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
4385 gfc_add_expr_to_block (&block
, stmt
);
4386 return gfc_finish_block (&block
);
4390 gfc_trans_omp_parallel_do (gfc_code
*code
, stmtblock_t
*pblock
,
4391 gfc_omp_clauses
*clausesa
)
4393 stmtblock_t block
, *new_pblock
= pblock
;
4394 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4395 tree stmt
, omp_clauses
= NULL_TREE
;
4398 gfc_start_block (&block
);
4400 gfc_init_block (&block
);
4402 if (clausesa
== NULL
)
4404 clausesa
= clausesa_buf
;
4405 gfc_split_omp_clauses (code
, clausesa
);
4408 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
4412 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
4413 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
4414 new_pblock
= &block
;
4418 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DO
, new_pblock
,
4419 &clausesa
[GFC_OMP_SPLIT_DO
], omp_clauses
);
4422 if (TREE_CODE (stmt
) != BIND_EXPR
)
4423 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4427 else if (TREE_CODE (stmt
) != BIND_EXPR
)
4428 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
4429 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4431 OMP_PARALLEL_COMBINED (stmt
) = 1;
4432 gfc_add_expr_to_block (&block
, stmt
);
4433 return gfc_finish_block (&block
);
4437 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
4438 gfc_omp_clauses
*clausesa
)
4441 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4442 tree stmt
, omp_clauses
= NULL_TREE
;
4445 gfc_start_block (&block
);
4447 gfc_init_block (&block
);
4449 if (clausesa
== NULL
)
4451 clausesa
= clausesa_buf
;
4452 gfc_split_omp_clauses (code
, clausesa
);
4456 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
4460 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
4463 if (TREE_CODE (stmt
) != BIND_EXPR
)
4464 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4468 else if (TREE_CODE (stmt
) != BIND_EXPR
)
4469 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
4472 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4474 OMP_PARALLEL_COMBINED (stmt
) = 1;
4476 gfc_add_expr_to_block (&block
, stmt
);
4477 return gfc_finish_block (&block
);
4481 gfc_trans_omp_parallel_sections (gfc_code
*code
)
4484 gfc_omp_clauses section_clauses
;
4485 tree stmt
, omp_clauses
;
4487 memset (§ion_clauses
, 0, sizeof (section_clauses
));
4488 section_clauses
.nowait
= true;
4490 gfc_start_block (&block
);
4491 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4494 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
4495 if (TREE_CODE (stmt
) != BIND_EXPR
)
4496 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4499 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4501 OMP_PARALLEL_COMBINED (stmt
) = 1;
4502 gfc_add_expr_to_block (&block
, stmt
);
4503 return gfc_finish_block (&block
);
4507 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
4510 gfc_omp_clauses workshare_clauses
;
4511 tree stmt
, omp_clauses
;
4513 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
4514 workshare_clauses
.nowait
= true;
4516 gfc_start_block (&block
);
4517 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4520 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
4521 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4522 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4524 OMP_PARALLEL_COMBINED (stmt
) = 1;
4525 gfc_add_expr_to_block (&block
, stmt
);
4526 return gfc_finish_block (&block
);
4530 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4532 stmtblock_t block
, body
;
4533 tree omp_clauses
, stmt
;
4534 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
4536 gfc_start_block (&block
);
4538 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
4540 gfc_init_block (&body
);
4541 for (code
= code
->block
; code
; code
= code
->block
)
4543 /* Last section is special because of lastprivate, so even if it
4544 is empty, chain it in. */
4545 stmt
= gfc_trans_omp_code (code
->next
,
4546 has_lastprivate
&& code
->block
== NULL
);
4547 if (! IS_EMPTY_STMT (stmt
))
4549 stmt
= build1_v (OMP_SECTION
, stmt
);
4550 gfc_add_expr_to_block (&body
, stmt
);
4553 stmt
= gfc_finish_block (&body
);
4555 stmt
= build2_loc (input_location
, OMP_SECTIONS
, void_type_node
, stmt
,
4557 gfc_add_expr_to_block (&block
, stmt
);
4559 return gfc_finish_block (&block
);
4563 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4565 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, clauses
, code
->loc
);
4566 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4567 stmt
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, stmt
,
4573 gfc_trans_omp_task (gfc_code
*code
)
4576 tree stmt
, omp_clauses
;
4578 gfc_start_block (&block
);
4579 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4582 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4583 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4584 stmt
= build2_loc (input_location
, OMP_TASK
, void_type_node
, stmt
,
4586 gfc_add_expr_to_block (&block
, stmt
);
4587 return gfc_finish_block (&block
);
4591 gfc_trans_omp_taskgroup (gfc_code
*code
)
4593 tree body
= gfc_trans_code (code
->block
->next
);
4594 tree stmt
= make_node (OMP_TASKGROUP
);
4595 TREE_TYPE (stmt
) = void_type_node
;
4596 OMP_TASKGROUP_BODY (stmt
) = body
;
4597 OMP_TASKGROUP_CLAUSES (stmt
) = NULL_TREE
;
4602 gfc_trans_omp_taskwait (void)
4604 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
4605 return build_call_expr_loc (input_location
, decl
, 0);
4609 gfc_trans_omp_taskyield (void)
4611 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
4612 return build_call_expr_loc (input_location
, decl
, 0);
4616 gfc_trans_omp_distribute (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
4619 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4620 tree stmt
, omp_clauses
= NULL_TREE
;
4622 gfc_start_block (&block
);
4623 if (clausesa
== NULL
)
4625 clausesa
= clausesa_buf
;
4626 gfc_split_omp_clauses (code
, clausesa
);
4630 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4634 case EXEC_OMP_DISTRIBUTE
:
4635 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4636 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4637 /* This is handled in gfc_trans_omp_do. */
4640 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4641 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4642 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4643 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
4644 if (TREE_CODE (stmt
) != BIND_EXPR
)
4645 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4649 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4650 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4651 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4652 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
4653 if (TREE_CODE (stmt
) != BIND_EXPR
)
4654 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4658 case EXEC_OMP_DISTRIBUTE_SIMD
:
4659 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4660 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4661 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4662 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4663 if (TREE_CODE (stmt
) != BIND_EXPR
)
4664 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4673 tree distribute
= make_node (OMP_DISTRIBUTE
);
4674 TREE_TYPE (distribute
) = void_type_node
;
4675 OMP_FOR_BODY (distribute
) = stmt
;
4676 OMP_FOR_CLAUSES (distribute
) = omp_clauses
;
4679 gfc_add_expr_to_block (&block
, stmt
);
4680 return gfc_finish_block (&block
);
4684 gfc_trans_omp_teams (gfc_code
*code
, gfc_omp_clauses
*clausesa
,
4688 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4690 bool combined
= true;
4692 gfc_start_block (&block
);
4693 if (clausesa
== NULL
)
4695 clausesa
= clausesa_buf
;
4696 gfc_split_omp_clauses (code
, clausesa
);
4700 = chainon (omp_clauses
,
4701 gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TEAMS
],
4705 case EXEC_OMP_TARGET_TEAMS
:
4706 case EXEC_OMP_TEAMS
:
4707 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4710 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4711 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4712 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DISTRIBUTE
, NULL
,
4713 &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4717 stmt
= gfc_trans_omp_distribute (code
, clausesa
);
4722 stmt
= build2_loc (input_location
, OMP_TEAMS
, void_type_node
, stmt
,
4725 OMP_TEAMS_COMBINED (stmt
) = 1;
4727 gfc_add_expr_to_block (&block
, stmt
);
4728 return gfc_finish_block (&block
);
4732 gfc_trans_omp_target (gfc_code
*code
)
4735 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
4736 tree stmt
, omp_clauses
= NULL_TREE
;
4738 gfc_start_block (&block
);
4739 gfc_split_omp_clauses (code
, clausesa
);
4742 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TARGET
],
4746 case EXEC_OMP_TARGET
:
4748 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4749 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4751 case EXEC_OMP_TARGET_PARALLEL
:
4755 gfc_start_block (&iblock
);
4757 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
4759 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4760 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4762 gfc_add_expr_to_block (&iblock
, stmt
);
4763 stmt
= gfc_finish_block (&iblock
);
4764 if (TREE_CODE (stmt
) != BIND_EXPR
)
4765 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4770 case EXEC_OMP_TARGET_PARALLEL_DO
:
4771 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
4772 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
4773 if (TREE_CODE (stmt
) != BIND_EXPR
)
4774 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4778 case EXEC_OMP_TARGET_SIMD
:
4779 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4780 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4781 if (TREE_CODE (stmt
) != BIND_EXPR
)
4782 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4788 && (clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
4789 || clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
))
4791 gfc_omp_clauses clausesb
;
4793 /* For combined !$omp target teams, the num_teams and
4794 thread_limit clauses are evaluated before entering the
4795 target construct. */
4796 memset (&clausesb
, '\0', sizeof (clausesb
));
4797 clausesb
.num_teams
= clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
;
4798 clausesb
.thread_limit
= clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
;
4799 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
= NULL
;
4800 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
= NULL
;
4802 = gfc_trans_omp_clauses (&block
, &clausesb
, code
->loc
);
4804 stmt
= gfc_trans_omp_teams (code
, clausesa
, teams_clauses
);
4809 stmt
= gfc_trans_omp_teams (code
, clausesa
, NULL_TREE
);
4811 if (TREE_CODE (stmt
) != BIND_EXPR
)
4812 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4819 stmt
= build2_loc (input_location
, OMP_TARGET
, void_type_node
, stmt
,
4821 if (code
->op
!= EXEC_OMP_TARGET
)
4822 OMP_TARGET_COMBINED (stmt
) = 1;
4824 gfc_add_expr_to_block (&block
, stmt
);
4825 return gfc_finish_block (&block
);
4829 gfc_trans_omp_taskloop (gfc_code
*code
)
4832 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
4833 tree stmt
, omp_clauses
= NULL_TREE
;
4835 gfc_start_block (&block
);
4836 gfc_split_omp_clauses (code
, clausesa
);
4839 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TASKLOOP
],
4843 case EXEC_OMP_TASKLOOP
:
4844 /* This is handled in gfc_trans_omp_do. */
4847 case EXEC_OMP_TASKLOOP_SIMD
:
4848 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4849 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4850 if (TREE_CODE (stmt
) != BIND_EXPR
)
4851 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4860 tree taskloop
= make_node (OMP_TASKLOOP
);
4861 TREE_TYPE (taskloop
) = void_type_node
;
4862 OMP_FOR_BODY (taskloop
) = stmt
;
4863 OMP_FOR_CLAUSES (taskloop
) = omp_clauses
;
4866 gfc_add_expr_to_block (&block
, stmt
);
4867 return gfc_finish_block (&block
);
4871 gfc_trans_omp_target_data (gfc_code
*code
)
4874 tree stmt
, omp_clauses
;
4876 gfc_start_block (&block
);
4877 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4879 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4880 stmt
= build2_loc (input_location
, OMP_TARGET_DATA
, void_type_node
, stmt
,
4882 gfc_add_expr_to_block (&block
, stmt
);
4883 return gfc_finish_block (&block
);
4887 gfc_trans_omp_target_enter_data (gfc_code
*code
)
4890 tree stmt
, omp_clauses
;
4892 gfc_start_block (&block
);
4893 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4895 stmt
= build1_loc (input_location
, OMP_TARGET_ENTER_DATA
, void_type_node
,
4897 gfc_add_expr_to_block (&block
, stmt
);
4898 return gfc_finish_block (&block
);
4902 gfc_trans_omp_target_exit_data (gfc_code
*code
)
4905 tree stmt
, omp_clauses
;
4907 gfc_start_block (&block
);
4908 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4910 stmt
= build1_loc (input_location
, OMP_TARGET_EXIT_DATA
, void_type_node
,
4912 gfc_add_expr_to_block (&block
, stmt
);
4913 return gfc_finish_block (&block
);
4917 gfc_trans_omp_target_update (gfc_code
*code
)
4920 tree stmt
, omp_clauses
;
4922 gfc_start_block (&block
);
4923 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4925 stmt
= build1_loc (input_location
, OMP_TARGET_UPDATE
, void_type_node
,
4927 gfc_add_expr_to_block (&block
, stmt
);
4928 return gfc_finish_block (&block
);
4932 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4934 tree res
, tmp
, stmt
;
4935 stmtblock_t block
, *pblock
= NULL
;
4936 stmtblock_t singleblock
;
4937 int saved_ompws_flags
;
4938 bool singleblock_in_progress
= false;
4939 /* True if previous gfc_code in workshare construct is not workshared. */
4940 bool prev_singleunit
;
4942 code
= code
->block
->next
;
4946 gfc_start_block (&block
);
4949 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
4950 prev_singleunit
= false;
4952 /* Translate statements one by one to trees until we reach
4953 the end of the workshare construct. Adjacent gfc_codes that
4954 are a single unit of work are clustered and encapsulated in a
4955 single OMP_SINGLE construct. */
4956 for (; code
; code
= code
->next
)
4958 if (code
->here
!= 0)
4960 res
= gfc_trans_label_here (code
);
4961 gfc_add_expr_to_block (pblock
, res
);
4964 /* No dependence analysis, use for clauses with wait.
4965 If this is the last gfc_code, use default omp_clauses. */
4966 if (code
->next
== NULL
&& clauses
->nowait
)
4967 ompws_flags
|= OMPWS_NOWAIT
;
4969 /* By default, every gfc_code is a single unit of work. */
4970 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
4971 ompws_flags
&= ~(OMPWS_SCALARIZER_WS
| OMPWS_SCALARIZER_BODY
);
4980 res
= gfc_trans_assign (code
);
4983 case EXEC_POINTER_ASSIGN
:
4984 res
= gfc_trans_pointer_assign (code
);
4987 case EXEC_INIT_ASSIGN
:
4988 res
= gfc_trans_init_assign (code
);
4992 res
= gfc_trans_forall (code
);
4996 res
= gfc_trans_where (code
);
4999 case EXEC_OMP_ATOMIC
:
5000 res
= gfc_trans_omp_directive (code
);
5003 case EXEC_OMP_PARALLEL
:
5004 case EXEC_OMP_PARALLEL_DO
:
5005 case EXEC_OMP_PARALLEL_SECTIONS
:
5006 case EXEC_OMP_PARALLEL_WORKSHARE
:
5007 case EXEC_OMP_CRITICAL
:
5008 saved_ompws_flags
= ompws_flags
;
5010 res
= gfc_trans_omp_directive (code
);
5011 ompws_flags
= saved_ompws_flags
;
5015 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
5018 gfc_set_backend_locus (&code
->loc
);
5020 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
5022 if (prev_singleunit
)
5024 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
5025 /* Add current gfc_code to single block. */
5026 gfc_add_expr_to_block (&singleblock
, res
);
5029 /* Finish single block and add it to pblock. */
5030 tmp
= gfc_finish_block (&singleblock
);
5031 tmp
= build2_loc (input_location
, OMP_SINGLE
,
5032 void_type_node
, tmp
, NULL_TREE
);
5033 gfc_add_expr_to_block (pblock
, tmp
);
5034 /* Add current gfc_code to pblock. */
5035 gfc_add_expr_to_block (pblock
, res
);
5036 singleblock_in_progress
= false;
5041 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
5043 /* Start single block. */
5044 gfc_init_block (&singleblock
);
5045 gfc_add_expr_to_block (&singleblock
, res
);
5046 singleblock_in_progress
= true;
5049 /* Add the new statement to the block. */
5050 gfc_add_expr_to_block (pblock
, res
);
5052 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
5056 /* Finish remaining SINGLE block, if we were in the middle of one. */
5057 if (singleblock_in_progress
)
5059 /* Finish single block and add it to pblock. */
5060 tmp
= gfc_finish_block (&singleblock
);
5061 tmp
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, tmp
,
5063 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
5065 gfc_add_expr_to_block (pblock
, tmp
);
5068 stmt
= gfc_finish_block (pblock
);
5069 if (TREE_CODE (stmt
) != BIND_EXPR
)
5071 if (!IS_EMPTY_STMT (stmt
))
5073 tree bindblock
= poplevel (1, 0);
5074 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
5082 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
5083 stmt
= gfc_trans_omp_barrier ();
5090 gfc_trans_oacc_declare (gfc_code
*code
)
5093 tree stmt
, oacc_clauses
;
5094 enum tree_code construct_code
;
5096 construct_code
= OACC_DATA
;
5098 gfc_start_block (&block
);
5100 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.oacc_declare
->clauses
,
5102 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
5103 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
5105 gfc_add_expr_to_block (&block
, stmt
);
5107 return gfc_finish_block (&block
);
5111 gfc_trans_oacc_directive (gfc_code
*code
)
5115 case EXEC_OACC_PARALLEL_LOOP
:
5116 case EXEC_OACC_KERNELS_LOOP
:
5117 return gfc_trans_oacc_combined_directive (code
);
5118 case EXEC_OACC_PARALLEL
:
5119 case EXEC_OACC_KERNELS
:
5120 case EXEC_OACC_DATA
:
5121 case EXEC_OACC_HOST_DATA
:
5122 return gfc_trans_oacc_construct (code
);
5123 case EXEC_OACC_LOOP
:
5124 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
5126 case EXEC_OACC_UPDATE
:
5127 case EXEC_OACC_CACHE
:
5128 case EXEC_OACC_ENTER_DATA
:
5129 case EXEC_OACC_EXIT_DATA
:
5130 return gfc_trans_oacc_executable_directive (code
);
5131 case EXEC_OACC_WAIT
:
5132 return gfc_trans_oacc_wait_directive (code
);
5133 case EXEC_OACC_ATOMIC
:
5134 return gfc_trans_omp_atomic (code
);
5135 case EXEC_OACC_DECLARE
:
5136 return gfc_trans_oacc_declare (code
);
5143 gfc_trans_omp_directive (gfc_code
*code
)
5147 case EXEC_OMP_ATOMIC
:
5148 return gfc_trans_omp_atomic (code
);
5149 case EXEC_OMP_BARRIER
:
5150 return gfc_trans_omp_barrier ();
5151 case EXEC_OMP_CANCEL
:
5152 return gfc_trans_omp_cancel (code
);
5153 case EXEC_OMP_CANCELLATION_POINT
:
5154 return gfc_trans_omp_cancellation_point (code
);
5155 case EXEC_OMP_CRITICAL
:
5156 return gfc_trans_omp_critical (code
);
5157 case EXEC_OMP_DISTRIBUTE
:
5160 case EXEC_OMP_TASKLOOP
:
5161 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
5163 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
5164 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
5165 case EXEC_OMP_DISTRIBUTE_SIMD
:
5166 return gfc_trans_omp_distribute (code
, NULL
);
5167 case EXEC_OMP_DO_SIMD
:
5168 return gfc_trans_omp_do_simd (code
, NULL
, NULL
, NULL_TREE
);
5169 case EXEC_OMP_FLUSH
:
5170 return gfc_trans_omp_flush ();
5171 case EXEC_OMP_MASTER
:
5172 return gfc_trans_omp_master (code
);
5173 case EXEC_OMP_ORDERED
:
5174 return gfc_trans_omp_ordered (code
);
5175 case EXEC_OMP_PARALLEL
:
5176 return gfc_trans_omp_parallel (code
);
5177 case EXEC_OMP_PARALLEL_DO
:
5178 return gfc_trans_omp_parallel_do (code
, NULL
, NULL
);
5179 case EXEC_OMP_PARALLEL_DO_SIMD
:
5180 return gfc_trans_omp_parallel_do_simd (code
, NULL
, NULL
);
5181 case EXEC_OMP_PARALLEL_SECTIONS
:
5182 return gfc_trans_omp_parallel_sections (code
);
5183 case EXEC_OMP_PARALLEL_WORKSHARE
:
5184 return gfc_trans_omp_parallel_workshare (code
);
5185 case EXEC_OMP_SECTIONS
:
5186 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
5187 case EXEC_OMP_SINGLE
:
5188 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
5189 case EXEC_OMP_TARGET
:
5190 case EXEC_OMP_TARGET_PARALLEL
:
5191 case EXEC_OMP_TARGET_PARALLEL_DO
:
5192 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
5193 case EXEC_OMP_TARGET_SIMD
:
5194 case EXEC_OMP_TARGET_TEAMS
:
5195 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
5196 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5197 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5198 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
5199 return gfc_trans_omp_target (code
);
5200 case EXEC_OMP_TARGET_DATA
:
5201 return gfc_trans_omp_target_data (code
);
5202 case EXEC_OMP_TARGET_ENTER_DATA
:
5203 return gfc_trans_omp_target_enter_data (code
);
5204 case EXEC_OMP_TARGET_EXIT_DATA
:
5205 return gfc_trans_omp_target_exit_data (code
);
5206 case EXEC_OMP_TARGET_UPDATE
:
5207 return gfc_trans_omp_target_update (code
);
5209 return gfc_trans_omp_task (code
);
5210 case EXEC_OMP_TASKGROUP
:
5211 return gfc_trans_omp_taskgroup (code
);
5212 case EXEC_OMP_TASKLOOP_SIMD
:
5213 return gfc_trans_omp_taskloop (code
);
5214 case EXEC_OMP_TASKWAIT
:
5215 return gfc_trans_omp_taskwait ();
5216 case EXEC_OMP_TASKYIELD
:
5217 return gfc_trans_omp_taskyield ();
5218 case EXEC_OMP_TEAMS
:
5219 case EXEC_OMP_TEAMS_DISTRIBUTE
:
5220 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5221 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5222 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
5223 return gfc_trans_omp_teams (code
, NULL
, NULL_TREE
);
5224 case EXEC_OMP_WORKSHARE
:
5225 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);
5232 gfc_trans_omp_declare_simd (gfc_namespace
*ns
)
5237 gfc_omp_declare_simd
*ods
;
5238 for (ods
= ns
->omp_declare_simd
; ods
; ods
= ods
->next
)
5240 tree c
= gfc_trans_omp_clauses (NULL
, ods
->clauses
, ods
->where
, true);
5241 tree fndecl
= ns
->proc_name
->backend_decl
;
5243 c
= tree_cons (NULL_TREE
, c
, NULL_TREE
);
5244 c
= build_tree_list (get_identifier ("omp declare simd"), c
);
5245 TREE_CHAIN (c
) = DECL_ATTRIBUTES (fndecl
);
5246 DECL_ATTRIBUTES (fndecl
) = c
;