1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2018 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "gimple-expr.h"
30 #include "stringpool.h"
31 #include "fold-const.h"
32 #include "gimplify.h" /* For create_tmp_var_raw. */
33 #include "trans-stmt.h"
34 #include "trans-types.h"
35 #include "trans-array.h"
36 #include "trans-const.h"
38 #include "gomp-constants.h"
39 #include "omp-general.h"
42 #define GCC_DIAG_STYLE __gcc_tdiag__
43 #include "diagnostic-core.h"
45 #define GCC_DIAG_STYLE __gcc_gfc__
49 /* True if OpenMP should privatize what this DECL points to rather
50 than the DECL itself. */
/* NOTE(review): this chunk is a lossy extraction -- statements are split
   mid-expression across lines and the embedded original line numbers
   (53..81, with gaps) show that braces and return statements were dropped.
   The comments below annotate visible intent only; this text is not
   compilable as-is.  */
/* Predicate: decide whether OpenMP privatization should act on the
   pointed-to object instead of DECL itself.  Return type and the returns
   for each branch are among the missing lines -- presumably bool with
   true/false results; confirm against the upstream file.  */
53 gfc_omp_privatize_by_reference (const_tree decl
)
55 tree type
= TREE_TYPE (decl
);
/* REFERENCE_TYPE decls (non-artificial, or PARM_DECLs) are handled by
   reference; the result statement for this branch is missing here.  */
57 if (TREE_CODE (type
) == REFERENCE_TYPE
58 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
61 if (TREE_CODE (type
) == POINTER_TYPE
)
63 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
64 that have POINTER_TYPE type and aren't scalar pointers, scalar
65 allocatables, Cray pointees or C pointers are supposed to be
66 privatized by reference. */
67 if (GFC_DECL_GET_SCALAR_POINTER (decl
)
68 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
69 || GFC_DECL_CRAY_POINTEE (decl
)
70 || GFC_DECL_ASSOCIATE_VAR_P (decl
)
71 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
/* Non-artificial pointers to non-function types fall in the
   privatize-by-reference category (result line missing).  */
74 if (!DECL_ARTIFICIAL (decl
)
75 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
78 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
80 if (DECL_LANG_SPECIFIC (decl
)
81 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
88 /* True if OpenMP sharing attribute of DECL is predetermined. */
/* NOTE(review): lossy extraction -- lines split mid-expression; some
   original lines are missing (e.g. the opening brace and parts of
   comments, per the 88..155 numbering gaps).  Annotation only.  */
/* Classify DECL's default OpenMP data-sharing attribute.  Each visible
   branch returns one of the OMP_CLAUSE_DEFAULT_* kinds; the final
   fallthrough returns OMP_CLAUSE_DEFAULT_UNSPECIFIED.  */
90 enum omp_clause_default_kind
91 gfc_omp_predetermined_sharing (tree decl
)
93 /* Associate names preserve the association established during ASSOCIATE.
94 As they are implemented either as pointers to the selector or array
95 descriptor and shouldn't really change in the ASSOCIATE region,
96 this decl can be either shared or firstprivate. If it is a pointer,
97 use firstprivate, as it is cheaper that way, otherwise make it shared. */
98 if (GFC_DECL_ASSOCIATE_VAR_P (decl
))
100 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
101 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
103 return OMP_CLAUSE_DEFAULT_SHARED
;
/* Artificial decls that are neither function results nor saved
   descriptors default to shared.  */
106 if (DECL_ARTIFICIAL (decl
)
107 && ! GFC_DECL_RESULT (decl
)
108 && ! (DECL_LANG_SPECIFIC (decl
)
109 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
110 return OMP_CLAUSE_DEFAULT_SHARED
;
112 /* Cray pointees shouldn't be listed in any clauses and should be
113 gimplified to dereference of the corresponding Cray pointer.
114 Make them all private, so that they are emitted in the debug
116 if (GFC_DECL_CRAY_POINTEE (decl
))
117 return OMP_CLAUSE_DEFAULT_PRIVATE
;
119 /* Assumed-size arrays are predetermined shared. */
/* The condition's final operand (comparison of the last-dimension upper
   bound, presumably against NULL_TREE) is among the missing lines.  */
120 if (TREE_CODE (decl
) == PARM_DECL
121 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
122 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
123 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
124 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
126 return OMP_CLAUSE_DEFAULT_SHARED
;
128 /* Dummy procedures aren't considered variables by OpenMP, thus are
129 disallowed in OpenMP clauses. They are represented as PARM_DECLs
130 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
131 to avoid complaining about their uses with default(none). */
132 if (TREE_CODE (decl
) == PARM_DECL
133 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
134 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
135 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
137 /* COMMON and EQUIVALENCE decls are shared. They
138 are only referenced through DECL_VALUE_EXPR of the variables
139 contained in them. If those are privatized, they will not be
140 gimplified to the COMMON or EQUIVALENCE decls. */
141 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
142 return OMP_CLAUSE_DEFAULT_SHARED
;
/* Function result variables without a value-expr also default shared.  */
144 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
145 return OMP_CLAUSE_DEFAULT_SHARED
;
147 /* These are either array or derived parameters, or vtables.
148 In the former cases, the OpenMP standard doesn't consider them to be
149 variables at all (they can't be redefined), but they can nevertheless appear
150 in parallel/task regions and for default(none) purposes treat them as shared.
151 For vtables likely the same handling is desirable. */
152 if (VAR_P (decl
) && TREE_READONLY (decl
) && TREE_STATIC (decl
))
153 return OMP_CLAUSE_DEFAULT_SHARED
;
155 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
158 /* Return decl that should be used when reporting DEFAULT(NONE)
/* NOTE(review): lossy extraction -- the rest of the header comment, the
   return type (presumably tree), the opening brace and the final
   `return decl;` fallthrough are among the dropped original lines
   (159-161, 163, 168+).  */
/* If DECL is an artificial decl carrying a saved array descriptor,
   report the descriptor decl instead, so diagnostics name the user
   variable.  */
162 gfc_omp_report_decl (tree decl
)
164 if (DECL_ARTIFICIAL (decl
)
165 && DECL_LANG_SPECIFIC (decl
)
166 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
167 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
172 /* Return true if TYPE has any allocatable components. */
/* NOTE(review): lossy extraction -- return type (presumably bool),
   braces, the declarations of `field`/`ftype`, and the per-branch
   return statements are among the missing original lines.  The visible
   logic: peel pointer/descriptor/array wrappers off TYPE, then scan
   RECORD_TYPE fields for scalar allocatables, allocatable-array
   descriptors, or (recursively) fields that themselves contain
   allocatable components.  */
175 gfc_has_alloc_comps (tree type
, tree decl
)
179 if (POINTER_TYPE_P (type
))
181 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
182 type
= TREE_TYPE (type
);
/* Scalar pointer decls evidently take a different (missing) branch.  */
183 else if (GFC_DECL_GET_SCALAR_POINTER (decl
))
187 if (GFC_DESCRIPTOR_TYPE_P (type
) || GFC_ARRAY_TYPE_P (type
))
188 type
= gfc_get_element_type (type
);
/* Non-record types cannot have components; the result line for this
   early exit is missing.  */
190 if (TREE_CODE (type
) != RECORD_TYPE
)
193 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
195 ftype
= TREE_TYPE (field
);
196 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
198 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
199 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
/* Recursive check for nested allocatable components.  */
201 if (gfc_has_alloc_comps (ftype
, field
))
207 /* Return true if DECL in private clause needs
208 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
/* NOTE(review): lossy extraction -- return type (presumably bool),
   braces and the per-branch return statements are among the dropped
   original lines.  Visible logic: a reference to the outer variable is
   needed for allocatable-array descriptors, scalar allocatables, and
   types with allocatable components.  */
210 gfc_omp_private_outer_ref (tree decl
)
212 tree type
= TREE_TYPE (decl
);
/* Strip one level of indirection for by-reference privatization.  */
214 if (gfc_omp_privatize_by_reference (decl
))
215 type
= TREE_TYPE (type
);
217 if (GFC_DESCRIPTOR_TYPE_P (type
)
218 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
221 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
224 if (gfc_has_alloc_comps (type
, decl
))
230 /* Callback for gfc_omp_unshare_expr. */
/* NOTE(review): lossy extraction -- return type (presumably tree), the
   declaration of `t` (presumably `tree t = *tp;`), braces, assignments
   to *walk_subtrees and the return statement are among the missing
   original lines.  walk_tree callback signature.  */
233 gfc_omp_unshare_expr_r (tree
*tp
, int *walk_subtrees
, void *)
236 enum tree_code code
= TREE_CODE (t
);
238 /* Stop at types, decls, constants like copy_tree_r. */
239 if (TREE_CODE_CLASS (code
) == tcc_type
240 || TREE_CODE_CLASS (code
) == tcc_declaration
241 || TREE_CODE_CLASS (code
) == tcc_constant
/* Component references and MEM_REFs are unshared wholesale via
   unshare_expr; the surrounding condition tail is missing.  */
244 else if (handled_component_p (t
)
245 || TREE_CODE (t
) == MEM_REF
)
247 *tp
= unshare_expr (t
);
254 /* Unshare in expr anything that the FE which normally doesn't
255 care much about tree sharing (because during gimplification
256 everything is unshared) could cause problems with tree sharing
257 at omp-low.c time. */
/* NOTE(review): lossy extraction -- the return type (presumably tree),
   braces and the trailing `return expr;` are among the missing original
   lines (258-259, 261, 263+).  Wrapper that walks EXPR with
   gfc_omp_unshare_expr_r above.  */
260 gfc_omp_unshare_expr (tree expr
)
262 walk_tree (&expr
, gfc_omp_unshare_expr_r
, NULL
, NULL
);
/* Operation selector for gfc_walk_alloc_comps: destroy, default-construct
   or copy-construct the allocatable components of a decl.
   NOTE(review): lossy extraction -- the enum's braces (original lines
   267 and 271) are missing.  */
266 enum walk_alloc_comps
268 WALK_ALLOC_COMPS_DTOR
,
269 WALK_ALLOC_COMPS_DEFAULT_CTOR
,
270 WALK_ALLOC_COMPS_COPY_CTOR
273 /* Handle allocatable components in OpenMP clauses. */
276 gfc_walk_alloc_comps (tree decl
, tree dest
, tree var
,
277 enum walk_alloc_comps kind
)
279 stmtblock_t block
, tmpblock
;
280 tree type
= TREE_TYPE (decl
), then_b
, tem
, field
;
281 gfc_init_block (&block
);
283 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
285 if (GFC_DESCRIPTOR_TYPE_P (type
))
287 gfc_init_block (&tmpblock
);
288 tem
= gfc_full_array_size (&tmpblock
, decl
,
289 GFC_TYPE_ARRAY_RANK (type
));
290 then_b
= gfc_finish_block (&tmpblock
);
291 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (then_b
));
292 tem
= gfc_omp_unshare_expr (tem
);
293 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
294 gfc_array_index_type
, tem
,
299 if (!TYPE_DOMAIN (type
)
300 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
301 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
302 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
304 tem
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
305 TYPE_SIZE_UNIT (type
),
306 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
307 tem
= size_binop (MINUS_EXPR
, tem
, size_one_node
);
310 tem
= array_type_nelts (type
);
311 tem
= fold_convert (gfc_array_index_type
, tem
);
314 tree nelems
= gfc_evaluate_now (tem
, &block
);
315 tree index
= gfc_create_var (gfc_array_index_type
, "S");
317 gfc_init_block (&tmpblock
);
318 tem
= gfc_conv_array_data (decl
);
319 tree declvar
= build_fold_indirect_ref_loc (input_location
, tem
);
320 tree declvref
= gfc_build_array_ref (declvar
, index
, NULL
);
321 tree destvar
, destvref
= NULL_TREE
;
324 tem
= gfc_conv_array_data (dest
);
325 destvar
= build_fold_indirect_ref_loc (input_location
, tem
);
326 destvref
= gfc_build_array_ref (destvar
, index
, NULL
);
328 gfc_add_expr_to_block (&tmpblock
,
329 gfc_walk_alloc_comps (declvref
, destvref
,
333 gfc_init_loopinfo (&loop
);
335 loop
.from
[0] = gfc_index_zero_node
;
336 loop
.loopvar
[0] = index
;
338 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
339 gfc_add_block_to_block (&block
, &loop
.pre
);
340 return gfc_finish_block (&block
);
342 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var
))
344 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
346 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
347 type
= TREE_TYPE (decl
);
350 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
);
351 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
353 tree ftype
= TREE_TYPE (field
);
354 tree declf
, destf
= NULL_TREE
;
355 bool has_alloc_comps
= gfc_has_alloc_comps (ftype
, field
);
356 if ((!GFC_DESCRIPTOR_TYPE_P (ftype
)
357 || GFC_TYPE_ARRAY_AKIND (ftype
) != GFC_ARRAY_ALLOCATABLE
)
358 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field
)
361 declf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
362 decl
, field
, NULL_TREE
);
364 destf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
365 dest
, field
, NULL_TREE
);
370 case WALK_ALLOC_COMPS_DTOR
:
372 case WALK_ALLOC_COMPS_DEFAULT_CTOR
:
373 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
374 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
376 gfc_add_modify (&block
, unshare_expr (destf
),
377 unshare_expr (declf
));
378 tem
= gfc_duplicate_allocatable_nocopy
379 (destf
, declf
, ftype
,
380 GFC_TYPE_ARRAY_RANK (ftype
));
382 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
383 tem
= gfc_duplicate_allocatable_nocopy (destf
, declf
, ftype
, 0);
385 case WALK_ALLOC_COMPS_COPY_CTOR
:
386 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
387 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
388 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
,
389 GFC_TYPE_ARRAY_RANK (ftype
),
391 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
392 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
, 0,
397 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
400 gfc_init_block (&tmpblock
);
401 gfc_add_expr_to_block (&tmpblock
,
402 gfc_walk_alloc_comps (declf
, destf
,
404 then_b
= gfc_finish_block (&tmpblock
);
405 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
406 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
407 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
408 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
409 tem
= unshare_expr (declf
);
414 tem
= fold_convert (pvoid_type_node
, tem
);
415 tem
= fold_build2_loc (input_location
, NE_EXPR
,
416 logical_type_node
, tem
,
418 then_b
= build3_loc (input_location
, COND_EXPR
, void_type_node
,
420 build_empty_stmt (input_location
));
422 gfc_add_expr_to_block (&block
, then_b
);
424 if (kind
== WALK_ALLOC_COMPS_DTOR
)
426 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
427 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
429 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
430 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
,
431 NULL_TREE
, NULL_TREE
, true,
433 GFC_CAF_COARRAY_NOCOARRAY
);
434 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
436 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
438 tem
= gfc_call_free (unshare_expr (declf
));
439 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
444 return gfc_finish_block (&block
);
447 /* Return code to initialize DECL with its default constructor, or
448 NULL if there's nothing to do. */
451 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
453 tree type
= TREE_TYPE (decl
), size
, ptr
, cond
, then_b
, else_b
;
454 stmtblock_t block
, cond_block
;
456 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_PRIVATE
457 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LASTPRIVATE
458 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
459 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_REDUCTION
);
461 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
462 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
463 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
465 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
468 gfc_start_block (&block
);
469 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
470 OMP_CLAUSE_DECL (clause
),
471 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
472 gfc_add_expr_to_block (&block
, tem
);
473 return gfc_finish_block (&block
);
478 gcc_assert (outer
!= NULL_TREE
);
480 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
481 "not currently allocated" allocation status if outer
482 array is "not currently allocated", otherwise should be allocated. */
483 gfc_start_block (&block
);
485 gfc_init_block (&cond_block
);
487 if (GFC_DESCRIPTOR_TYPE_P (type
))
489 gfc_add_modify (&cond_block
, decl
, outer
);
490 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
491 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
492 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
494 gfc_conv_descriptor_lbound_get (decl
, rank
));
495 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
496 size
, gfc_index_one_node
);
497 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
498 size
= fold_build2_loc (input_location
, MULT_EXPR
,
499 gfc_array_index_type
, size
,
500 gfc_conv_descriptor_stride_get (decl
, rank
));
501 tree esize
= fold_convert (gfc_array_index_type
,
502 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
503 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
505 size
= unshare_expr (size
);
506 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
510 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
511 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
512 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
513 if (GFC_DESCRIPTOR_TYPE_P (type
))
514 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
), ptr
);
516 gfc_add_modify (&cond_block
, unshare_expr (decl
),
517 fold_convert (TREE_TYPE (decl
), ptr
));
518 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
520 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
521 OMP_CLAUSE_DECL (clause
),
522 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
523 gfc_add_expr_to_block (&cond_block
, tem
);
525 then_b
= gfc_finish_block (&cond_block
);
527 /* Reduction clause requires allocated ALLOCATABLE. */
528 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_REDUCTION
)
530 gfc_init_block (&cond_block
);
531 if (GFC_DESCRIPTOR_TYPE_P (type
))
532 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
),
535 gfc_add_modify (&cond_block
, unshare_expr (decl
),
536 build_zero_cst (TREE_TYPE (decl
)));
537 else_b
= gfc_finish_block (&cond_block
);
539 tree tem
= fold_convert (pvoid_type_node
,
540 GFC_DESCRIPTOR_TYPE_P (type
)
541 ? gfc_conv_descriptor_data_get (outer
) : outer
);
542 tem
= unshare_expr (tem
);
543 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
544 tem
, null_pointer_node
);
545 gfc_add_expr_to_block (&block
,
546 build3_loc (input_location
, COND_EXPR
,
547 void_type_node
, cond
, then_b
,
551 gfc_add_expr_to_block (&block
, then_b
);
553 return gfc_finish_block (&block
);
556 /* Build and return code for a copy constructor from SRC to DEST. */
559 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
561 tree type
= TREE_TYPE (dest
), ptr
, size
, call
;
562 tree cond
, then_b
, else_b
;
563 stmtblock_t block
, cond_block
;
565 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
566 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
568 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
569 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
570 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
572 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
574 gfc_start_block (&block
);
575 gfc_add_modify (&block
, dest
, src
);
576 tree tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
577 WALK_ALLOC_COMPS_COPY_CTOR
);
578 gfc_add_expr_to_block (&block
, tem
);
579 return gfc_finish_block (&block
);
582 return build2_v (MODIFY_EXPR
, dest
, src
);
585 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
586 and copied from SRC. */
587 gfc_start_block (&block
);
589 gfc_init_block (&cond_block
);
591 gfc_add_modify (&cond_block
, dest
, src
);
592 if (GFC_DESCRIPTOR_TYPE_P (type
))
594 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
595 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
596 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
598 gfc_conv_descriptor_lbound_get (dest
, rank
));
599 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
600 size
, gfc_index_one_node
);
601 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
602 size
= fold_build2_loc (input_location
, MULT_EXPR
,
603 gfc_array_index_type
, size
,
604 gfc_conv_descriptor_stride_get (dest
, rank
));
605 tree esize
= fold_convert (gfc_array_index_type
,
606 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
607 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
609 size
= unshare_expr (size
);
610 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
614 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
615 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
616 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
617 if (GFC_DESCRIPTOR_TYPE_P (type
))
618 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
), ptr
);
620 gfc_add_modify (&cond_block
, unshare_expr (dest
),
621 fold_convert (TREE_TYPE (dest
), ptr
));
623 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
624 ? gfc_conv_descriptor_data_get (src
) : src
;
625 srcptr
= unshare_expr (srcptr
);
626 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
627 call
= build_call_expr_loc (input_location
,
628 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
630 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
631 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
633 tree tem
= gfc_walk_alloc_comps (src
, dest
,
634 OMP_CLAUSE_DECL (clause
),
635 WALK_ALLOC_COMPS_COPY_CTOR
);
636 gfc_add_expr_to_block (&cond_block
, tem
);
638 then_b
= gfc_finish_block (&cond_block
);
640 gfc_init_block (&cond_block
);
641 if (GFC_DESCRIPTOR_TYPE_P (type
))
642 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
),
645 gfc_add_modify (&cond_block
, unshare_expr (dest
),
646 build_zero_cst (TREE_TYPE (dest
)));
647 else_b
= gfc_finish_block (&cond_block
);
649 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
650 unshare_expr (srcptr
), null_pointer_node
);
651 gfc_add_expr_to_block (&block
,
652 build3_loc (input_location
, COND_EXPR
,
653 void_type_node
, cond
, then_b
, else_b
));
655 return gfc_finish_block (&block
);
658 /* Similarly, except use an intrinsic or pointer assignment operator
662 gfc_omp_clause_assign_op (tree clause
, tree dest
, tree src
)
664 tree type
= TREE_TYPE (dest
), ptr
, size
, call
, nonalloc
;
665 tree cond
, then_b
, else_b
;
666 stmtblock_t block
, cond_block
, cond_block2
, inner_block
;
668 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
669 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
670 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
672 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
674 gfc_start_block (&block
);
675 /* First dealloc any allocatable components in DEST. */
676 tree tem
= gfc_walk_alloc_comps (dest
, NULL_TREE
,
677 OMP_CLAUSE_DECL (clause
),
678 WALK_ALLOC_COMPS_DTOR
);
679 gfc_add_expr_to_block (&block
, tem
);
680 /* Then copy over toplevel data. */
681 gfc_add_modify (&block
, dest
, src
);
682 /* Finally allocate any allocatable components and copy. */
683 tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
684 WALK_ALLOC_COMPS_COPY_CTOR
);
685 gfc_add_expr_to_block (&block
, tem
);
686 return gfc_finish_block (&block
);
689 return build2_v (MODIFY_EXPR
, dest
, src
);
692 gfc_start_block (&block
);
694 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
696 then_b
= gfc_walk_alloc_comps (dest
, NULL_TREE
, OMP_CLAUSE_DECL (clause
),
697 WALK_ALLOC_COMPS_DTOR
);
698 tree tem
= fold_convert (pvoid_type_node
,
699 GFC_DESCRIPTOR_TYPE_P (type
)
700 ? gfc_conv_descriptor_data_get (dest
) : dest
);
701 tem
= unshare_expr (tem
);
702 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
703 tem
, null_pointer_node
);
704 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
705 then_b
, build_empty_stmt (input_location
));
706 gfc_add_expr_to_block (&block
, tem
);
709 gfc_init_block (&cond_block
);
711 if (GFC_DESCRIPTOR_TYPE_P (type
))
713 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
714 size
= gfc_conv_descriptor_ubound_get (src
, rank
);
715 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
717 gfc_conv_descriptor_lbound_get (src
, rank
));
718 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
719 size
, gfc_index_one_node
);
720 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
721 size
= fold_build2_loc (input_location
, MULT_EXPR
,
722 gfc_array_index_type
, size
,
723 gfc_conv_descriptor_stride_get (src
, rank
));
724 tree esize
= fold_convert (gfc_array_index_type
,
725 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
726 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
728 size
= unshare_expr (size
);
729 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
733 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
734 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
736 tree destptr
= GFC_DESCRIPTOR_TYPE_P (type
)
737 ? gfc_conv_descriptor_data_get (dest
) : dest
;
738 destptr
= unshare_expr (destptr
);
739 destptr
= fold_convert (pvoid_type_node
, destptr
);
740 gfc_add_modify (&cond_block
, ptr
, destptr
);
742 nonalloc
= fold_build2_loc (input_location
, EQ_EXPR
, logical_type_node
,
743 destptr
, null_pointer_node
);
745 if (GFC_DESCRIPTOR_TYPE_P (type
))
748 for (i
= 0; i
< GFC_TYPE_ARRAY_RANK (type
); i
++)
750 tree rank
= gfc_rank_cst
[i
];
751 tree tem
= gfc_conv_descriptor_ubound_get (src
, rank
);
752 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
753 gfc_array_index_type
, tem
,
754 gfc_conv_descriptor_lbound_get (src
, rank
));
755 tem
= fold_build2_loc (input_location
, PLUS_EXPR
,
756 gfc_array_index_type
, tem
,
757 gfc_conv_descriptor_lbound_get (dest
, rank
));
758 tem
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
759 tem
, gfc_conv_descriptor_ubound_get (dest
,
761 cond
= fold_build2_loc (input_location
, TRUTH_ORIF_EXPR
,
762 logical_type_node
, cond
, tem
);
766 gfc_init_block (&cond_block2
);
768 if (GFC_DESCRIPTOR_TYPE_P (type
))
770 gfc_init_block (&inner_block
);
771 gfc_allocate_using_malloc (&inner_block
, ptr
, size
, NULL_TREE
);
772 then_b
= gfc_finish_block (&inner_block
);
774 gfc_init_block (&inner_block
);
775 gfc_add_modify (&inner_block
, ptr
,
776 gfc_call_realloc (&inner_block
, ptr
, size
));
777 else_b
= gfc_finish_block (&inner_block
);
779 gfc_add_expr_to_block (&cond_block2
,
780 build3_loc (input_location
, COND_EXPR
,
782 unshare_expr (nonalloc
),
784 gfc_add_modify (&cond_block2
, dest
, src
);
785 gfc_conv_descriptor_data_set (&cond_block2
, unshare_expr (dest
), ptr
);
789 gfc_allocate_using_malloc (&cond_block2
, ptr
, size
, NULL_TREE
);
790 gfc_add_modify (&cond_block2
, unshare_expr (dest
),
791 fold_convert (type
, ptr
));
793 then_b
= gfc_finish_block (&cond_block2
);
794 else_b
= build_empty_stmt (input_location
);
796 gfc_add_expr_to_block (&cond_block
,
797 build3_loc (input_location
, COND_EXPR
,
798 void_type_node
, unshare_expr (cond
),
801 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
802 ? gfc_conv_descriptor_data_get (src
) : src
;
803 srcptr
= unshare_expr (srcptr
);
804 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
805 call
= build_call_expr_loc (input_location
,
806 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
808 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
809 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
811 tree tem
= gfc_walk_alloc_comps (src
, dest
,
812 OMP_CLAUSE_DECL (clause
),
813 WALK_ALLOC_COMPS_COPY_CTOR
);
814 gfc_add_expr_to_block (&cond_block
, tem
);
816 then_b
= gfc_finish_block (&cond_block
);
818 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_COPYIN
)
820 gfc_init_block (&cond_block
);
821 if (GFC_DESCRIPTOR_TYPE_P (type
))
823 tree tmp
= gfc_conv_descriptor_data_get (unshare_expr (dest
));
824 tmp
= gfc_deallocate_with_status (tmp
, NULL_TREE
, NULL_TREE
,
825 NULL_TREE
, NULL_TREE
, true, NULL
,
826 GFC_CAF_COARRAY_NOCOARRAY
);
827 gfc_add_expr_to_block (&cond_block
, tmp
);
831 destptr
= gfc_evaluate_now (destptr
, &cond_block
);
832 gfc_add_expr_to_block (&cond_block
, gfc_call_free (destptr
));
833 gfc_add_modify (&cond_block
, unshare_expr (dest
),
834 build_zero_cst (TREE_TYPE (dest
)));
836 else_b
= gfc_finish_block (&cond_block
);
838 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
839 unshare_expr (srcptr
), null_pointer_node
);
840 gfc_add_expr_to_block (&block
,
841 build3_loc (input_location
, COND_EXPR
,
842 void_type_node
, cond
,
846 gfc_add_expr_to_block (&block
, then_b
);
848 return gfc_finish_block (&block
);
/* Emit into BLOCK a scalarized loop over 0..NELEMS that stores
   SRC[i] + ADD into DEST[i], used for OpenMP LINEAR-clause updates of
   array variables.  DEST/SRC may be ARRAY_TYPE lvalues or raw pointers
   (then indexed via byte offsets).
   NOTE(review): lossy extraction -- return type (presumably static
   void), braces, the gfc_loopinfo declaration, the POINTER_PLUS offset
   operands and the `srca + add` addend of the gfc_add_modify are among
   the dropped original lines (854, 858, 861, 864-866, 873, 876-877,
   880-882, 884, 887, 890-891).  Annotation only.  */
852 gfc_omp_linear_clause_add_loop (stmtblock_t
*block
, tree dest
, tree src
,
853 tree add
, tree nelems
)
855 stmtblock_t tmpblock
;
856 tree desta
, srca
, index
= gfc_create_var (gfc_array_index_type
, "S");
857 nelems
= gfc_evaluate_now (nelems
, block
);
859 gfc_init_block (&tmpblock
);
/* Array case: plain element references through the scalarizer index.  */
860 if (TREE_CODE (TREE_TYPE (dest
)) == ARRAY_TYPE
)
862 desta
= gfc_build_array_ref (dest
, index
, NULL
);
863 srca
= gfc_build_array_ref (src
, index
, NULL
);
/* Pointer case: compute byte offset index * element-size and
   dereference DEST/SRC plus that offset.  */
867 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest
)));
868 tree idx
= fold_build2 (MULT_EXPR
, sizetype
,
869 fold_convert (sizetype
, index
),
870 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest
))));
871 desta
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
872 TREE_TYPE (dest
), dest
,
874 srca
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
875 TREE_TYPE (src
), src
,
/* Loop body: dest[i] = src[i] + add (addend operand missing here).  */
878 gfc_add_modify (&tmpblock
, desta
,
879 fold_build2 (PLUS_EXPR
, TREE_TYPE (desta
),
/* Drive the body with the gfortran scalarizer over [0, nelems].  */
883 gfc_init_loopinfo (&loop
);
885 loop
.from
[0] = gfc_index_zero_node
;
886 loop
.loopvar
[0] = index
;
888 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
889 gfc_add_block_to_block (block
, &loop
.pre
);
892 /* Build and return code for a constructor of DEST that initializes
893 it to SRC plus ADD (ADD is scalar integer). */
896 gfc_omp_clause_linear_ctor (tree clause
, tree dest
, tree src
, tree add
)
898 tree type
= TREE_TYPE (dest
), ptr
, size
, nelems
= NULL_TREE
;
901 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
903 gfc_start_block (&block
);
904 add
= gfc_evaluate_now (add
, &block
);
906 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
907 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
908 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
910 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
911 if (!TYPE_DOMAIN (type
)
912 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
913 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
914 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
916 nelems
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
917 TYPE_SIZE_UNIT (type
),
918 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
919 nelems
= size_binop (MINUS_EXPR
, nelems
, size_one_node
);
922 nelems
= array_type_nelts (type
);
923 nelems
= fold_convert (gfc_array_index_type
, nelems
);
925 gfc_omp_linear_clause_add_loop (&block
, dest
, src
, add
, nelems
);
926 return gfc_finish_block (&block
);
929 /* Allocatable arrays in LINEAR clauses need to be allocated
930 and copied from SRC. */
931 gfc_add_modify (&block
, dest
, src
);
932 if (GFC_DESCRIPTOR_TYPE_P (type
))
934 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
935 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
936 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
938 gfc_conv_descriptor_lbound_get (dest
, rank
));
939 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
940 size
, gfc_index_one_node
);
941 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
942 size
= fold_build2_loc (input_location
, MULT_EXPR
,
943 gfc_array_index_type
, size
,
944 gfc_conv_descriptor_stride_get (dest
, rank
));
945 tree esize
= fold_convert (gfc_array_index_type
,
946 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
947 nelems
= gfc_evaluate_now (unshare_expr (size
), &block
);
948 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
949 nelems
, unshare_expr (esize
));
950 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
952 nelems
= fold_build2_loc (input_location
, MINUS_EXPR
,
953 gfc_array_index_type
, nelems
,
957 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
958 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
959 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
960 if (GFC_DESCRIPTOR_TYPE_P (type
))
962 gfc_conv_descriptor_data_set (&block
, unshare_expr (dest
), ptr
);
963 tree etype
= gfc_get_element_type (type
);
964 ptr
= fold_convert (build_pointer_type (etype
), ptr
);
965 tree srcptr
= gfc_conv_descriptor_data_get (unshare_expr (src
));
966 srcptr
= fold_convert (build_pointer_type (etype
), srcptr
);
967 gfc_omp_linear_clause_add_loop (&block
, ptr
, srcptr
, add
, nelems
);
971 gfc_add_modify (&block
, unshare_expr (dest
),
972 fold_convert (TREE_TYPE (dest
), ptr
));
973 ptr
= fold_convert (TREE_TYPE (dest
), ptr
);
974 tree dstm
= build_fold_indirect_ref (ptr
);
975 tree srcm
= build_fold_indirect_ref (unshare_expr (src
));
976 gfc_add_modify (&block
, dstm
,
977 fold_build2 (PLUS_EXPR
, TREE_TYPE (add
), srcm
, add
));
979 return gfc_finish_block (&block
);
982 /* Build and return code destructing DECL. Return NULL if nothing
/* NOTE(review): lossy extraction -- the tail of the header comment, the
   return type (presumably tree), braces, several else branches, the
   stmtblock_t/then_b declarations and the final return are among the
   dropped original lines.  Visible logic: for allocatable arrays
   (descriptor) deallocate-with-status the data pointer, for scalar
   allocatables free the pointer; if the type also has allocatable
   components, destroy those first and guard the whole destructor with a
   `data != NULL` COND_EXPR.  Annotation only; not compilable as-is.  */
986 gfc_omp_clause_dtor (tree clause
, tree decl
)
988 tree type
= TREE_TYPE (decl
), tem
;
/* Non-allocatable decls: only allocatable *components* (if any) need
   destruction, via gfc_walk_alloc_comps.  */
990 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
991 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
992 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
994 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
995 return gfc_walk_alloc_comps (decl
, NULL_TREE
,
996 OMP_CLAUSE_DECL (clause
),
997 WALK_ALLOC_COMPS_DTOR
);
1001 if (GFC_DESCRIPTOR_TYPE_P (type
))
1003 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
1004 to be deallocated if they were allocated. */
1005 tem
= gfc_conv_descriptor_data_get (decl
);
1006 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
, NULL_TREE
,
1007 NULL_TREE
, true, NULL
,
1008 GFC_CAF_COARRAY_NOCOARRAY
);
/* Scalar allocatable path (the `else` introducing it is missing).  */
1011 tem
= gfc_call_free (decl
);
1012 tem
= gfc_omp_unshare_expr (tem
);
/* Destroy allocatable components before freeing the object itself,
   then guard with `data pointer != NULL`.  */
1014 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1019 gfc_init_block (&block
);
1020 gfc_add_expr_to_block (&block
,
1021 gfc_walk_alloc_comps (decl
, NULL_TREE
,
1022 OMP_CLAUSE_DECL (clause
),
1023 WALK_ALLOC_COMPS_DTOR
));
1024 gfc_add_expr_to_block (&block
, tem
);
1025 then_b
= gfc_finish_block (&block
);
1027 tem
= fold_convert (pvoid_type_node
,
1028 GFC_DESCRIPTOR_TYPE_P (type
)
1029 ? gfc_conv_descriptor_data_get (decl
) : decl
);
1030 tem
= unshare_expr (tem
);
1031 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1032 tem
, null_pointer_node
);
1033 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1034 then_b
, build_empty_stmt (input_location
));
1041 gfc_omp_finish_clause (tree c
, gimple_seq
*pre_p
)
1043 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
1046 tree decl
= OMP_CLAUSE_DECL (c
);
1048 /* Assumed-size arrays can't be mapped implicitly, they have to be
1049 mapped explicitly using array sections. */
1050 if (TREE_CODE (decl
) == PARM_DECL
1051 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
1052 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
1053 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
1054 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
1057 error_at (OMP_CLAUSE_LOCATION (c
),
1058 "implicit mapping of assumed size array %qD", decl
);
1062 tree c2
= NULL_TREE
, c3
= NULL_TREE
, c4
= NULL_TREE
;
1063 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
1065 if (!gfc_omp_privatize_by_reference (decl
)
1066 && !GFC_DECL_GET_SCALAR_POINTER (decl
)
1067 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1068 && !GFC_DECL_CRAY_POINTEE (decl
)
1069 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
1071 tree orig_decl
= decl
;
1072 c4
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1073 OMP_CLAUSE_SET_MAP_KIND (c4
, GOMP_MAP_POINTER
);
1074 OMP_CLAUSE_DECL (c4
) = decl
;
1075 OMP_CLAUSE_SIZE (c4
) = size_int (0);
1076 decl
= build_fold_indirect_ref (decl
);
1077 OMP_CLAUSE_DECL (c
) = decl
;
1078 OMP_CLAUSE_SIZE (c
) = NULL_TREE
;
1079 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1080 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1081 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1083 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1084 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1085 OMP_CLAUSE_DECL (c3
) = unshare_expr (decl
);
1086 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1087 decl
= build_fold_indirect_ref (decl
);
1088 OMP_CLAUSE_DECL (c
) = decl
;
1091 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1094 gfc_start_block (&block
);
1095 tree type
= TREE_TYPE (decl
);
1096 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1097 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
1098 ptr
= build_fold_indirect_ref (ptr
);
1099 OMP_CLAUSE_DECL (c
) = ptr
;
1100 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1101 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_TO_PSET
);
1102 OMP_CLAUSE_DECL (c2
) = decl
;
1103 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (type
);
1104 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1105 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1106 OMP_CLAUSE_DECL (c3
) = gfc_conv_descriptor_data_get (decl
);
1107 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1108 tree size
= create_tmp_var (gfc_array_index_type
);
1109 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
1110 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
1111 if (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
1112 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
)
1114 stmtblock_t cond_block
;
1115 tree tem
, then_b
, else_b
, zero
, cond
;
1117 gfc_init_block (&cond_block
);
1118 tem
= gfc_full_array_size (&cond_block
, decl
,
1119 GFC_TYPE_ARRAY_RANK (type
));
1120 gfc_add_modify (&cond_block
, size
, tem
);
1121 gfc_add_modify (&cond_block
, size
,
1122 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1124 then_b
= gfc_finish_block (&cond_block
);
1125 gfc_init_block (&cond_block
);
1126 zero
= build_int_cst (gfc_array_index_type
, 0);
1127 gfc_add_modify (&cond_block
, size
, zero
);
1128 else_b
= gfc_finish_block (&cond_block
);
1129 tem
= gfc_conv_descriptor_data_get (decl
);
1130 tem
= fold_convert (pvoid_type_node
, tem
);
1131 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1132 logical_type_node
, tem
, null_pointer_node
);
1133 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1134 void_type_node
, cond
,
1139 gfc_add_modify (&block
, size
,
1140 gfc_full_array_size (&block
, decl
,
1141 GFC_TYPE_ARRAY_RANK (type
)));
1142 gfc_add_modify (&block
, size
,
1143 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1146 OMP_CLAUSE_SIZE (c
) = size
;
1147 tree stmt
= gfc_finish_block (&block
);
1148 gimplify_and_add (stmt
, pre_p
);
1151 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
1153 = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
1154 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1157 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (last
);
1158 OMP_CLAUSE_CHAIN (last
) = c2
;
1163 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (last
);
1164 OMP_CLAUSE_CHAIN (last
) = c3
;
1169 OMP_CLAUSE_CHAIN (c4
) = OMP_CLAUSE_CHAIN (last
);
1170 OMP_CLAUSE_CHAIN (last
) = c4
;
1176 /* Return true if DECL is a scalar variable (for the purpose of
1177 implicit firstprivatization). */
1180 gfc_omp_scalar_p (tree decl
)
1182 tree type
= TREE_TYPE (decl
);
1183 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1184 type
= TREE_TYPE (type
);
1185 if (TREE_CODE (type
) == POINTER_TYPE
)
1187 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1188 || GFC_DECL_GET_SCALAR_POINTER (decl
))
1189 type
= TREE_TYPE (type
);
1190 if (GFC_ARRAY_TYPE_P (type
)
1191 || GFC_CLASS_TYPE_P (type
))
1194 if (TYPE_STRING_FLAG (type
))
1196 if (INTEGRAL_TYPE_P (type
)
1197 || SCALAR_FLOAT_TYPE_P (type
)
1198 || COMPLEX_FLOAT_TYPE_P (type
))
1204 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1205 disregarded in OpenMP construct, because it is going to be
1206 remapped during OpenMP lowering. SHARED is true if DECL
1207 is going to be shared, false if it is going to be privatized. */
1210 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
1212 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1213 && DECL_HAS_VALUE_EXPR_P (decl
))
1215 tree value
= DECL_VALUE_EXPR (decl
);
1217 if (TREE_CODE (value
) == COMPONENT_REF
1218 && VAR_P (TREE_OPERAND (value
, 0))
1219 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1221 /* If variable in COMMON or EQUIVALENCE is privatized, return
1222 true, as just that variable is supposed to be privatized,
1223 not the whole COMMON or whole EQUIVALENCE.
1224 For shared variables in COMMON or EQUIVALENCE, let them be
1225 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1226 from the same COMMON or EQUIVALENCE just one sharing of the
1227 whole COMMON or EQUIVALENCE is enough. */
1232 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
1238 /* Return true if DECL that is shared iff SHARED is true should
1239 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1243 gfc_omp_private_debug_clause (tree decl
, bool shared
)
1245 if (GFC_DECL_CRAY_POINTEE (decl
))
1248 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1249 && DECL_HAS_VALUE_EXPR_P (decl
))
1251 tree value
= DECL_VALUE_EXPR (decl
);
1253 if (TREE_CODE (value
) == COMPONENT_REF
1254 && VAR_P (TREE_OPERAND (value
, 0))
1255 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1262 /* Register language specific type size variables as potentially OpenMP
1263 firstprivate variables. */
1266 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
1268 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
1272 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
1273 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
1275 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
1276 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
1277 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
1279 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
1280 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
1286 gfc_trans_add_clause (tree node
, tree tail
)
1288 OMP_CLAUSE_CHAIN (node
) = tail
;
1293 gfc_trans_omp_variable (gfc_symbol
*sym
, bool declare_simd
)
1298 gfc_symbol
*proc_sym
;
1299 gfc_formal_arglist
*f
;
1301 gcc_assert (sym
->attr
.dummy
);
1302 proc_sym
= sym
->ns
->proc_name
;
1303 if (proc_sym
->attr
.entry_master
)
1305 if (gfc_return_by_reference (proc_sym
))
1308 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1311 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1317 return build_int_cst (integer_type_node
, cnt
);
1320 tree t
= gfc_get_symbol_decl (sym
);
1324 bool alternate_entry
;
1327 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1328 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1329 && sym
->result
== sym
;
1330 entry_master
= sym
->attr
.result
1331 && sym
->ns
->proc_name
->attr
.entry_master
1332 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1333 parent_decl
= current_function_decl
1334 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1336 if ((t
== parent_decl
&& return_value
)
1337 || (sym
->ns
&& sym
->ns
->proc_name
1338 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1339 && (alternate_entry
|| entry_master
)))
1344 /* Special case for assigning the return value of a function.
1345 Self recursive functions must have an explicit return value. */
1346 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1347 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1349 /* Similarly for alternate entry points. */
1350 else if (alternate_entry
1351 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1354 gfc_entry_list
*el
= NULL
;
1356 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1359 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1364 else if (entry_master
1365 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1367 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1373 gfc_trans_omp_variable_list (enum omp_clause_code code
,
1374 gfc_omp_namelist
*namelist
, tree list
,
1377 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1378 if (namelist
->sym
->attr
.referenced
|| declare_simd
)
1380 tree t
= gfc_trans_omp_variable (namelist
->sym
, declare_simd
);
1381 if (t
!= error_mark_node
)
1383 tree node
= build_omp_clause (input_location
, code
);
1384 OMP_CLAUSE_DECL (node
) = t
;
1385 list
= gfc_trans_add_clause (node
, list
);
1391 struct omp_udr_find_orig_data
1393 gfc_omp_udr
*omp_udr
;
1398 omp_udr_find_orig (gfc_expr
**e
, int *walk_subtrees ATTRIBUTE_UNUSED
,
1401 struct omp_udr_find_orig_data
*cd
= (struct omp_udr_find_orig_data
*) data
;
1402 if ((*e
)->expr_type
== EXPR_VARIABLE
1403 && (*e
)->symtree
->n
.sym
== cd
->omp_udr
->omp_orig
)
1404 cd
->omp_orig_seen
= true;
1410 gfc_trans_omp_array_reduction_or_udr (tree c
, gfc_omp_namelist
*n
, locus where
)
1412 gfc_symbol
*sym
= n
->sym
;
1413 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
1414 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
1415 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
1416 gfc_symbol omp_var_copy
[4];
1417 gfc_expr
*e1
, *e2
, *e3
, *e4
;
1419 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
1420 locus old_loc
= gfc_current_locus
;
1423 gfc_omp_udr
*udr
= n
->udr
? n
->udr
->udr
: NULL
;
1425 decl
= OMP_CLAUSE_DECL (c
);
1426 gfc_current_locus
= where
;
1427 type
= TREE_TYPE (decl
);
1428 outer_decl
= create_tmp_var_raw (type
);
1429 if (TREE_CODE (decl
) == PARM_DECL
1430 && TREE_CODE (type
) == REFERENCE_TYPE
1431 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
1432 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
1434 decl
= build_fold_indirect_ref (decl
);
1435 type
= TREE_TYPE (type
);
1438 /* Create a fake symbol for init value. */
1439 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
1440 init_val_sym
.ns
= sym
->ns
;
1441 init_val_sym
.name
= sym
->name
;
1442 init_val_sym
.ts
= sym
->ts
;
1443 init_val_sym
.attr
.referenced
= 1;
1444 init_val_sym
.declared_at
= where
;
1445 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
1446 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1447 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
1448 else if (udr
->initializer_ns
)
1449 backend_decl
= NULL
;
1451 switch (sym
->ts
.type
)
1457 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
1460 backend_decl
= NULL_TREE
;
1463 init_val_sym
.backend_decl
= backend_decl
;
1465 /* Create a fake symbol for the outer array reference. */
1468 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
1469 outer_sym
.attr
.dummy
= 0;
1470 outer_sym
.attr
.result
= 0;
1471 outer_sym
.attr
.flavor
= FL_VARIABLE
;
1472 outer_sym
.backend_decl
= outer_decl
;
1473 if (decl
!= OMP_CLAUSE_DECL (c
))
1474 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
1476 /* Create fake symtrees for it. */
1477 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
1478 symtree1
->n
.sym
= sym
;
1479 gcc_assert (symtree1
== root1
);
1481 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
1482 symtree2
->n
.sym
= &init_val_sym
;
1483 gcc_assert (symtree2
== root2
);
1485 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
1486 symtree3
->n
.sym
= &outer_sym
;
1487 gcc_assert (symtree3
== root3
);
1489 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
1492 omp_var_copy
[0] = *udr
->omp_out
;
1493 omp_var_copy
[1] = *udr
->omp_in
;
1494 *udr
->omp_out
= outer_sym
;
1495 *udr
->omp_in
= *sym
;
1496 if (udr
->initializer_ns
)
1498 omp_var_copy
[2] = *udr
->omp_priv
;
1499 omp_var_copy
[3] = *udr
->omp_orig
;
1500 *udr
->omp_priv
= *sym
;
1501 *udr
->omp_orig
= outer_sym
;
1505 /* Create expressions. */
1506 e1
= gfc_get_expr ();
1507 e1
->expr_type
= EXPR_VARIABLE
;
1509 e1
->symtree
= symtree1
;
1511 if (sym
->attr
.dimension
)
1513 e1
->ref
= ref
= gfc_get_ref ();
1514 ref
->type
= REF_ARRAY
;
1515 ref
->u
.ar
.where
= where
;
1516 ref
->u
.ar
.as
= sym
->as
;
1517 ref
->u
.ar
.type
= AR_FULL
;
1518 ref
->u
.ar
.dimen
= 0;
1520 t
= gfc_resolve_expr (e1
);
1524 if (backend_decl
!= NULL_TREE
)
1526 e2
= gfc_get_expr ();
1527 e2
->expr_type
= EXPR_VARIABLE
;
1529 e2
->symtree
= symtree2
;
1531 t
= gfc_resolve_expr (e2
);
1534 else if (udr
->initializer_ns
== NULL
)
1536 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
1537 e2
= gfc_default_initializer (&sym
->ts
);
1539 t
= gfc_resolve_expr (e2
);
1542 else if (n
->udr
->initializer
->op
== EXEC_ASSIGN
)
1544 e2
= gfc_copy_expr (n
->udr
->initializer
->expr2
);
1545 t
= gfc_resolve_expr (e2
);
1548 if (udr
&& udr
->initializer_ns
)
1550 struct omp_udr_find_orig_data cd
;
1552 cd
.omp_orig_seen
= false;
1553 gfc_code_walker (&n
->udr
->initializer
,
1554 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
1555 if (cd
.omp_orig_seen
)
1556 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
1559 e3
= gfc_copy_expr (e1
);
1560 e3
->symtree
= symtree3
;
1561 t
= gfc_resolve_expr (e3
);
1566 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
1570 e4
= gfc_add (e3
, e1
);
1573 e4
= gfc_multiply (e3
, e1
);
1575 case TRUTH_ANDIF_EXPR
:
1576 e4
= gfc_and (e3
, e1
);
1578 case TRUTH_ORIF_EXPR
:
1579 e4
= gfc_or (e3
, e1
);
1582 e4
= gfc_eqv (e3
, e1
);
1585 e4
= gfc_neqv (e3
, e1
);
1603 if (n
->udr
->combiner
->op
== EXEC_ASSIGN
)
1606 e3
= gfc_copy_expr (n
->udr
->combiner
->expr1
);
1607 e4
= gfc_copy_expr (n
->udr
->combiner
->expr2
);
1608 t
= gfc_resolve_expr (e3
);
1610 t
= gfc_resolve_expr (e4
);
1619 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
1620 intrinsic_sym
.ns
= sym
->ns
;
1621 intrinsic_sym
.name
= iname
;
1622 intrinsic_sym
.ts
= sym
->ts
;
1623 intrinsic_sym
.attr
.referenced
= 1;
1624 intrinsic_sym
.attr
.intrinsic
= 1;
1625 intrinsic_sym
.attr
.function
= 1;
1626 intrinsic_sym
.attr
.implicit_type
= 1;
1627 intrinsic_sym
.result
= &intrinsic_sym
;
1628 intrinsic_sym
.declared_at
= where
;
1630 symtree4
= gfc_new_symtree (&root4
, iname
);
1631 symtree4
->n
.sym
= &intrinsic_sym
;
1632 gcc_assert (symtree4
== root4
);
1634 e4
= gfc_get_expr ();
1635 e4
->expr_type
= EXPR_FUNCTION
;
1637 e4
->symtree
= symtree4
;
1638 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
1639 e4
->value
.function
.actual
->expr
= e3
;
1640 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
1641 e4
->value
.function
.actual
->next
->expr
= e1
;
1643 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1645 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1646 e1
= gfc_copy_expr (e1
);
1647 e3
= gfc_copy_expr (e3
);
1648 t
= gfc_resolve_expr (e4
);
1652 /* Create the init statement list. */
1655 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
1657 stmt
= gfc_trans_call (n
->udr
->initializer
, false,
1658 NULL_TREE
, NULL_TREE
, false);
1659 if (TREE_CODE (stmt
) != BIND_EXPR
)
1660 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1663 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
1665 /* Create the merge statement list. */
1668 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
1670 stmt
= gfc_trans_call (n
->udr
->combiner
, false,
1671 NULL_TREE
, NULL_TREE
, false);
1672 if (TREE_CODE (stmt
) != BIND_EXPR
)
1673 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1676 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
1678 /* And stick the placeholder VAR_DECL into the clause as well. */
1679 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
1681 gfc_current_locus
= old_loc
;
1694 gfc_free_array_spec (outer_sym
.as
);
1698 *udr
->omp_out
= omp_var_copy
[0];
1699 *udr
->omp_in
= omp_var_copy
[1];
1700 if (udr
->initializer_ns
)
1702 *udr
->omp_priv
= omp_var_copy
[2];
1703 *udr
->omp_orig
= omp_var_copy
[3];
1709 gfc_trans_omp_reduction_list (gfc_omp_namelist
*namelist
, tree list
,
1710 locus where
, bool mark_addressable
)
1712 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1713 if (namelist
->sym
->attr
.referenced
)
1715 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
1716 if (t
!= error_mark_node
)
1718 tree node
= build_omp_clause (where
.lb
->location
,
1719 OMP_CLAUSE_REDUCTION
);
1720 OMP_CLAUSE_DECL (node
) = t
;
1721 if (mark_addressable
)
1722 TREE_ADDRESSABLE (t
) = 1;
1723 switch (namelist
->u
.reduction_op
)
1725 case OMP_REDUCTION_PLUS
:
1726 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
1728 case OMP_REDUCTION_MINUS
:
1729 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
1731 case OMP_REDUCTION_TIMES
:
1732 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
1734 case OMP_REDUCTION_AND
:
1735 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
1737 case OMP_REDUCTION_OR
:
1738 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
1740 case OMP_REDUCTION_EQV
:
1741 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
1743 case OMP_REDUCTION_NEQV
:
1744 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
1746 case OMP_REDUCTION_MAX
:
1747 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
1749 case OMP_REDUCTION_MIN
:
1750 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
1752 case OMP_REDUCTION_IAND
:
1753 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
1755 case OMP_REDUCTION_IOR
:
1756 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
1758 case OMP_REDUCTION_IEOR
:
1759 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
1761 case OMP_REDUCTION_USER
:
1762 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
1767 if (namelist
->sym
->attr
.dimension
1768 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
1769 || namelist
->sym
->attr
.allocatable
)
1770 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
1771 list
= gfc_trans_add_clause (node
, list
);
1778 gfc_convert_expr_to_tree (stmtblock_t
*block
, gfc_expr
*expr
)
1783 gfc_init_se (&se
, NULL
);
1784 gfc_conv_expr (&se
, expr
);
1785 gfc_add_block_to_block (block
, &se
.pre
);
1786 result
= gfc_evaluate_now (se
.expr
, block
);
1787 gfc_add_block_to_block (block
, &se
.post
);
1792 static vec
<tree
, va_heap
, vl_embed
> *doacross_steps
;
1795 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
1796 locus where
, bool declare_simd
= false)
1798 tree omp_clauses
= NULL_TREE
, chunk_size
, c
;
1800 enum omp_clause_code clause_code
;
1803 if (clauses
== NULL
)
1806 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
1808 gfc_omp_namelist
*n
= clauses
->lists
[list
];
1814 case OMP_LIST_REDUCTION
:
1815 /* An OpenACC async clause indicates the need to set reduction
1816 arguments addressable, to allow asynchronous copy-out. */
1817 omp_clauses
= gfc_trans_omp_reduction_list (n
, omp_clauses
, where
,
1820 case OMP_LIST_PRIVATE
:
1821 clause_code
= OMP_CLAUSE_PRIVATE
;
1823 case OMP_LIST_SHARED
:
1824 clause_code
= OMP_CLAUSE_SHARED
;
1826 case OMP_LIST_FIRSTPRIVATE
:
1827 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
1829 case OMP_LIST_LASTPRIVATE
:
1830 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
1832 case OMP_LIST_COPYIN
:
1833 clause_code
= OMP_CLAUSE_COPYIN
;
1835 case OMP_LIST_COPYPRIVATE
:
1836 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
1838 case OMP_LIST_UNIFORM
:
1839 clause_code
= OMP_CLAUSE_UNIFORM
;
1841 case OMP_LIST_USE_DEVICE
:
1842 case OMP_LIST_USE_DEVICE_PTR
:
1843 clause_code
= OMP_CLAUSE_USE_DEVICE_PTR
;
1845 case OMP_LIST_IS_DEVICE_PTR
:
1846 clause_code
= OMP_CLAUSE_IS_DEVICE_PTR
;
1851 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
1854 case OMP_LIST_ALIGNED
:
1855 for (; n
!= NULL
; n
= n
->next
)
1856 if (n
->sym
->attr
.referenced
|| declare_simd
)
1858 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1859 if (t
!= error_mark_node
)
1861 tree node
= build_omp_clause (input_location
,
1862 OMP_CLAUSE_ALIGNED
);
1863 OMP_CLAUSE_DECL (node
) = t
;
1869 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
1872 gfc_init_se (&se
, NULL
);
1873 gfc_conv_expr (&se
, n
->expr
);
1874 gfc_add_block_to_block (block
, &se
.pre
);
1875 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
1876 gfc_add_block_to_block (block
, &se
.post
);
1878 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
1880 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1884 case OMP_LIST_LINEAR
:
1886 gfc_expr
*last_step_expr
= NULL
;
1887 tree last_step
= NULL_TREE
;
1888 bool last_step_parm
= false;
1890 for (; n
!= NULL
; n
= n
->next
)
1894 last_step_expr
= n
->expr
;
1895 last_step
= NULL_TREE
;
1896 last_step_parm
= false;
1898 if (n
->sym
->attr
.referenced
|| declare_simd
)
1900 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1901 if (t
!= error_mark_node
)
1903 tree node
= build_omp_clause (input_location
,
1905 OMP_CLAUSE_DECL (node
) = t
;
1906 omp_clause_linear_kind kind
;
1907 switch (n
->u
.linear_op
)
1909 case OMP_LINEAR_DEFAULT
:
1910 kind
= OMP_CLAUSE_LINEAR_DEFAULT
;
1912 case OMP_LINEAR_REF
:
1913 kind
= OMP_CLAUSE_LINEAR_REF
;
1915 case OMP_LINEAR_VAL
:
1916 kind
= OMP_CLAUSE_LINEAR_VAL
;
1918 case OMP_LINEAR_UVAL
:
1919 kind
= OMP_CLAUSE_LINEAR_UVAL
;
1924 OMP_CLAUSE_LINEAR_KIND (node
) = kind
;
1925 if (last_step_expr
&& last_step
== NULL_TREE
)
1929 gfc_init_se (&se
, NULL
);
1930 gfc_conv_expr (&se
, last_step_expr
);
1931 gfc_add_block_to_block (block
, &se
.pre
);
1932 last_step
= gfc_evaluate_now (se
.expr
, block
);
1933 gfc_add_block_to_block (block
, &se
.post
);
1935 else if (last_step_expr
->expr_type
== EXPR_VARIABLE
)
1937 gfc_symbol
*s
= last_step_expr
->symtree
->n
.sym
;
1938 last_step
= gfc_trans_omp_variable (s
, true);
1939 last_step_parm
= true;
1943 = gfc_conv_constant_to_tree (last_step_expr
);
1947 OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (node
) = 1;
1948 OMP_CLAUSE_LINEAR_STEP (node
) = last_step
;
1952 if (kind
== OMP_CLAUSE_LINEAR_REF
)
1955 if (n
->sym
->attr
.flavor
== FL_PROCEDURE
)
1957 type
= gfc_get_function_type (n
->sym
);
1958 type
= build_pointer_type (type
);
1961 type
= gfc_sym_type (n
->sym
);
1962 if (POINTER_TYPE_P (type
))
1963 type
= TREE_TYPE (type
);
1964 /* Otherwise to be determined what exactly
1966 tree t
= fold_convert (sizetype
, last_step
);
1967 t
= size_binop (MULT_EXPR
, t
,
1968 TYPE_SIZE_UNIT (type
));
1969 OMP_CLAUSE_LINEAR_STEP (node
) = t
;
1974 = gfc_typenode_for_spec (&n
->sym
->ts
);
1975 OMP_CLAUSE_LINEAR_STEP (node
)
1976 = fold_convert (type
, last_step
);
1979 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
1980 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
1981 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1987 case OMP_LIST_DEPEND
:
1988 for (; n
!= NULL
; n
= n
->next
)
1990 if (n
->u
.depend_op
== OMP_DEPEND_SINK_FIRST
)
1992 tree vec
= NULL_TREE
;
1996 tree addend
= integer_zero_node
, t
;
2000 addend
= gfc_conv_constant_to_tree (n
->expr
);
2001 if (TREE_CODE (addend
) == INTEGER_CST
2002 && tree_int_cst_sgn (addend
) == -1)
2005 addend
= const_unop (NEGATE_EXPR
,
2006 TREE_TYPE (addend
), addend
);
2009 t
= gfc_trans_omp_variable (n
->sym
, false);
2010 if (t
!= error_mark_node
)
2012 if (i
< vec_safe_length (doacross_steps
)
2013 && !integer_zerop (addend
)
2014 && (*doacross_steps
)[i
])
2016 tree step
= (*doacross_steps
)[i
];
2017 addend
= fold_convert (TREE_TYPE (step
), addend
);
2018 addend
= build2 (TRUNC_DIV_EXPR
,
2019 TREE_TYPE (step
), addend
, step
);
2021 vec
= tree_cons (addend
, t
, vec
);
2023 OMP_CLAUSE_DEPEND_SINK_NEGATIVE (vec
) = 1;
2026 || n
->next
->u
.depend_op
!= OMP_DEPEND_SINK
)
2030 if (vec
== NULL_TREE
)
2033 tree node
= build_omp_clause (input_location
,
2035 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_SINK
;
2036 OMP_CLAUSE_DECL (node
) = nreverse (vec
);
2037 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2041 if (!n
->sym
->attr
.referenced
)
2044 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_DEPEND
);
2045 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2047 tree decl
= gfc_get_symbol_decl (n
->sym
);
2048 if (gfc_omp_privatize_by_reference (decl
))
2049 decl
= build_fold_indirect_ref (decl
);
2050 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2052 decl
= gfc_conv_descriptor_data_get (decl
);
2053 decl
= fold_convert (build_pointer_type (char_type_node
),
2055 decl
= build_fold_indirect_ref (decl
);
2057 else if (DECL_P (decl
))
2058 TREE_ADDRESSABLE (decl
) = 1;
2059 OMP_CLAUSE_DECL (node
) = decl
;
2064 gfc_init_se (&se
, NULL
);
2065 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2067 gfc_conv_expr_reference (&se
, n
->expr
);
2072 gfc_conv_expr_descriptor (&se
, n
->expr
);
2073 ptr
= gfc_conv_array_data (se
.expr
);
2075 gfc_add_block_to_block (block
, &se
.pre
);
2076 gfc_add_block_to_block (block
, &se
.post
);
2077 ptr
= fold_convert (build_pointer_type (char_type_node
),
2079 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2081 switch (n
->u
.depend_op
)
2084 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
2086 case OMP_DEPEND_OUT
:
2087 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
2089 case OMP_DEPEND_INOUT
:
2090 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
2095 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2099 for (; n
!= NULL
; n
= n
->next
)
2101 if (!n
->sym
->attr
.referenced
)
2104 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2105 tree node2
= NULL_TREE
;
2106 tree node3
= NULL_TREE
;
2107 tree node4
= NULL_TREE
;
2108 tree decl
= gfc_get_symbol_decl (n
->sym
);
2110 TREE_ADDRESSABLE (decl
) = 1;
2111 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2113 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2114 && (gfc_omp_privatize_by_reference (decl
)
2115 || GFC_DECL_GET_SCALAR_POINTER (decl
)
2116 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
2117 || GFC_DECL_CRAY_POINTEE (decl
)
2118 || GFC_DESCRIPTOR_TYPE_P
2119 (TREE_TYPE (TREE_TYPE (decl
)))))
2121 tree orig_decl
= decl
;
2122 node4
= build_omp_clause (input_location
,
2124 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2125 OMP_CLAUSE_DECL (node4
) = decl
;
2126 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2127 decl
= build_fold_indirect_ref (decl
);
2128 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
2129 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
2130 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
2132 node3
= build_omp_clause (input_location
,
2134 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2135 OMP_CLAUSE_DECL (node3
) = decl
;
2136 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2137 decl
= build_fold_indirect_ref (decl
);
2140 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2142 tree type
= TREE_TYPE (decl
);
2143 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2144 ptr
= fold_convert (build_pointer_type (char_type_node
),
2146 ptr
= build_fold_indirect_ref (ptr
);
2147 OMP_CLAUSE_DECL (node
) = ptr
;
2148 node2
= build_omp_clause (input_location
,
2150 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2151 OMP_CLAUSE_DECL (node2
) = decl
;
2152 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2153 node3
= build_omp_clause (input_location
,
2155 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2156 OMP_CLAUSE_DECL (node3
)
2157 = gfc_conv_descriptor_data_get (decl
);
2158 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2160 /* We have to check for n->sym->attr.dimension because
2161 of scalar coarrays. */
2162 if (n
->sym
->attr
.pointer
&& n
->sym
->attr
.dimension
)
2164 stmtblock_t cond_block
;
2166 = gfc_create_var (gfc_array_index_type
, NULL
);
2167 tree tem
, then_b
, else_b
, zero
, cond
;
2169 gfc_init_block (&cond_block
);
2171 = gfc_full_array_size (&cond_block
, decl
,
2172 GFC_TYPE_ARRAY_RANK (type
));
2173 gfc_add_modify (&cond_block
, size
, tem
);
2174 then_b
= gfc_finish_block (&cond_block
);
2175 gfc_init_block (&cond_block
);
2176 zero
= build_int_cst (gfc_array_index_type
, 0);
2177 gfc_add_modify (&cond_block
, size
, zero
);
2178 else_b
= gfc_finish_block (&cond_block
);
2179 tem
= gfc_conv_descriptor_data_get (decl
);
2180 tem
= fold_convert (pvoid_type_node
, tem
);
2181 cond
= fold_build2_loc (input_location
, NE_EXPR
,
2183 tem
, null_pointer_node
);
2184 gfc_add_expr_to_block (block
,
2185 build3_loc (input_location
,
2190 OMP_CLAUSE_SIZE (node
) = size
;
2192 else if (n
->sym
->attr
.dimension
)
2193 OMP_CLAUSE_SIZE (node
)
2194 = gfc_full_array_size (block
, decl
,
2195 GFC_TYPE_ARRAY_RANK (type
));
2196 if (n
->sym
->attr
.dimension
)
2199 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2200 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2201 OMP_CLAUSE_SIZE (node
)
2202 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2203 OMP_CLAUSE_SIZE (node
), elemsz
);
2207 OMP_CLAUSE_DECL (node
) = decl
;
2212 gfc_init_se (&se
, NULL
);
2213 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2215 gfc_conv_expr_reference (&se
, n
->expr
);
2216 gfc_add_block_to_block (block
, &se
.pre
);
2218 OMP_CLAUSE_SIZE (node
)
2219 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2223 gfc_conv_expr_descriptor (&se
, n
->expr
);
2224 ptr
= gfc_conv_array_data (se
.expr
);
2225 tree type
= TREE_TYPE (se
.expr
);
2226 gfc_add_block_to_block (block
, &se
.pre
);
2227 OMP_CLAUSE_SIZE (node
)
2228 = gfc_full_array_size (block
, se
.expr
,
2229 GFC_TYPE_ARRAY_RANK (type
));
2231 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2232 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2233 OMP_CLAUSE_SIZE (node
)
2234 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2235 OMP_CLAUSE_SIZE (node
), elemsz
);
2237 gfc_add_block_to_block (block
, &se
.post
);
2238 ptr
= fold_convert (build_pointer_type (char_type_node
),
2240 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2242 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2243 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
2245 node4
= build_omp_clause (input_location
,
2247 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2248 OMP_CLAUSE_DECL (node4
) = decl
;
2249 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2250 decl
= build_fold_indirect_ref (decl
);
2252 ptr
= fold_convert (sizetype
, ptr
);
2253 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2255 tree type
= TREE_TYPE (decl
);
2256 ptr2
= gfc_conv_descriptor_data_get (decl
);
2257 node2
= build_omp_clause (input_location
,
2259 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2260 OMP_CLAUSE_DECL (node2
) = decl
;
2261 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2262 node3
= build_omp_clause (input_location
,
2264 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2265 OMP_CLAUSE_DECL (node3
)
2266 = gfc_conv_descriptor_data_get (decl
);
2270 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2271 ptr2
= build_fold_addr_expr (decl
);
2274 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2277 node3
= build_omp_clause (input_location
,
2279 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2280 OMP_CLAUSE_DECL (node3
) = decl
;
2282 ptr2
= fold_convert (sizetype
, ptr2
);
2283 OMP_CLAUSE_SIZE (node3
)
2284 = fold_build2 (MINUS_EXPR
, sizetype
, ptr
, ptr2
);
2286 switch (n
->u
.map_op
)
2289 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALLOC
);
2292 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TO
);
2295 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FROM
);
2297 case OMP_MAP_TOFROM
:
2298 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TOFROM
);
2300 case OMP_MAP_ALWAYS_TO
:
2301 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TO
);
2303 case OMP_MAP_ALWAYS_FROM
:
2304 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_FROM
);
2306 case OMP_MAP_ALWAYS_TOFROM
:
2307 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TOFROM
);
2309 case OMP_MAP_RELEASE
:
2310 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_RELEASE
);
2312 case OMP_MAP_DELETE
:
2313 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DELETE
);
2315 case OMP_MAP_FORCE_ALLOC
:
2316 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_ALLOC
);
2318 case OMP_MAP_FORCE_TO
:
2319 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TO
);
2321 case OMP_MAP_FORCE_FROM
:
2322 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_FROM
);
2324 case OMP_MAP_FORCE_TOFROM
:
2325 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TOFROM
);
2327 case OMP_MAP_FORCE_PRESENT
:
2328 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_PRESENT
);
2330 case OMP_MAP_FORCE_DEVICEPTR
:
2331 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEVICEPTR
);
2336 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2338 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
2340 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
2342 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
2347 case OMP_LIST_CACHE
:
2348 for (; n
!= NULL
; n
= n
->next
)
2350 if (!n
->sym
->attr
.referenced
)
2356 clause_code
= OMP_CLAUSE_TO
;
2359 clause_code
= OMP_CLAUSE_FROM
;
2361 case OMP_LIST_CACHE
:
2362 clause_code
= OMP_CLAUSE__CACHE_
;
2367 tree node
= build_omp_clause (input_location
, clause_code
);
2368 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2370 tree decl
= gfc_get_symbol_decl (n
->sym
);
2371 if (gfc_omp_privatize_by_reference (decl
))
2372 decl
= build_fold_indirect_ref (decl
);
2373 else if (DECL_P (decl
))
2374 TREE_ADDRESSABLE (decl
) = 1;
2375 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2377 tree type
= TREE_TYPE (decl
);
2378 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2379 ptr
= fold_convert (build_pointer_type (char_type_node
),
2381 ptr
= build_fold_indirect_ref (ptr
);
2382 OMP_CLAUSE_DECL (node
) = ptr
;
2383 OMP_CLAUSE_SIZE (node
)
2384 = gfc_full_array_size (block
, decl
,
2385 GFC_TYPE_ARRAY_RANK (type
));
2387 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2388 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2389 OMP_CLAUSE_SIZE (node
)
2390 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2391 OMP_CLAUSE_SIZE (node
), elemsz
);
2394 OMP_CLAUSE_DECL (node
) = decl
;
2399 gfc_init_se (&se
, NULL
);
2400 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2402 gfc_conv_expr_reference (&se
, n
->expr
);
2404 gfc_add_block_to_block (block
, &se
.pre
);
2405 OMP_CLAUSE_SIZE (node
)
2406 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2410 gfc_conv_expr_descriptor (&se
, n
->expr
);
2411 ptr
= gfc_conv_array_data (se
.expr
);
2412 tree type
= TREE_TYPE (se
.expr
);
2413 gfc_add_block_to_block (block
, &se
.pre
);
2414 OMP_CLAUSE_SIZE (node
)
2415 = gfc_full_array_size (block
, se
.expr
,
2416 GFC_TYPE_ARRAY_RANK (type
));
2418 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2419 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2420 OMP_CLAUSE_SIZE (node
)
2421 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2422 OMP_CLAUSE_SIZE (node
), elemsz
);
2424 gfc_add_block_to_block (block
, &se
.post
);
2425 ptr
= fold_convert (build_pointer_type (char_type_node
),
2427 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2429 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2437 if (clauses
->if_expr
)
2441 gfc_init_se (&se
, NULL
);
2442 gfc_conv_expr (&se
, clauses
->if_expr
);
2443 gfc_add_block_to_block (block
, &se
.pre
);
2444 if_var
= gfc_evaluate_now (se
.expr
, block
);
2445 gfc_add_block_to_block (block
, &se
.post
);
2447 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
2448 OMP_CLAUSE_IF_MODIFIER (c
) = ERROR_MARK
;
2449 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2450 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2452 for (ifc
= 0; ifc
< OMP_IF_LAST
; ifc
++)
2453 if (clauses
->if_exprs
[ifc
])
2457 gfc_init_se (&se
, NULL
);
2458 gfc_conv_expr (&se
, clauses
->if_exprs
[ifc
]);
2459 gfc_add_block_to_block (block
, &se
.pre
);
2460 if_var
= gfc_evaluate_now (se
.expr
, block
);
2461 gfc_add_block_to_block (block
, &se
.post
);
2463 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
2466 case OMP_IF_PARALLEL
:
2467 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_PARALLEL
;
2470 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASK
;
2472 case OMP_IF_TASKLOOP
:
2473 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASKLOOP
;
2476 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET
;
2478 case OMP_IF_TARGET_DATA
:
2479 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_DATA
;
2481 case OMP_IF_TARGET_UPDATE
:
2482 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_UPDATE
;
2484 case OMP_IF_TARGET_ENTER_DATA
:
2485 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_ENTER_DATA
;
2487 case OMP_IF_TARGET_EXIT_DATA
:
2488 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_EXIT_DATA
;
2493 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2494 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2497 if (clauses
->final_expr
)
2501 gfc_init_se (&se
, NULL
);
2502 gfc_conv_expr (&se
, clauses
->final_expr
);
2503 gfc_add_block_to_block (block
, &se
.pre
);
2504 final_var
= gfc_evaluate_now (se
.expr
, block
);
2505 gfc_add_block_to_block (block
, &se
.post
);
2507 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FINAL
);
2508 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
2509 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2512 if (clauses
->num_threads
)
2516 gfc_init_se (&se
, NULL
);
2517 gfc_conv_expr (&se
, clauses
->num_threads
);
2518 gfc_add_block_to_block (block
, &se
.pre
);
2519 num_threads
= gfc_evaluate_now (se
.expr
, block
);
2520 gfc_add_block_to_block (block
, &se
.post
);
2522 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_THREADS
);
2523 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
2524 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2527 chunk_size
= NULL_TREE
;
2528 if (clauses
->chunk_size
)
2530 gfc_init_se (&se
, NULL
);
2531 gfc_conv_expr (&se
, clauses
->chunk_size
);
2532 gfc_add_block_to_block (block
, &se
.pre
);
2533 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2534 gfc_add_block_to_block (block
, &se
.post
);
2537 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
2539 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SCHEDULE
);
2540 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2541 switch (clauses
->sched_kind
)
2543 case OMP_SCHED_STATIC
:
2544 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
2546 case OMP_SCHED_DYNAMIC
:
2547 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
2549 case OMP_SCHED_GUIDED
:
2550 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
2552 case OMP_SCHED_RUNTIME
:
2553 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
2555 case OMP_SCHED_AUTO
:
2556 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
2561 if (clauses
->sched_monotonic
)
2562 OMP_CLAUSE_SCHEDULE_KIND (c
)
2563 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
2564 | OMP_CLAUSE_SCHEDULE_MONOTONIC
);
2565 else if (clauses
->sched_nonmonotonic
)
2566 OMP_CLAUSE_SCHEDULE_KIND (c
)
2567 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
2568 | OMP_CLAUSE_SCHEDULE_NONMONOTONIC
);
2569 if (clauses
->sched_simd
)
2570 OMP_CLAUSE_SCHEDULE_SIMD (c
) = 1;
2571 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2574 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
2576 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULT
);
2577 switch (clauses
->default_sharing
)
2579 case OMP_DEFAULT_NONE
:
2580 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
2582 case OMP_DEFAULT_SHARED
:
2583 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
2585 case OMP_DEFAULT_PRIVATE
:
2586 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
2588 case OMP_DEFAULT_FIRSTPRIVATE
:
2589 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
2591 case OMP_DEFAULT_PRESENT
:
2592 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRESENT
;
2597 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2600 if (clauses
->nowait
)
2602 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOWAIT
);
2603 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2606 if (clauses
->ordered
)
2608 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ORDERED
);
2609 OMP_CLAUSE_ORDERED_EXPR (c
)
2610 = clauses
->orderedc
? build_int_cst (integer_type_node
,
2611 clauses
->orderedc
) : NULL_TREE
;
2612 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2615 if (clauses
->untied
)
2617 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_UNTIED
);
2618 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2621 if (clauses
->mergeable
)
2623 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_MERGEABLE
);
2624 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2627 if (clauses
->collapse
)
2629 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_COLLAPSE
);
2630 OMP_CLAUSE_COLLAPSE_EXPR (c
)
2631 = build_int_cst (integer_type_node
, clauses
->collapse
);
2632 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2635 if (clauses
->inbranch
)
2637 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INBRANCH
);
2638 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2641 if (clauses
->notinbranch
)
2643 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOTINBRANCH
);
2644 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2647 switch (clauses
->cancel
)
2649 case OMP_CANCEL_UNKNOWN
:
2651 case OMP_CANCEL_PARALLEL
:
2652 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PARALLEL
);
2653 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2655 case OMP_CANCEL_SECTIONS
:
2656 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SECTIONS
);
2657 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2660 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FOR
);
2661 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2663 case OMP_CANCEL_TASKGROUP
:
2664 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TASKGROUP
);
2665 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2669 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
2671 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PROC_BIND
);
2672 switch (clauses
->proc_bind
)
2674 case OMP_PROC_BIND_MASTER
:
2675 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
2677 case OMP_PROC_BIND_SPREAD
:
2678 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
2680 case OMP_PROC_BIND_CLOSE
:
2681 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
2686 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2689 if (clauses
->safelen_expr
)
2693 gfc_init_se (&se
, NULL
);
2694 gfc_conv_expr (&se
, clauses
->safelen_expr
);
2695 gfc_add_block_to_block (block
, &se
.pre
);
2696 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
2697 gfc_add_block_to_block (block
, &se
.post
);
2699 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SAFELEN
);
2700 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
2701 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2704 if (clauses
->simdlen_expr
)
2708 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2709 OMP_CLAUSE_SIMDLEN_EXPR (c
)
2710 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
2711 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2717 gfc_init_se (&se
, NULL
);
2718 gfc_conv_expr (&se
, clauses
->simdlen_expr
);
2719 gfc_add_block_to_block (block
, &se
.pre
);
2720 simdlen_var
= gfc_evaluate_now (se
.expr
, block
);
2721 gfc_add_block_to_block (block
, &se
.post
);
2723 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2724 OMP_CLAUSE_SIMDLEN_EXPR (c
) = simdlen_var
;
2725 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2729 if (clauses
->num_teams
)
2733 gfc_init_se (&se
, NULL
);
2734 gfc_conv_expr (&se
, clauses
->num_teams
);
2735 gfc_add_block_to_block (block
, &se
.pre
);
2736 num_teams
= gfc_evaluate_now (se
.expr
, block
);
2737 gfc_add_block_to_block (block
, &se
.post
);
2739 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TEAMS
);
2740 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
2741 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2744 if (clauses
->device
)
2748 gfc_init_se (&se
, NULL
);
2749 gfc_conv_expr (&se
, clauses
->device
);
2750 gfc_add_block_to_block (block
, &se
.pre
);
2751 device
= gfc_evaluate_now (se
.expr
, block
);
2752 gfc_add_block_to_block (block
, &se
.post
);
2754 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEVICE
);
2755 OMP_CLAUSE_DEVICE_ID (c
) = device
;
2756 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2759 if (clauses
->thread_limit
)
2763 gfc_init_se (&se
, NULL
);
2764 gfc_conv_expr (&se
, clauses
->thread_limit
);
2765 gfc_add_block_to_block (block
, &se
.pre
);
2766 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
2767 gfc_add_block_to_block (block
, &se
.post
);
2769 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREAD_LIMIT
);
2770 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
2771 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2774 chunk_size
= NULL_TREE
;
2775 if (clauses
->dist_chunk_size
)
2777 gfc_init_se (&se
, NULL
);
2778 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
2779 gfc_add_block_to_block (block
, &se
.pre
);
2780 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2781 gfc_add_block_to_block (block
, &se
.post
);
2784 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
2786 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DIST_SCHEDULE
);
2787 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2788 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2791 if (clauses
->grainsize
)
2795 gfc_init_se (&se
, NULL
);
2796 gfc_conv_expr (&se
, clauses
->grainsize
);
2797 gfc_add_block_to_block (block
, &se
.pre
);
2798 grainsize
= gfc_evaluate_now (se
.expr
, block
);
2799 gfc_add_block_to_block (block
, &se
.post
);
2801 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GRAINSIZE
);
2802 OMP_CLAUSE_GRAINSIZE_EXPR (c
) = grainsize
;
2803 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2806 if (clauses
->num_tasks
)
2810 gfc_init_se (&se
, NULL
);
2811 gfc_conv_expr (&se
, clauses
->num_tasks
);
2812 gfc_add_block_to_block (block
, &se
.pre
);
2813 num_tasks
= gfc_evaluate_now (se
.expr
, block
);
2814 gfc_add_block_to_block (block
, &se
.post
);
2816 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TASKS
);
2817 OMP_CLAUSE_NUM_TASKS_EXPR (c
) = num_tasks
;
2818 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2821 if (clauses
->priority
)
2825 gfc_init_se (&se
, NULL
);
2826 gfc_conv_expr (&se
, clauses
->priority
);
2827 gfc_add_block_to_block (block
, &se
.pre
);
2828 priority
= gfc_evaluate_now (se
.expr
, block
);
2829 gfc_add_block_to_block (block
, &se
.post
);
2831 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PRIORITY
);
2832 OMP_CLAUSE_PRIORITY_EXPR (c
) = priority
;
2833 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2840 gfc_init_se (&se
, NULL
);
2841 gfc_conv_expr (&se
, clauses
->hint
);
2842 gfc_add_block_to_block (block
, &se
.pre
);
2843 hint
= gfc_evaluate_now (se
.expr
, block
);
2844 gfc_add_block_to_block (block
, &se
.post
);
2846 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_HINT
);
2847 OMP_CLAUSE_HINT_EXPR (c
) = hint
;
2848 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2853 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMD
);
2854 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2856 if (clauses
->threads
)
2858 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREADS
);
2859 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2861 if (clauses
->nogroup
)
2863 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOGROUP
);
2864 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2866 if (clauses
->defaultmap
)
2868 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULTMAP
);
2869 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2871 if (clauses
->depend_source
)
2873 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEPEND
);
2874 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_SOURCE
;
2875 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2880 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ASYNC
);
2881 if (clauses
->async_expr
)
2882 OMP_CLAUSE_ASYNC_EXPR (c
)
2883 = gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
2885 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
2886 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2890 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SEQ
);
2891 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2893 if (clauses
->par_auto
)
2895 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_AUTO
);
2896 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2898 if (clauses
->independent
)
2900 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INDEPENDENT
);
2901 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2903 if (clauses
->wait_list
)
2907 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2909 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WAIT
);
2910 OMP_CLAUSE_DECL (c
) = gfc_convert_expr_to_tree (block
, el
->expr
);
2911 OMP_CLAUSE_CHAIN (c
) = omp_clauses
;
2915 if (clauses
->num_gangs_expr
)
2918 = gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
2919 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_GANGS
);
2920 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
2921 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2923 if (clauses
->num_workers_expr
)
2925 tree num_workers_var
2926 = gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
2927 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_WORKERS
);
2928 OMP_CLAUSE_NUM_WORKERS_EXPR (c
) = num_workers_var
;
2929 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2931 if (clauses
->vector_length_expr
)
2933 tree vector_length_var
2934 = gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
2935 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR_LENGTH
);
2936 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
) = vector_length_var
;
2937 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2939 if (clauses
->tile_list
)
2941 vec
<tree
, va_gc
> *tvec
;
2944 vec_alloc (tvec
, 4);
2946 for (el
= clauses
->tile_list
; el
; el
= el
->next
)
2947 vec_safe_push (tvec
, gfc_convert_expr_to_tree (block
, el
->expr
));
2949 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TILE
);
2950 OMP_CLAUSE_TILE_LIST (c
) = build_tree_list_vec (tvec
);
2951 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2954 if (clauses
->vector
)
2956 if (clauses
->vector_expr
)
2959 = gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
2960 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2961 OMP_CLAUSE_VECTOR_EXPR (c
) = vector_var
;
2962 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2966 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2967 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2970 if (clauses
->worker
)
2972 if (clauses
->worker_expr
)
2975 = gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
2976 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2977 OMP_CLAUSE_WORKER_EXPR (c
) = worker_var
;
2978 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2982 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2983 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2989 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GANG
);
2990 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2991 if (clauses
->gang_num_expr
)
2993 arg
= gfc_convert_expr_to_tree (block
, clauses
->gang_num_expr
);
2994 OMP_CLAUSE_GANG_EXPR (c
) = arg
;
2996 if (clauses
->gang_static
)
2998 arg
= clauses
->gang_static_expr
2999 ? gfc_convert_expr_to_tree (block
, clauses
->gang_static_expr
)
3000 : integer_minus_one_node
;
3001 OMP_CLAUSE_GANG_STATIC_EXPR (c
) = arg
;
3005 return nreverse (omp_clauses
);
/* Like gfc_trans_code, but force creation of a BIND_EXPR around it.

   CODE is the Fortran statement chain to translate; FORCE_EMPTY requests
   a BIND_EXPR even when the translated body is an empty statement (needed
   so OpenMP/OpenACC region bodies always have their own binding scope).  */

static tree
gfc_trans_omp_code (gfc_code *code, bool force_empty)
{
  tree stmt;

  /* Open a new binding level so any temporaries created while translating
     CODE are scoped to this region.  */
  pushlevel ();
  stmt = gfc_trans_code (code);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt) || force_empty)
	{
	  /* Wrap the body in a BIND_EXPR owning the block just popped.  */
	  tree block = poplevel (1, 0);
	  stmt = build3_v (BIND_EXPR, NULL, stmt, block);
	}
      else
	/* Empty body and not forced: discard the unused binding level.  */
	poplevel (0, 0);
    }
  else
    /* gfc_trans_code already produced a BIND_EXPR; drop our level.  */
    poplevel (0, 0);

  return stmt;
}
/* Trans OpenACC directives. */
/* parallel, kernels, data and host_data.

   Translate an OpenACC structured construct: emit its clauses into a new
   statement block, translate the construct body forcing a BIND_EXPR, and
   build the matching GENERIC construct node.  */

static tree
gfc_trans_oacc_construct (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, oacc_clauses;
  enum tree_code construct_code;

  /* Map the front-end statement kind to the GENERIC tree code.  */
  switch (code->op)
    {
      case EXEC_OACC_PARALLEL:
	construct_code = OACC_PARALLEL;
	break;
      case EXEC_OACC_KERNELS:
	construct_code = OACC_KERNELS;
	break;
      case EXEC_OACC_DATA:
	construct_code = OACC_DATA;
	break;
      case EXEC_OACC_HOST_DATA:
	construct_code = OACC_HOST_DATA;
	break;
      default:
	gcc_unreachable ();
    }

  gfc_start_block (&block);
  /* Clause side effects (expression evaluation) land in BLOCK before the
     construct itself.  */
  oacc_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
					code->loc);
  /* Force a BIND_EXPR so the region body has its own scope.  */
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, construct_code, void_type_node, stmt,
		     oacc_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
/* update, enter_data, exit_data, cache.

   Translate a stand-alone (unstructured) OpenACC executable directive:
   unlike gfc_trans_oacc_construct there is no body, so the resulting
   GENERIC node carries only the clause list.  */

static tree
gfc_trans_oacc_executable_directive (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, oacc_clauses;
  enum tree_code construct_code;

  /* Map the front-end statement kind to the GENERIC tree code.  */
  switch (code->op)
    {
      case EXEC_OACC_UPDATE:
	construct_code = OACC_UPDATE;
	break;
      case EXEC_OACC_ENTER_DATA:
	construct_code = OACC_ENTER_DATA;
	break;
      case EXEC_OACC_EXIT_DATA:
	construct_code = OACC_EXIT_DATA;
	break;
      case EXEC_OACC_CACHE:
	construct_code = OACC_CACHE;
	break;
      default:
	gcc_unreachable ();
    }

  gfc_start_block (&block);
  /* Clause expression evaluation is emitted into BLOCK first.  */
  oacc_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
					code->loc);
  /* Directive node with only a clause operand -- no body.  */
  stmt = build1_loc (input_location, construct_code, void_type_node,
		     oacc_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
/* Translate the OpenACC WAIT directive into a call to the libgomp
   entry point GOACC_wait.

   Calling convention built here: GOACC_wait (async, num_waits, wait...),
   where ASYNC is the async-clause expression or -2 when absent, followed
   by the count and the translated wait-argument expressions.  */

static tree
gfc_trans_oacc_wait_directive (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, t;
  vec<tree, va_gc> *args;
  int nparms = 0;
  gfc_expr_list *el;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;
  location_t loc = input_location;

  /* First pass: count the wait arguments so the vector can be sized.  */
  for (el = clauses->wait_list; el; el = el->next)
    nparms++;

  /* +2 slots for the async id and the argument count.  */
  vec_alloc (args, nparms + 2);
  stmt = builtin_decl_explicit (BUILT_IN_GOACC_WAIT);

  gfc_start_block (&block);

  if (clauses->async_expr)
    t = gfc_convert_expr_to_tree (&block, clauses->async_expr);
  else
    /* -2 is the runtime's "no async clause" sentinel
       (GOMP_ASYNC_SYNC).  */
    t = build_int_cst (integer_type_node, -2);

  args->quick_push (t);
  args->quick_push (build_int_cst (integer_type_node, nparms));

  /* Second pass: translate each wait argument expression.  */
  for (el = clauses->wait_list; el; el = el->next)
    args->quick_push (gfc_convert_expr_to_tree (&block, el->expr));

  stmt = build_call_expr_loc_vec (loc, stmt, args);
  gfc_add_expr_to_block (&block, stmt);

  vec_free (args);

  return gfc_finish_block (&block);
}
3142 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
3143 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
/* Translate !$omp atomic (READ, WRITE, UPDATE, CAPTURE variants) into
   GENERIC OMP_ATOMIC* nodes.

   The front end represents the atomic region as one (or, for CAPTURE,
   two) EXEC_ASSIGN statements under CODE->block.  This routine pattern
   matches the assignment against the forms the OpenMP spec allows
   (var = var op expr, var = expr op var, var = intrinsic (var, ...))
   and rebuilds it as an atomic read/write/update/capture of the LHS
   address.  */

static tree
gfc_trans_omp_atomic (gfc_code *code)
{
  gfc_code *atomic_code = code;
  gfc_se lse;
  gfc_se rse;
  gfc_se vse;
  gfc_expr *expr2, *e;
  gfc_symbol *var;
  stmtblock_t block;
  tree lhsaddr, type, rhs, x;
  enum tree_code op = ERROR_MARK;
  enum tree_code aop = OMP_ATOMIC;
  bool var_on_left = false;
  bool seq_cst = (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SEQ_CST) != 0;

  /* The atomic statement proper is the first assignment in the block.  */
  code = code->block->next;
  gcc_assert (code->op == EXEC_ASSIGN);
  var = code->expr1->symtree->n.sym;

  gfc_init_se (&lse, NULL);
  gfc_init_se (&rse, NULL);
  gfc_init_se (&vse, NULL);
  gfc_start_block (&block);

  expr2 = code->expr2;
  /* Strip an implicit type-conversion intrinsic wrapped around the RHS,
     except for WRITE/SWAP forms where the RHS is stored as-is.  */
  if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
       != GFC_OMP_ATOMIC_WRITE)
      && (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP) == 0
      && expr2->expr_type == EXPR_FUNCTION
      && expr2->value.function.isym
      && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
    expr2 = expr2->value.function.actual->expr;

  switch (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
    {
    case GFC_OMP_ATOMIC_READ:
      /* v = x: atomically load X and assign it to V.  */
      gfc_conv_expr (&vse, code->expr1);
      gfc_add_block_to_block (&block, &vse.pre);

      gfc_conv_expr (&lse, expr2);
      gfc_add_block_to_block (&block, &lse.pre);
      type = TREE_TYPE (lse.expr);
      lhsaddr = gfc_build_addr_expr (NULL, lse.expr);

      x = build1 (OMP_ATOMIC_READ, type, lhsaddr);
      OMP_ATOMIC_SEQ_CST (x) = seq_cst;
      x = convert (TREE_TYPE (vse.expr), x);
      gfc_add_modify (&block, vse.expr, x);

      gfc_add_block_to_block (&block, &lse.pre);
      gfc_add_block_to_block (&block, &rse.pre);

      return gfc_finish_block (&block);
    case GFC_OMP_ATOMIC_CAPTURE:
      aop = OMP_ATOMIC_CAPTURE_NEW;
      if (expr2->expr_type == EXPR_VARIABLE)
	{
	  /* Capture is "v = x; x = ..." -- the old value is captured;
	     advance to the second statement, which is the update.  */
	  aop = OMP_ATOMIC_CAPTURE_OLD;
	  gfc_conv_expr (&vse, code->expr1);
	  gfc_add_block_to_block (&block, &vse.pre);

	  gfc_conv_expr (&lse, expr2);
	  gfc_add_block_to_block (&block, &lse.pre);
	  gfc_init_se (&lse, NULL);
	  code = code->next;
	  var = code->expr1->symtree->n.sym;
	  expr2 = code->expr2;
	  if (expr2->expr_type == EXPR_FUNCTION
	      && expr2->value.function.isym
	      && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
	    expr2 = expr2->value.function.actual->expr;
	}
      break;
    default:
      break;
    }

  /* Translate the updated variable and take its address; the atomic node
     operates on LHSADDR.  */
  gfc_conv_expr (&lse, code->expr1);
  gfc_add_block_to_block (&block, &lse.pre);
  type = TREE_TYPE (lse.expr);
  lhsaddr = gfc_build_addr_expr (NULL, lse.expr);

  if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
       == GFC_OMP_ATOMIC_WRITE)
      || (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP))
    {
      /* x = expr: the RHS does not reference X, translate directly.  */
      gfc_conv_expr (&rse, expr2);
      gfc_add_block_to_block (&block, &rse.pre);
    }
  else if (expr2->expr_type == EXPR_OP)
    {
      gfc_expr *e;
      /* x = x op expr / x = expr op x: map the Fortran operator to the
	 GENERIC tree code.  */
      switch (expr2->value.op.op)
	{
	case INTRINSIC_PLUS:
	  op = PLUS_EXPR;
	  break;
	case INTRINSIC_TIMES:
	  op = MULT_EXPR;
	  break;
	case INTRINSIC_MINUS:
	  op = MINUS_EXPR;
	  break;
	case INTRINSIC_DIVIDE:
	  if (expr2->ts.type == BT_INTEGER)
	    op = TRUNC_DIV_EXPR;
	  else
	    op = RDIV_EXPR;
	  break;
	case INTRINSIC_AND:
	  op = TRUTH_ANDIF_EXPR;
	  break;
	case INTRINSIC_OR:
	  op = TRUTH_ORIF_EXPR;
	  break;
	case INTRINSIC_EQV:
	  op = EQ_EXPR;
	  break;
	case INTRINSIC_NEQV:
	  op = NE_EXPR;
	  break;
	default:
	  gcc_unreachable ();
	}
      /* Determine which operand is the atomic variable; strip a
	 conversion intrinsic around it first.  */
      e = expr2->value.op.op1;
      if (e->expr_type == EXPR_FUNCTION
	  && e->value.function.isym
	  && e->value.function.isym->id == GFC_ISYM_CONVERSION)
	e = e->value.function.actual->expr;
      if (e->expr_type == EXPR_VARIABLE
	  && e->symtree != NULL
	  && e->symtree->n.sym == var)
	{
	  expr2 = expr2->value.op.op2;
	  var_on_left = true;
	}
      else
	{
	  e = expr2->value.op.op2;
	  if (e->expr_type == EXPR_FUNCTION
	      && e->value.function.isym
	      && e->value.function.isym->id == GFC_ISYM_CONVERSION)
	    e = e->value.function.actual->expr;
	  gcc_assert (e->expr_type == EXPR_VARIABLE
		      && e->symtree != NULL
		      && e->symtree->n.sym == var);
	  expr2 = expr2->value.op.op1;
	  var_on_left = false;
	}
      gfc_conv_expr (&rse, expr2);
      gfc_add_block_to_block (&block, &rse.pre);
    }
  else
    {
      /* x = intrinsic (x, expr-list): MIN/MAX/IAND/IOR/IEOR forms.  */
      gcc_assert (expr2->expr_type == EXPR_FUNCTION);
      switch (expr2->value.function.isym->id)
	{
	case GFC_ISYM_MIN:
	  op = MIN_EXPR;
	  break;
	case GFC_ISYM_MAX:
	  op = MAX_EXPR;
	  break;
	case GFC_ISYM_IAND:
	  op = BIT_AND_EXPR;
	  break;
	case GFC_ISYM_IOR:
	  op = BIT_IOR_EXPR;
	  break;
	case GFC_ISYM_IEOR:
	  op = BIT_XOR_EXPR;
	  break;
	default:
	  gcc_unreachable ();
	}
      /* First argument must be the atomic variable itself.  */
      e = expr2->value.function.actual->expr;
      gcc_assert (e->expr_type == EXPR_VARIABLE
		  && e->symtree != NULL
		  && e->symtree->n.sym == var);

      gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
      gfc_add_block_to_block (&block, &rse.pre);
      if (expr2->value.function.actual->next->next != NULL)
	{
	  /* More than two arguments: fold the extra ones into an
	     accumulator outside the atomic operation (MIN/MAX chains).  */
	  tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
	  gfc_actual_arglist *arg;

	  gfc_add_modify (&block, accum, rse.expr);
	  for (arg = expr2->value.function.actual->next->next; arg;
	       arg = arg->next)
	    {
	      gfc_init_block (&rse.pre);
	      gfc_conv_expr (&rse, arg->expr);
	      gfc_add_block_to_block (&block, &rse.pre);
	      x = fold_build2_loc (input_location, op, TREE_TYPE (accum),
				   accum, rse.expr);
	      gfc_add_modify (&block, accum, x);
	    }

	  rse.expr = accum;
	}

      expr2 = expr2->value.function.actual->next->expr;
    }

  lhsaddr = save_expr (lhsaddr);
  if (TREE_CODE (lhsaddr) != SAVE_EXPR
      && (TREE_CODE (lhsaddr) != ADDR_EXPR
	  || !VAR_P (TREE_OPERAND (lhsaddr, 0))))
    {
      /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
	 it even after unsharing function body.  */
      tree var = create_tmp_var_raw (TREE_TYPE (lhsaddr));
      DECL_CONTEXT (var) = current_function_decl;
      lhsaddr = build4 (TARGET_EXPR, TREE_TYPE (lhsaddr), var, lhsaddr,
			NULL_TREE, NULL_TREE);
    }

  /* Evaluate the (non-atomic) RHS once, before the atomic operation.  */
  rhs = gfc_evaluate_now (rse.expr, &block);

  if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
       == GFC_OMP_ATOMIC_WRITE)
      || (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP))
    x = rhs;
  else
    {
      /* Build *lhsaddr OP rhs (or rhs OP *lhsaddr), honouring which side
	 the atomic variable appeared on for non-commutative operators.  */
      x = convert (TREE_TYPE (rhs),
		   build_fold_indirect_ref_loc (input_location, lhsaddr));
      if (var_on_left)
	x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), x, rhs);
      else
	x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), rhs, x);
    }

  /* Assigning a complex value to a non-complex variable keeps only the
     real part.  */
  if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
      && TREE_CODE (type) != COMPLEX_TYPE)
    x = fold_build1_loc (input_location, REALPART_EXPR,
			 TREE_TYPE (TREE_TYPE (rhs)), x);

  gfc_add_block_to_block (&block, &lse.pre);
  gfc_add_block_to_block (&block, &rse.pre);

  if (aop == OMP_ATOMIC)
    {
      /* Plain update: no captured value.  */
      x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
      OMP_ATOMIC_SEQ_CST (x) = seq_cst;
      gfc_add_expr_to_block (&block, x);
    }
  else
    {
      if (aop == OMP_ATOMIC_CAPTURE_NEW)
	{
	  /* "x = ...; v = x": the capture assignment is the second
	     statement -- translate its target now.  */
	  code = code->next;
	  expr2 = code->expr2;
	  if (expr2->expr_type == EXPR_FUNCTION
	      && expr2->value.function.isym
	      && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
	    expr2 = expr2->value.function.actual->expr;

	  gcc_assert (expr2->expr_type == EXPR_VARIABLE);
	  gfc_conv_expr (&vse, code->expr1);
	  gfc_add_block_to_block (&block, &vse.pre);

	  gfc_init_se (&lse, NULL);
	  gfc_conv_expr (&lse, expr2);
	  gfc_add_block_to_block (&block, &lse.pre);
	}
      /* Capture form: the atomic node yields the old or new value,
	 which is then stored into V.  */
      x = build2 (aop, type, lhsaddr, convert (type, x));
      OMP_ATOMIC_SEQ_CST (x) = seq_cst;
      x = convert (TREE_TYPE (vse.expr), x);
      gfc_add_modify (&block, vse.expr, x);
    }

  return gfc_finish_block (&block);
}
3423 gfc_trans_omp_barrier (void)
3425 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
3426 return build_call_expr_loc (input_location
, decl
, 0);
3430 gfc_trans_omp_cancel (gfc_code
*code
)
3433 tree ifc
= boolean_true_node
;
3435 switch (code
->ext
.omp_clauses
->cancel
)
3437 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3438 case OMP_CANCEL_DO
: mask
= 2; break;
3439 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3440 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3441 default: gcc_unreachable ();
3443 gfc_start_block (&block
);
3444 if (code
->ext
.omp_clauses
->if_expr
)
3449 gfc_init_se (&se
, NULL
);
3450 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->if_expr
);
3451 gfc_add_block_to_block (&block
, &se
.pre
);
3452 if_var
= gfc_evaluate_now (se
.expr
, &block
);
3453 gfc_add_block_to_block (&block
, &se
.post
);
3454 tree type
= TREE_TYPE (if_var
);
3455 ifc
= fold_build2_loc (input_location
, NE_EXPR
,
3456 boolean_type_node
, if_var
,
3457 build_zero_cst (type
));
3459 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
3460 tree c_bool_type
= TREE_TYPE (TREE_TYPE (decl
));
3461 ifc
= fold_convert (c_bool_type
, ifc
);
3462 gfc_add_expr_to_block (&block
,
3463 build_call_expr_loc (input_location
, decl
, 2,
3464 build_int_cst (integer_type_node
,
3466 return gfc_finish_block (&block
);
3470 gfc_trans_omp_cancellation_point (gfc_code
*code
)
3473 switch (code
->ext
.omp_clauses
->cancel
)
3475 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3476 case OMP_CANCEL_DO
: mask
= 2; break;
3477 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3478 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3479 default: gcc_unreachable ();
3481 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT
);
3482 return build_call_expr_loc (input_location
, decl
, 1,
3483 build_int_cst (integer_type_node
, mask
));
3487 gfc_trans_omp_critical (gfc_code
*code
)
3489 tree name
= NULL_TREE
, stmt
;
3490 if (code
->ext
.omp_clauses
!= NULL
)
3491 name
= get_identifier (code
->ext
.omp_clauses
->critical_name
);
3492 stmt
= gfc_trans_code (code
->block
->next
);
3493 return build3_loc (input_location
, OMP_CRITICAL
, void_type_node
, stmt
,
3497 typedef struct dovar_init_d
{
3504 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
3505 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
3508 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
, orig_decls
;
3509 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
3512 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
3513 int i
, collapse
= clauses
->collapse
;
3514 vec
<dovar_init
> inits
= vNULL
;
3517 vec
<tree
, va_heap
, vl_embed
> *saved_doacross_steps
= doacross_steps
;
3518 gfc_expr_list
*tile
= do_clauses
? do_clauses
->tile_list
: clauses
->tile_list
;
3520 /* Both collapsed and tiled loops are lowered the same way. In
3521 OpenACC, those clauses are not compatible, so prioritize the tile
3522 clause, if present. */
3526 for (gfc_expr_list
*el
= tile
; el
; el
= el
->next
)
3530 doacross_steps
= NULL
;
3531 if (clauses
->orderedc
)
3532 collapse
= clauses
->orderedc
;
3536 code
= code
->block
->next
;
3537 gcc_assert (code
->op
== EXEC_DO
);
3539 init
= make_tree_vec (collapse
);
3540 cond
= make_tree_vec (collapse
);
3541 incr
= make_tree_vec (collapse
);
3542 orig_decls
= clauses
->orderedc
? make_tree_vec (collapse
) : NULL_TREE
;
3546 gfc_start_block (&block
);
3550 /* simd schedule modifier is only useful for composite do simd and other
3551 constructs including that, where gfc_trans_omp_do is only called
3552 on the simd construct and DO's clauses are translated elsewhere. */
3553 do_clauses
->sched_simd
= false;
3555 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
3557 for (i
= 0; i
< collapse
; i
++)
3560 int dovar_found
= 0;
3565 gfc_omp_namelist
*n
= NULL
;
3566 if (op
!= EXEC_OMP_DISTRIBUTE
)
3567 for (n
= clauses
->lists
[(op
== EXEC_OMP_SIMD
&& collapse
== 1)
3568 ? OMP_LIST_LINEAR
: OMP_LIST_LASTPRIVATE
];
3569 n
!= NULL
; n
= n
->next
)
3570 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3574 else if (n
== NULL
&& op
!= EXEC_OMP_SIMD
)
3575 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
3576 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3582 /* Evaluate all the expressions in the iterator. */
3583 gfc_init_se (&se
, NULL
);
3584 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
3585 gfc_add_block_to_block (pblock
, &se
.pre
);
3587 type
= TREE_TYPE (dovar
);
3588 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
3590 gfc_init_se (&se
, NULL
);
3591 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
3592 gfc_add_block_to_block (pblock
, &se
.pre
);
3593 from
= gfc_evaluate_now (se
.expr
, pblock
);
3595 gfc_init_se (&se
, NULL
);
3596 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
3597 gfc_add_block_to_block (pblock
, &se
.pre
);
3598 to
= gfc_evaluate_now (se
.expr
, pblock
);
3600 gfc_init_se (&se
, NULL
);
3601 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
3602 gfc_add_block_to_block (pblock
, &se
.pre
);
3603 step
= gfc_evaluate_now (se
.expr
, pblock
);
3606 /* Special case simple loops. */
3609 if (integer_onep (step
))
3611 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
3616 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
3622 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
3623 /* The condition should not be folded. */
3624 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
3625 ? LE_EXPR
: GE_EXPR
,
3626 logical_type_node
, dovar
, to
);
3627 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3629 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3632 TREE_VEC_ELT (incr
, i
));
3636 /* STEP is not 1 or -1. Use:
3637 for (count = 0; count < (to + step - from) / step; count++)
3639 dovar = from + count * step;
3643 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
3644 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
3645 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
3647 tmp
= gfc_evaluate_now (tmp
, pblock
);
3648 count
= gfc_create_var (type
, "count");
3649 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
3650 build_int_cst (type
, 0));
3651 /* The condition should not be folded. */
3652 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
3655 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3657 build_int_cst (type
, 1));
3658 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3659 MODIFY_EXPR
, type
, count
,
3660 TREE_VEC_ELT (incr
, i
));
3662 /* Initialize DOVAR. */
3663 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
3664 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
3665 dovar_init e
= {dovar
, tmp
};
3666 inits
.safe_push (e
);
3667 if (clauses
->orderedc
)
3669 if (doacross_steps
== NULL
)
3670 vec_safe_grow_cleared (doacross_steps
, clauses
->orderedc
);
3671 (*doacross_steps
)[i
] = step
;
3675 TREE_VEC_ELT (orig_decls
, i
) = dovar_decl
;
3677 if (dovar_found
== 2
3678 && op
== EXEC_OMP_SIMD
3682 for (tmp
= omp_clauses
; tmp
; tmp
= OMP_CLAUSE_CHAIN (tmp
))
3683 if (OMP_CLAUSE_CODE (tmp
) == OMP_CLAUSE_LINEAR
3684 && OMP_CLAUSE_DECL (tmp
) == dovar
)
3686 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3692 if (op
== EXEC_OMP_SIMD
)
3696 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3697 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
3698 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3701 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3706 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3707 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
3708 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3710 if (dovar_found
== 2)
3717 /* If dovar is lastprivate, but different counter is used,
3718 dovar += step needs to be added to
3719 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
3720 will have the value on entry of the last loop, rather
3721 than value after iterator increment. */
3722 if (clauses
->orderedc
)
3724 if (clauses
->collapse
<= 1 || i
>= clauses
->collapse
)
3727 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3728 type
, count
, build_one_cst (type
));
3729 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
,
3731 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
3736 tmp
= gfc_evaluate_now (step
, pblock
);
3737 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
3740 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
3742 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3743 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3744 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3746 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
3749 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
3750 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3752 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
3756 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
3758 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3759 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3760 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3762 tree l
= build_omp_clause (input_location
,
3763 OMP_CLAUSE_LASTPRIVATE
);
3764 OMP_CLAUSE_DECL (l
) = dovar_decl
;
3765 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
3766 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
3768 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
3772 gcc_assert (simple
|| c
!= NULL
);
3776 if (op
!= EXEC_OMP_SIMD
)
3777 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3778 else if (collapse
== 1)
3780 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3781 OMP_CLAUSE_LINEAR_STEP (tmp
) = build_int_cst (type
, 1);
3782 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3783 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
3786 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3787 OMP_CLAUSE_DECL (tmp
) = count
;
3788 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3791 if (i
+ 1 < collapse
)
3792 code
= code
->block
->next
;
3795 if (pblock
!= &block
)
3798 gfc_start_block (&block
);
3801 gfc_start_block (&body
);
3803 FOR_EACH_VEC_ELT (inits
, ix
, di
)
3804 gfc_add_modify (&body
, di
->var
, di
->init
);
3807 /* Cycle statement is implemented with a goto. Exit statement must not be
3808 present for this loop. */
3809 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3811 /* Put these labels where they can be found later. */
3813 code
->cycle_label
= cycle_label
;
3814 code
->exit_label
= NULL_TREE
;
3816 /* Main loop body. */
3817 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
3818 gfc_add_expr_to_block (&body
, tmp
);
3820 /* Label for cycle statements (if needed). */
3821 if (TREE_USED (cycle_label
))
3823 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3824 gfc_add_expr_to_block (&body
, tmp
);
3827 /* End of loop body. */
3830 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
3831 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
3832 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
3833 case EXEC_OMP_TASKLOOP
: stmt
= make_node (OMP_TASKLOOP
); break;
3834 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
3835 default: gcc_unreachable ();
3838 TREE_TYPE (stmt
) = void_type_node
;
3839 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
3840 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
3841 OMP_FOR_INIT (stmt
) = init
;
3842 OMP_FOR_COND (stmt
) = cond
;
3843 OMP_FOR_INCR (stmt
) = incr
;
3845 OMP_FOR_ORIG_DECLS (stmt
) = orig_decls
;
3846 gfc_add_expr_to_block (&block
, stmt
);
3848 vec_free (doacross_steps
);
3849 doacross_steps
= saved_doacross_steps
;
3851 return gfc_finish_block (&block
);
3854 /* parallel loop and kernels loop. */
3856 gfc_trans_oacc_combined_directive (gfc_code
*code
)
3858 stmtblock_t block
, *pblock
= NULL
;
3859 gfc_omp_clauses construct_clauses
, loop_clauses
;
3860 tree stmt
, oacc_clauses
= NULL_TREE
;
3861 enum tree_code construct_code
;
3865 case EXEC_OACC_PARALLEL_LOOP
:
3866 construct_code
= OACC_PARALLEL
;
3868 case EXEC_OACC_KERNELS_LOOP
:
3869 construct_code
= OACC_KERNELS
;
3875 gfc_start_block (&block
);
3877 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
3878 if (code
->ext
.omp_clauses
!= NULL
)
3880 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
3881 sizeof (construct_clauses
));
3882 loop_clauses
.collapse
= construct_clauses
.collapse
;
3883 loop_clauses
.gang
= construct_clauses
.gang
;
3884 loop_clauses
.gang_static
= construct_clauses
.gang_static
;
3885 loop_clauses
.gang_num_expr
= construct_clauses
.gang_num_expr
;
3886 loop_clauses
.gang_static_expr
= construct_clauses
.gang_static_expr
;
3887 loop_clauses
.vector
= construct_clauses
.vector
;
3888 loop_clauses
.vector_expr
= construct_clauses
.vector_expr
;
3889 loop_clauses
.worker
= construct_clauses
.worker
;
3890 loop_clauses
.worker_expr
= construct_clauses
.worker_expr
;
3891 loop_clauses
.seq
= construct_clauses
.seq
;
3892 loop_clauses
.par_auto
= construct_clauses
.par_auto
;
3893 loop_clauses
.independent
= construct_clauses
.independent
;
3894 loop_clauses
.tile_list
= construct_clauses
.tile_list
;
3895 loop_clauses
.lists
[OMP_LIST_PRIVATE
]
3896 = construct_clauses
.lists
[OMP_LIST_PRIVATE
];
3897 loop_clauses
.lists
[OMP_LIST_REDUCTION
]
3898 = construct_clauses
.lists
[OMP_LIST_REDUCTION
];
3899 construct_clauses
.gang
= false;
3900 construct_clauses
.gang_static
= false;
3901 construct_clauses
.gang_num_expr
= NULL
;
3902 construct_clauses
.gang_static_expr
= NULL
;
3903 construct_clauses
.vector
= false;
3904 construct_clauses
.vector_expr
= NULL
;
3905 construct_clauses
.worker
= false;
3906 construct_clauses
.worker_expr
= NULL
;
3907 construct_clauses
.seq
= false;
3908 construct_clauses
.par_auto
= false;
3909 construct_clauses
.independent
= false;
3910 construct_clauses
.independent
= false;
3911 construct_clauses
.tile_list
= NULL
;
3912 construct_clauses
.lists
[OMP_LIST_PRIVATE
] = NULL
;
3913 if (construct_code
== OACC_KERNELS
)
3914 construct_clauses
.lists
[OMP_LIST_REDUCTION
] = NULL
;
3915 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
3918 if (!loop_clauses
.seq
)
3922 stmt
= gfc_trans_omp_do (code
, EXEC_OACC_LOOP
, pblock
, &loop_clauses
, NULL
);
3923 if (TREE_CODE (stmt
) != BIND_EXPR
)
3924 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3927 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
3929 gfc_add_expr_to_block (&block
, stmt
);
3930 return gfc_finish_block (&block
);
3934 gfc_trans_omp_flush (void)
3936 tree decl
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
3937 return build_call_expr_loc (input_location
, decl
, 0);
3941 gfc_trans_omp_master (gfc_code
*code
)
3943 tree stmt
= gfc_trans_code (code
->block
->next
);
3944 if (IS_EMPTY_STMT (stmt
))
3946 return build1_v (OMP_MASTER
, stmt
);
3950 gfc_trans_omp_ordered (gfc_code
*code
)
3954 if (!code
->ext
.omp_clauses
->simd
)
3955 return gfc_trans_code (code
->block
? code
->block
->next
: NULL
);
3956 code
->ext
.omp_clauses
->threads
= 0;
3958 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, code
->ext
.omp_clauses
,
3960 return build2_loc (input_location
, OMP_ORDERED
, void_type_node
,
3961 code
->block
? gfc_trans_code (code
->block
->next
)
3962 : NULL_TREE
, omp_clauses
);
3966 gfc_trans_omp_parallel (gfc_code
*code
)
3969 tree stmt
, omp_clauses
;
3971 gfc_start_block (&block
);
3972 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3975 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3976 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3977 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3979 gfc_add_expr_to_block (&block
, stmt
);
3980 return gfc_finish_block (&block
);
/* Indices into the per-construct clause array used when splitting the
   clauses of a combined construct, plus the matching bitmask values.  */
enum
  {
    GFC_OMP_SPLIT_SIMD,
    GFC_OMP_SPLIT_DO,
    GFC_OMP_SPLIT_PARALLEL,
    GFC_OMP_SPLIT_DISTRIBUTE,
    GFC_OMP_SPLIT_TEAMS,
    GFC_OMP_SPLIT_TARGET,
    GFC_OMP_SPLIT_TASKLOOP,
    GFC_OMP_SPLIT_NUM
  };

enum
  {
    GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
    GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
    GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
    GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
    GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
    GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET),
    GFC_OMP_MASK_TASKLOOP = (1 << GFC_OMP_SPLIT_TASKLOOP)
  };
4007 gfc_split_omp_clauses (gfc_code
*code
,
4008 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
4010 int mask
= 0, innermost
= 0;
4011 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
4014 case EXEC_OMP_DISTRIBUTE
:
4015 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
4017 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4018 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4019 innermost
= GFC_OMP_SPLIT_DO
;
4021 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4022 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
4023 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4024 innermost
= GFC_OMP_SPLIT_SIMD
;
4026 case EXEC_OMP_DISTRIBUTE_SIMD
:
4027 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
4028 innermost
= GFC_OMP_SPLIT_SIMD
;
4031 innermost
= GFC_OMP_SPLIT_DO
;
4033 case EXEC_OMP_DO_SIMD
:
4034 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4035 innermost
= GFC_OMP_SPLIT_SIMD
;
4037 case EXEC_OMP_PARALLEL
:
4038 innermost
= GFC_OMP_SPLIT_PARALLEL
;
4040 case EXEC_OMP_PARALLEL_DO
:
4041 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4042 innermost
= GFC_OMP_SPLIT_DO
;
4044 case EXEC_OMP_PARALLEL_DO_SIMD
:
4045 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4046 innermost
= GFC_OMP_SPLIT_SIMD
;
4049 innermost
= GFC_OMP_SPLIT_SIMD
;
4051 case EXEC_OMP_TARGET
:
4052 innermost
= GFC_OMP_SPLIT_TARGET
;
4054 case EXEC_OMP_TARGET_PARALLEL
:
4055 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
;
4056 innermost
= GFC_OMP_SPLIT_PARALLEL
;
4058 case EXEC_OMP_TARGET_PARALLEL_DO
:
4059 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4060 innermost
= GFC_OMP_SPLIT_DO
;
4062 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
4063 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
4064 | GFC_OMP_MASK_SIMD
;
4065 innermost
= GFC_OMP_SPLIT_SIMD
;
4067 case EXEC_OMP_TARGET_SIMD
:
4068 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_SIMD
;
4069 innermost
= GFC_OMP_SPLIT_SIMD
;
4071 case EXEC_OMP_TARGET_TEAMS
:
4072 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
4073 innermost
= GFC_OMP_SPLIT_TEAMS
;
4075 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4076 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
4077 | GFC_OMP_MASK_DISTRIBUTE
;
4078 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
4080 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4081 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4082 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4083 innermost
= GFC_OMP_SPLIT_DO
;
4085 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4086 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4087 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4088 innermost
= GFC_OMP_SPLIT_SIMD
;
4090 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4091 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
4092 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
4093 innermost
= GFC_OMP_SPLIT_SIMD
;
4095 case EXEC_OMP_TASKLOOP
:
4096 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
4098 case EXEC_OMP_TASKLOOP_SIMD
:
4099 mask
= GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
4100 innermost
= GFC_OMP_SPLIT_SIMD
;
4102 case EXEC_OMP_TEAMS
:
4103 innermost
= GFC_OMP_SPLIT_TEAMS
;
4105 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4106 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
4107 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
4109 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4110 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4111 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4112 innermost
= GFC_OMP_SPLIT_DO
;
4114 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4115 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4116 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4117 innermost
= GFC_OMP_SPLIT_SIMD
;
4119 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4120 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
4121 innermost
= GFC_OMP_SPLIT_SIMD
;
4128 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
4131 if (code
->ext
.omp_clauses
!= NULL
)
4133 if (mask
& GFC_OMP_MASK_TARGET
)
4135 /* First the clauses that are unique to some constructs. */
4136 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
4137 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
4138 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_IS_DEVICE_PTR
]
4139 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IS_DEVICE_PTR
];
4140 clausesa
[GFC_OMP_SPLIT_TARGET
].device
4141 = code
->ext
.omp_clauses
->device
;
4142 clausesa
[GFC_OMP_SPLIT_TARGET
].defaultmap
4143 = code
->ext
.omp_clauses
->defaultmap
;
4144 clausesa
[GFC_OMP_SPLIT_TARGET
].if_exprs
[OMP_IF_TARGET
]
4145 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TARGET
];
4146 /* And this is copied to all. */
4147 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
4148 = code
->ext
.omp_clauses
->if_expr
;
4150 if (mask
& GFC_OMP_MASK_TEAMS
)
4152 /* First the clauses that are unique to some constructs. */
4153 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
4154 = code
->ext
.omp_clauses
->num_teams
;
4155 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
4156 = code
->ext
.omp_clauses
->thread_limit
;
4157 /* Shared and default clauses are allowed on parallel, teams
4159 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
4160 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4161 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
4162 = code
->ext
.omp_clauses
->default_sharing
;
4164 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4166 /* First the clauses that are unique to some constructs. */
4167 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
4168 = code
->ext
.omp_clauses
->dist_sched_kind
;
4169 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
4170 = code
->ext
.omp_clauses
->dist_chunk_size
;
4171 /* Duplicate collapse. */
4172 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
4173 = code
->ext
.omp_clauses
->collapse
;
4175 if (mask
& GFC_OMP_MASK_PARALLEL
)
4177 /* First the clauses that are unique to some constructs. */
4178 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
4179 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
4180 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
4181 = code
->ext
.omp_clauses
->num_threads
;
4182 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
4183 = code
->ext
.omp_clauses
->proc_bind
;
4184 /* Shared and default clauses are allowed on parallel, teams
4186 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
4187 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4188 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
4189 = code
->ext
.omp_clauses
->default_sharing
;
4190 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_exprs
[OMP_IF_PARALLEL
]
4191 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_PARALLEL
];
4192 /* And this is copied to all. */
4193 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
4194 = code
->ext
.omp_clauses
->if_expr
;
4196 if (mask
& GFC_OMP_MASK_DO
)
4198 /* First the clauses that are unique to some constructs. */
4199 clausesa
[GFC_OMP_SPLIT_DO
].ordered
4200 = code
->ext
.omp_clauses
->ordered
;
4201 clausesa
[GFC_OMP_SPLIT_DO
].orderedc
4202 = code
->ext
.omp_clauses
->orderedc
;
4203 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
4204 = code
->ext
.omp_clauses
->sched_kind
;
4205 if (innermost
== GFC_OMP_SPLIT_SIMD
)
4206 clausesa
[GFC_OMP_SPLIT_DO
].sched_simd
4207 = code
->ext
.omp_clauses
->sched_simd
;
4208 clausesa
[GFC_OMP_SPLIT_DO
].sched_monotonic
4209 = code
->ext
.omp_clauses
->sched_monotonic
;
4210 clausesa
[GFC_OMP_SPLIT_DO
].sched_nonmonotonic
4211 = code
->ext
.omp_clauses
->sched_nonmonotonic
;
4212 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
4213 = code
->ext
.omp_clauses
->chunk_size
;
4214 clausesa
[GFC_OMP_SPLIT_DO
].nowait
4215 = code
->ext
.omp_clauses
->nowait
;
4216 /* Duplicate collapse. */
4217 clausesa
[GFC_OMP_SPLIT_DO
].collapse
4218 = code
->ext
.omp_clauses
->collapse
;
4220 if (mask
& GFC_OMP_MASK_SIMD
)
4222 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
4223 = code
->ext
.omp_clauses
->safelen_expr
;
4224 clausesa
[GFC_OMP_SPLIT_SIMD
].simdlen_expr
4225 = code
->ext
.omp_clauses
->simdlen_expr
;
4226 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
4227 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
4228 /* Duplicate collapse. */
4229 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
4230 = code
->ext
.omp_clauses
->collapse
;
4232 if (mask
& GFC_OMP_MASK_TASKLOOP
)
4234 /* First the clauses that are unique to some constructs. */
4235 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].nogroup
4236 = code
->ext
.omp_clauses
->nogroup
;
4237 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].grainsize
4238 = code
->ext
.omp_clauses
->grainsize
;
4239 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].num_tasks
4240 = code
->ext
.omp_clauses
->num_tasks
;
4241 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].priority
4242 = code
->ext
.omp_clauses
->priority
;
4243 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].final_expr
4244 = code
->ext
.omp_clauses
->final_expr
;
4245 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].untied
4246 = code
->ext
.omp_clauses
->untied
;
4247 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].mergeable
4248 = code
->ext
.omp_clauses
->mergeable
;
4249 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_exprs
[OMP_IF_TASKLOOP
]
4250 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TASKLOOP
];
4251 /* And this is copied to all. */
4252 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_expr
4253 = code
->ext
.omp_clauses
->if_expr
;
4254 /* Shared and default clauses are allowed on parallel, teams
4256 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_SHARED
]
4257 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4258 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].default_sharing
4259 = code
->ext
.omp_clauses
->default_sharing
;
4260 /* Duplicate collapse. */
4261 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].collapse
4262 = code
->ext
.omp_clauses
->collapse
;
4264 /* Private clause is supported on all constructs,
4265 it is enough to put it on the innermost one. For
4266 !$ omp parallel do put it on parallel though,
4267 as that's what we did for OpenMP 3.1. */
4268 clausesa
[innermost
== GFC_OMP_SPLIT_DO
4269 ? (int) GFC_OMP_SPLIT_PARALLEL
4270 : innermost
].lists
[OMP_LIST_PRIVATE
]
4271 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
4272 /* Firstprivate clause is supported on all constructs but
4273 simd. Put it on the outermost of those and duplicate
4274 on parallel and teams. */
4275 if (mask
& GFC_OMP_MASK_TARGET
)
4276 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_FIRSTPRIVATE
]
4277 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4278 if (mask
& GFC_OMP_MASK_TEAMS
)
4279 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
4280 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4281 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4282 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
4283 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4284 if (mask
& GFC_OMP_MASK_PARALLEL
)
4285 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
4286 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4287 else if (mask
& GFC_OMP_MASK_DO
)
4288 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
4289 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4290 /* Lastprivate is allowed on distribute, do and simd.
4291 In parallel do{, simd} we actually want to put it on
4292 parallel rather than do. */
4293 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4294 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_LASTPRIVATE
]
4295 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4296 if (mask
& GFC_OMP_MASK_PARALLEL
)
4297 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
4298 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4299 else if (mask
& GFC_OMP_MASK_DO
)
4300 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
4301 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4302 if (mask
& GFC_OMP_MASK_SIMD
)
4303 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
4304 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4305 /* Reduction is allowed on simd, do, parallel and teams.
4306 Duplicate it on all of them, but omit on do if
4307 parallel is present. */
4308 if (mask
& GFC_OMP_MASK_TEAMS
)
4309 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_REDUCTION
]
4310 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4311 if (mask
& GFC_OMP_MASK_PARALLEL
)
4312 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_REDUCTION
]
4313 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4314 else if (mask
& GFC_OMP_MASK_DO
)
4315 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_REDUCTION
]
4316 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4317 if (mask
& GFC_OMP_MASK_SIMD
)
4318 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_REDUCTION
]
4319 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4320 /* Linear clause is supported on do and simd,
4321 put it on the innermost one. */
4322 clausesa
[innermost
].lists
[OMP_LIST_LINEAR
]
4323 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
4325 if ((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
4326 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
4327 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
4331 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
4332 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
4335 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4336 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
4339 gfc_start_block (&block
);
4341 gfc_init_block (&block
);
4343 if (clausesa
== NULL
)
4345 clausesa
= clausesa_buf
;
4346 gfc_split_omp_clauses (code
, clausesa
);
4350 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
4351 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
4352 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
4355 if (TREE_CODE (body
) != BIND_EXPR
)
4356 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
4360 else if (TREE_CODE (body
) != BIND_EXPR
)
4361 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
4364 stmt
= make_node (OMP_FOR
);
4365 TREE_TYPE (stmt
) = void_type_node
;
4366 OMP_FOR_BODY (stmt
) = body
;
4367 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
4371 gfc_add_expr_to_block (&block
, stmt
);
4372 return gfc_finish_block (&block
);
4376 gfc_trans_omp_parallel_do (gfc_code
*code
, stmtblock_t
*pblock
,
4377 gfc_omp_clauses
*clausesa
)
4379 stmtblock_t block
, *new_pblock
= pblock
;
4380 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4381 tree stmt
, omp_clauses
= NULL_TREE
;
4384 gfc_start_block (&block
);
4386 gfc_init_block (&block
);
4388 if (clausesa
== NULL
)
4390 clausesa
= clausesa_buf
;
4391 gfc_split_omp_clauses (code
, clausesa
);
4394 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
4398 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
4399 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
4400 new_pblock
= &block
;
4404 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DO
, new_pblock
,
4405 &clausesa
[GFC_OMP_SPLIT_DO
], omp_clauses
);
4408 if (TREE_CODE (stmt
) != BIND_EXPR
)
4409 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4413 else if (TREE_CODE (stmt
) != BIND_EXPR
)
4414 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
4415 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4417 OMP_PARALLEL_COMBINED (stmt
) = 1;
4418 gfc_add_expr_to_block (&block
, stmt
);
4419 return gfc_finish_block (&block
);
4423 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
4424 gfc_omp_clauses
*clausesa
)
4427 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4428 tree stmt
, omp_clauses
= NULL_TREE
;
4431 gfc_start_block (&block
);
4433 gfc_init_block (&block
);
4435 if (clausesa
== NULL
)
4437 clausesa
= clausesa_buf
;
4438 gfc_split_omp_clauses (code
, clausesa
);
4442 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
4446 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
4449 if (TREE_CODE (stmt
) != BIND_EXPR
)
4450 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4454 else if (TREE_CODE (stmt
) != BIND_EXPR
)
4455 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
4458 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4460 OMP_PARALLEL_COMBINED (stmt
) = 1;
4462 gfc_add_expr_to_block (&block
, stmt
);
4463 return gfc_finish_block (&block
);
4467 gfc_trans_omp_parallel_sections (gfc_code
*code
)
4470 gfc_omp_clauses section_clauses
;
4471 tree stmt
, omp_clauses
;
4473 memset (§ion_clauses
, 0, sizeof (section_clauses
));
4474 section_clauses
.nowait
= true;
4476 gfc_start_block (&block
);
4477 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4480 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
4481 if (TREE_CODE (stmt
) != BIND_EXPR
)
4482 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4485 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4487 OMP_PARALLEL_COMBINED (stmt
) = 1;
4488 gfc_add_expr_to_block (&block
, stmt
);
4489 return gfc_finish_block (&block
);
4493 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
4496 gfc_omp_clauses workshare_clauses
;
4497 tree stmt
, omp_clauses
;
4499 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
4500 workshare_clauses
.nowait
= true;
4502 gfc_start_block (&block
);
4503 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4506 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
4507 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4508 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4510 OMP_PARALLEL_COMBINED (stmt
) = 1;
4511 gfc_add_expr_to_block (&block
, stmt
);
4512 return gfc_finish_block (&block
);
4516 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4518 stmtblock_t block
, body
;
4519 tree omp_clauses
, stmt
;
4520 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
4522 gfc_start_block (&block
);
4524 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
4526 gfc_init_block (&body
);
4527 for (code
= code
->block
; code
; code
= code
->block
)
4529 /* Last section is special because of lastprivate, so even if it
4530 is empty, chain it in. */
4531 stmt
= gfc_trans_omp_code (code
->next
,
4532 has_lastprivate
&& code
->block
== NULL
);
4533 if (! IS_EMPTY_STMT (stmt
))
4535 stmt
= build1_v (OMP_SECTION
, stmt
);
4536 gfc_add_expr_to_block (&body
, stmt
);
4539 stmt
= gfc_finish_block (&body
);
4541 stmt
= build2_loc (input_location
, OMP_SECTIONS
, void_type_node
, stmt
,
4543 gfc_add_expr_to_block (&block
, stmt
);
4545 return gfc_finish_block (&block
);
4549 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4551 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, clauses
, code
->loc
);
4552 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4553 stmt
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, stmt
,
4559 gfc_trans_omp_task (gfc_code
*code
)
4562 tree stmt
, omp_clauses
;
4564 gfc_start_block (&block
);
4565 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4568 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4569 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4570 stmt
= build2_loc (input_location
, OMP_TASK
, void_type_node
, stmt
,
4572 gfc_add_expr_to_block (&block
, stmt
);
4573 return gfc_finish_block (&block
);
4577 gfc_trans_omp_taskgroup (gfc_code
*code
)
4579 tree stmt
= gfc_trans_code (code
->block
->next
);
4580 return build1_loc (input_location
, OMP_TASKGROUP
, void_type_node
, stmt
);
4584 gfc_trans_omp_taskwait (void)
4586 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
4587 return build_call_expr_loc (input_location
, decl
, 0);
4591 gfc_trans_omp_taskyield (void)
4593 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
4594 return build_call_expr_loc (input_location
, decl
, 0);
4598 gfc_trans_omp_distribute (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
4601 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4602 tree stmt
, omp_clauses
= NULL_TREE
;
4604 gfc_start_block (&block
);
4605 if (clausesa
== NULL
)
4607 clausesa
= clausesa_buf
;
4608 gfc_split_omp_clauses (code
, clausesa
);
4612 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4616 case EXEC_OMP_DISTRIBUTE
:
4617 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4618 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4619 /* This is handled in gfc_trans_omp_do. */
4622 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4623 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4624 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4625 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
4626 if (TREE_CODE (stmt
) != BIND_EXPR
)
4627 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4631 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4632 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4633 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4634 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
4635 if (TREE_CODE (stmt
) != BIND_EXPR
)
4636 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4640 case EXEC_OMP_DISTRIBUTE_SIMD
:
4641 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4642 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4643 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4644 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4645 if (TREE_CODE (stmt
) != BIND_EXPR
)
4646 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4655 tree distribute
= make_node (OMP_DISTRIBUTE
);
4656 TREE_TYPE (distribute
) = void_type_node
;
4657 OMP_FOR_BODY (distribute
) = stmt
;
4658 OMP_FOR_CLAUSES (distribute
) = omp_clauses
;
4661 gfc_add_expr_to_block (&block
, stmt
);
4662 return gfc_finish_block (&block
);
4666 gfc_trans_omp_teams (gfc_code
*code
, gfc_omp_clauses
*clausesa
,
4670 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4672 bool combined
= true;
4674 gfc_start_block (&block
);
4675 if (clausesa
== NULL
)
4677 clausesa
= clausesa_buf
;
4678 gfc_split_omp_clauses (code
, clausesa
);
4682 = chainon (omp_clauses
,
4683 gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TEAMS
],
4687 case EXEC_OMP_TARGET_TEAMS
:
4688 case EXEC_OMP_TEAMS
:
4689 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4692 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4693 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4694 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DISTRIBUTE
, NULL
,
4695 &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4699 stmt
= gfc_trans_omp_distribute (code
, clausesa
);
4704 stmt
= build2_loc (input_location
, OMP_TEAMS
, void_type_node
, stmt
,
4707 OMP_TEAMS_COMBINED (stmt
) = 1;
4709 gfc_add_expr_to_block (&block
, stmt
);
4710 return gfc_finish_block (&block
);
4714 gfc_trans_omp_target (gfc_code
*code
)
4717 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
4718 tree stmt
, omp_clauses
= NULL_TREE
;
4720 gfc_start_block (&block
);
4721 gfc_split_omp_clauses (code
, clausesa
);
4724 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TARGET
],
4728 case EXEC_OMP_TARGET
:
4730 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4731 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4733 case EXEC_OMP_TARGET_PARALLEL
:
4737 gfc_start_block (&iblock
);
4739 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
4741 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4742 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4744 gfc_add_expr_to_block (&iblock
, stmt
);
4745 stmt
= gfc_finish_block (&iblock
);
4746 if (TREE_CODE (stmt
) != BIND_EXPR
)
4747 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4752 case EXEC_OMP_TARGET_PARALLEL_DO
:
4753 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
4754 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
4755 if (TREE_CODE (stmt
) != BIND_EXPR
)
4756 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4760 case EXEC_OMP_TARGET_SIMD
:
4761 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4762 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4763 if (TREE_CODE (stmt
) != BIND_EXPR
)
4764 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4770 && (clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
4771 || clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
))
4773 gfc_omp_clauses clausesb
;
4775 /* For combined !$omp target teams, the num_teams and
4776 thread_limit clauses are evaluated before entering the
4777 target construct. */
4778 memset (&clausesb
, '\0', sizeof (clausesb
));
4779 clausesb
.num_teams
= clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
;
4780 clausesb
.thread_limit
= clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
;
4781 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
= NULL
;
4782 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
= NULL
;
4784 = gfc_trans_omp_clauses (&block
, &clausesb
, code
->loc
);
4786 stmt
= gfc_trans_omp_teams (code
, clausesa
, teams_clauses
);
4791 stmt
= gfc_trans_omp_teams (code
, clausesa
, NULL_TREE
);
4793 if (TREE_CODE (stmt
) != BIND_EXPR
)
4794 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4801 stmt
= build2_loc (input_location
, OMP_TARGET
, void_type_node
, stmt
,
4803 if (code
->op
!= EXEC_OMP_TARGET
)
4804 OMP_TARGET_COMBINED (stmt
) = 1;
4806 gfc_add_expr_to_block (&block
, stmt
);
4807 return gfc_finish_block (&block
);
4811 gfc_trans_omp_taskloop (gfc_code
*code
)
4814 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
4815 tree stmt
, omp_clauses
= NULL_TREE
;
4817 gfc_start_block (&block
);
4818 gfc_split_omp_clauses (code
, clausesa
);
4821 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TASKLOOP
],
4825 case EXEC_OMP_TASKLOOP
:
4826 /* This is handled in gfc_trans_omp_do. */
4829 case EXEC_OMP_TASKLOOP_SIMD
:
4830 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4831 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4832 if (TREE_CODE (stmt
) != BIND_EXPR
)
4833 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4842 tree taskloop
= make_node (OMP_TASKLOOP
);
4843 TREE_TYPE (taskloop
) = void_type_node
;
4844 OMP_FOR_BODY (taskloop
) = stmt
;
4845 OMP_FOR_CLAUSES (taskloop
) = omp_clauses
;
4848 gfc_add_expr_to_block (&block
, stmt
);
4849 return gfc_finish_block (&block
);
4853 gfc_trans_omp_target_data (gfc_code
*code
)
4856 tree stmt
, omp_clauses
;
4858 gfc_start_block (&block
);
4859 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4861 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4862 stmt
= build2_loc (input_location
, OMP_TARGET_DATA
, void_type_node
, stmt
,
4864 gfc_add_expr_to_block (&block
, stmt
);
4865 return gfc_finish_block (&block
);
4869 gfc_trans_omp_target_enter_data (gfc_code
*code
)
4872 tree stmt
, omp_clauses
;
4874 gfc_start_block (&block
);
4875 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4877 stmt
= build1_loc (input_location
, OMP_TARGET_ENTER_DATA
, void_type_node
,
4879 gfc_add_expr_to_block (&block
, stmt
);
4880 return gfc_finish_block (&block
);
4884 gfc_trans_omp_target_exit_data (gfc_code
*code
)
4887 tree stmt
, omp_clauses
;
4889 gfc_start_block (&block
);
4890 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4892 stmt
= build1_loc (input_location
, OMP_TARGET_EXIT_DATA
, void_type_node
,
4894 gfc_add_expr_to_block (&block
, stmt
);
4895 return gfc_finish_block (&block
);
4899 gfc_trans_omp_target_update (gfc_code
*code
)
4902 tree stmt
, omp_clauses
;
4904 gfc_start_block (&block
);
4905 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4907 stmt
= build1_loc (input_location
, OMP_TARGET_UPDATE
, void_type_node
,
4909 gfc_add_expr_to_block (&block
, stmt
);
4910 return gfc_finish_block (&block
);
4914 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4916 tree res
, tmp
, stmt
;
4917 stmtblock_t block
, *pblock
= NULL
;
4918 stmtblock_t singleblock
;
4919 int saved_ompws_flags
;
4920 bool singleblock_in_progress
= false;
4921 /* True if previous gfc_code in workshare construct is not workshared. */
4922 bool prev_singleunit
;
4924 code
= code
->block
->next
;
4928 gfc_start_block (&block
);
4931 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
4932 prev_singleunit
= false;
4934 /* Translate statements one by one to trees until we reach
4935 the end of the workshare construct. Adjacent gfc_codes that
4936 are a single unit of work are clustered and encapsulated in a
4937 single OMP_SINGLE construct. */
4938 for (; code
; code
= code
->next
)
4940 if (code
->here
!= 0)
4942 res
= gfc_trans_label_here (code
);
4943 gfc_add_expr_to_block (pblock
, res
);
4946 /* No dependence analysis, use for clauses with wait.
4947 If this is the last gfc_code, use default omp_clauses. */
4948 if (code
->next
== NULL
&& clauses
->nowait
)
4949 ompws_flags
|= OMPWS_NOWAIT
;
4951 /* By default, every gfc_code is a single unit of work. */
4952 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
4953 ompws_flags
&= ~(OMPWS_SCALARIZER_WS
| OMPWS_SCALARIZER_BODY
);
4962 res
= gfc_trans_assign (code
);
4965 case EXEC_POINTER_ASSIGN
:
4966 res
= gfc_trans_pointer_assign (code
);
4969 case EXEC_INIT_ASSIGN
:
4970 res
= gfc_trans_init_assign (code
);
4974 res
= gfc_trans_forall (code
);
4978 res
= gfc_trans_where (code
);
4981 case EXEC_OMP_ATOMIC
:
4982 res
= gfc_trans_omp_directive (code
);
4985 case EXEC_OMP_PARALLEL
:
4986 case EXEC_OMP_PARALLEL_DO
:
4987 case EXEC_OMP_PARALLEL_SECTIONS
:
4988 case EXEC_OMP_PARALLEL_WORKSHARE
:
4989 case EXEC_OMP_CRITICAL
:
4990 saved_ompws_flags
= ompws_flags
;
4992 res
= gfc_trans_omp_directive (code
);
4993 ompws_flags
= saved_ompws_flags
;
4997 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
5000 gfc_set_backend_locus (&code
->loc
);
5002 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
5004 if (prev_singleunit
)
5006 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
5007 /* Add current gfc_code to single block. */
5008 gfc_add_expr_to_block (&singleblock
, res
);
5011 /* Finish single block and add it to pblock. */
5012 tmp
= gfc_finish_block (&singleblock
);
5013 tmp
= build2_loc (input_location
, OMP_SINGLE
,
5014 void_type_node
, tmp
, NULL_TREE
);
5015 gfc_add_expr_to_block (pblock
, tmp
);
5016 /* Add current gfc_code to pblock. */
5017 gfc_add_expr_to_block (pblock
, res
);
5018 singleblock_in_progress
= false;
5023 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
5025 /* Start single block. */
5026 gfc_init_block (&singleblock
);
5027 gfc_add_expr_to_block (&singleblock
, res
);
5028 singleblock_in_progress
= true;
5031 /* Add the new statement to the block. */
5032 gfc_add_expr_to_block (pblock
, res
);
5034 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
5038 /* Finish remaining SINGLE block, if we were in the middle of one. */
5039 if (singleblock_in_progress
)
5041 /* Finish single block and add it to pblock. */
5042 tmp
= gfc_finish_block (&singleblock
);
5043 tmp
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, tmp
,
5045 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
5047 gfc_add_expr_to_block (pblock
, tmp
);
5050 stmt
= gfc_finish_block (pblock
);
5051 if (TREE_CODE (stmt
) != BIND_EXPR
)
5053 if (!IS_EMPTY_STMT (stmt
))
5055 tree bindblock
= poplevel (1, 0);
5056 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
5064 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
5065 stmt
= gfc_trans_omp_barrier ();
5072 gfc_trans_oacc_declare (gfc_code
*code
)
5075 tree stmt
, oacc_clauses
;
5076 enum tree_code construct_code
;
5078 construct_code
= OACC_DATA
;
5080 gfc_start_block (&block
);
5082 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.oacc_declare
->clauses
,
5084 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
5085 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
5087 gfc_add_expr_to_block (&block
, stmt
);
5089 return gfc_finish_block (&block
);
5093 gfc_trans_oacc_directive (gfc_code
*code
)
5097 case EXEC_OACC_PARALLEL_LOOP
:
5098 case EXEC_OACC_KERNELS_LOOP
:
5099 return gfc_trans_oacc_combined_directive (code
);
5100 case EXEC_OACC_PARALLEL
:
5101 case EXEC_OACC_KERNELS
:
5102 case EXEC_OACC_DATA
:
5103 case EXEC_OACC_HOST_DATA
:
5104 return gfc_trans_oacc_construct (code
);
5105 case EXEC_OACC_LOOP
:
5106 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
5108 case EXEC_OACC_UPDATE
:
5109 case EXEC_OACC_CACHE
:
5110 case EXEC_OACC_ENTER_DATA
:
5111 case EXEC_OACC_EXIT_DATA
:
5112 return gfc_trans_oacc_executable_directive (code
);
5113 case EXEC_OACC_WAIT
:
5114 return gfc_trans_oacc_wait_directive (code
);
5115 case EXEC_OACC_ATOMIC
:
5116 return gfc_trans_omp_atomic (code
);
5117 case EXEC_OACC_DECLARE
:
5118 return gfc_trans_oacc_declare (code
);
5125 gfc_trans_omp_directive (gfc_code
*code
)
5129 case EXEC_OMP_ATOMIC
:
5130 return gfc_trans_omp_atomic (code
);
5131 case EXEC_OMP_BARRIER
:
5132 return gfc_trans_omp_barrier ();
5133 case EXEC_OMP_CANCEL
:
5134 return gfc_trans_omp_cancel (code
);
5135 case EXEC_OMP_CANCELLATION_POINT
:
5136 return gfc_trans_omp_cancellation_point (code
);
5137 case EXEC_OMP_CRITICAL
:
5138 return gfc_trans_omp_critical (code
);
5139 case EXEC_OMP_DISTRIBUTE
:
5142 case EXEC_OMP_TASKLOOP
:
5143 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
5145 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
5146 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
5147 case EXEC_OMP_DISTRIBUTE_SIMD
:
5148 return gfc_trans_omp_distribute (code
, NULL
);
5149 case EXEC_OMP_DO_SIMD
:
5150 return gfc_trans_omp_do_simd (code
, NULL
, NULL
, NULL_TREE
);
5151 case EXEC_OMP_FLUSH
:
5152 return gfc_trans_omp_flush ();
5153 case EXEC_OMP_MASTER
:
5154 return gfc_trans_omp_master (code
);
5155 case EXEC_OMP_ORDERED
:
5156 return gfc_trans_omp_ordered (code
);
5157 case EXEC_OMP_PARALLEL
:
5158 return gfc_trans_omp_parallel (code
);
5159 case EXEC_OMP_PARALLEL_DO
:
5160 return gfc_trans_omp_parallel_do (code
, NULL
, NULL
);
5161 case EXEC_OMP_PARALLEL_DO_SIMD
:
5162 return gfc_trans_omp_parallel_do_simd (code
, NULL
, NULL
);
5163 case EXEC_OMP_PARALLEL_SECTIONS
:
5164 return gfc_trans_omp_parallel_sections (code
);
5165 case EXEC_OMP_PARALLEL_WORKSHARE
:
5166 return gfc_trans_omp_parallel_workshare (code
);
5167 case EXEC_OMP_SECTIONS
:
5168 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
5169 case EXEC_OMP_SINGLE
:
5170 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
5171 case EXEC_OMP_TARGET
:
5172 case EXEC_OMP_TARGET_PARALLEL
:
5173 case EXEC_OMP_TARGET_PARALLEL_DO
:
5174 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
5175 case EXEC_OMP_TARGET_SIMD
:
5176 case EXEC_OMP_TARGET_TEAMS
:
5177 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
5178 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5179 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5180 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
5181 return gfc_trans_omp_target (code
);
5182 case EXEC_OMP_TARGET_DATA
:
5183 return gfc_trans_omp_target_data (code
);
5184 case EXEC_OMP_TARGET_ENTER_DATA
:
5185 return gfc_trans_omp_target_enter_data (code
);
5186 case EXEC_OMP_TARGET_EXIT_DATA
:
5187 return gfc_trans_omp_target_exit_data (code
);
5188 case EXEC_OMP_TARGET_UPDATE
:
5189 return gfc_trans_omp_target_update (code
);
5191 return gfc_trans_omp_task (code
);
5192 case EXEC_OMP_TASKGROUP
:
5193 return gfc_trans_omp_taskgroup (code
);
5194 case EXEC_OMP_TASKLOOP_SIMD
:
5195 return gfc_trans_omp_taskloop (code
);
5196 case EXEC_OMP_TASKWAIT
:
5197 return gfc_trans_omp_taskwait ();
5198 case EXEC_OMP_TASKYIELD
:
5199 return gfc_trans_omp_taskyield ();
5200 case EXEC_OMP_TEAMS
:
5201 case EXEC_OMP_TEAMS_DISTRIBUTE
:
5202 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5203 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5204 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
5205 return gfc_trans_omp_teams (code
, NULL
, NULL_TREE
);
5206 case EXEC_OMP_WORKSHARE
:
5207 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);
5214 gfc_trans_omp_declare_simd (gfc_namespace
*ns
)
5219 gfc_omp_declare_simd
*ods
;
5220 for (ods
= ns
->omp_declare_simd
; ods
; ods
= ods
->next
)
5222 tree c
= gfc_trans_omp_clauses (NULL
, ods
->clauses
, ods
->where
, true);
5223 tree fndecl
= ns
->proc_name
->backend_decl
;
5225 c
= tree_cons (NULL_TREE
, c
, NULL_TREE
);
5226 c
= build_tree_list (get_identifier ("omp declare simd"), c
);
5227 TREE_CHAIN (c
) = DECL_ATTRIBUTES (fndecl
);
5228 DECL_ATTRIBUTES (fndecl
) = c
;