1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2018 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "gimple-expr.h"
30 #include "stringpool.h"
31 #include "fold-const.h"
32 #include "gimplify.h" /* For create_tmp_var_raw. */
33 #include "trans-stmt.h"
34 #include "trans-types.h"
35 #include "trans-array.h"
36 #include "trans-const.h"
38 #include "gomp-constants.h"
39 #include "omp-general.h"
42 #define GCC_DIAG_STYLE __gcc_tdiag__
43 #include "diagnostic-core.h"
45 #define GCC_DIAG_STYLE __gcc_gfc__
49 /* True if OpenMP should privatize what this DECL points to rather
50 than the DECL itself. */
53 gfc_omp_privatize_by_reference (const_tree decl
)
55 tree type
= TREE_TYPE (decl
);
57 if (TREE_CODE (type
) == REFERENCE_TYPE
58 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
61 if (TREE_CODE (type
) == POINTER_TYPE
)
63 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
64 that have POINTER_TYPE type and aren't scalar pointers, scalar
65 allocatables, Cray pointees or C pointers are supposed to be
66 privatized by reference. */
67 if (GFC_DECL_GET_SCALAR_POINTER (decl
)
68 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
69 || GFC_DECL_CRAY_POINTEE (decl
)
70 || GFC_DECL_ASSOCIATE_VAR_P (decl
)
71 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
74 if (!DECL_ARTIFICIAL (decl
)
75 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
78 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
80 if (DECL_LANG_SPECIFIC (decl
)
81 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
88 /* True if OpenMP sharing attribute of DECL is predetermined. */
90 enum omp_clause_default_kind
91 gfc_omp_predetermined_sharing (tree decl
)
93 /* Associate names preserve the association established during ASSOCIATE.
94 As they are implemented either as pointers to the selector or array
95 descriptor and shouldn't really change in the ASSOCIATE region,
96 this decl can be either shared or firstprivate. If it is a pointer,
97 use firstprivate, as it is cheaper that way, otherwise make it shared. */
98 if (GFC_DECL_ASSOCIATE_VAR_P (decl
))
100 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
101 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
103 return OMP_CLAUSE_DEFAULT_SHARED
;
106 if (DECL_ARTIFICIAL (decl
)
107 && ! GFC_DECL_RESULT (decl
)
108 && ! (DECL_LANG_SPECIFIC (decl
)
109 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
110 return OMP_CLAUSE_DEFAULT_SHARED
;
112 /* Cray pointees shouldn't be listed in any clauses and should be
113 gimplified to dereference of the corresponding Cray pointer.
114 Make them all private, so that they are emitted in the debug
116 if (GFC_DECL_CRAY_POINTEE (decl
))
117 return OMP_CLAUSE_DEFAULT_PRIVATE
;
119 /* Assumed-size arrays are predetermined shared. */
120 if (TREE_CODE (decl
) == PARM_DECL
121 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
122 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
123 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
124 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
126 return OMP_CLAUSE_DEFAULT_SHARED
;
128 /* Dummy procedures aren't considered variables by OpenMP, thus are
129 disallowed in OpenMP clauses. They are represented as PARM_DECLs
130 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
131 to avoid complaining about their uses with default(none). */
132 if (TREE_CODE (decl
) == PARM_DECL
133 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
134 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
135 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
137 /* COMMON and EQUIVALENCE decls are shared. They
138 are only referenced through DECL_VALUE_EXPR of the variables
139 contained in them. If those are privatized, they will not be
140 gimplified to the COMMON or EQUIVALENCE decls. */
141 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
142 return OMP_CLAUSE_DEFAULT_SHARED
;
144 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
145 return OMP_CLAUSE_DEFAULT_SHARED
;
147 /* These are either array or derived parameters, or vtables.
148 In the former cases, the OpenMP standard doesn't consider them to be
149 variables at all (they can't be redefined), but they can nevertheless appear
150 in parallel/task regions and for default(none) purposes treat them as shared.
151 For vtables likely the same handling is desirable. */
152 if (VAR_P (decl
) && TREE_READONLY (decl
) && TREE_STATIC (decl
))
153 return OMP_CLAUSE_DEFAULT_SHARED
;
155 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
158 /* Return decl that should be used when reporting DEFAULT(NONE)
162 gfc_omp_report_decl (tree decl
)
164 if (DECL_ARTIFICIAL (decl
)
165 && DECL_LANG_SPECIFIC (decl
)
166 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
167 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
172 /* Return true if TYPE has any allocatable components. */
175 gfc_has_alloc_comps (tree type
, tree decl
)
179 if (POINTER_TYPE_P (type
))
181 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
182 type
= TREE_TYPE (type
);
183 else if (GFC_DECL_GET_SCALAR_POINTER (decl
))
187 if (GFC_DESCRIPTOR_TYPE_P (type
) || GFC_ARRAY_TYPE_P (type
))
188 type
= gfc_get_element_type (type
);
190 if (TREE_CODE (type
) != RECORD_TYPE
)
193 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
195 ftype
= TREE_TYPE (field
);
196 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
198 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
199 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
201 if (gfc_has_alloc_comps (ftype
, field
))
207 /* Return true if DECL in private clause needs
208 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
210 gfc_omp_private_outer_ref (tree decl
)
212 tree type
= TREE_TYPE (decl
);
214 if (gfc_omp_privatize_by_reference (decl
))
215 type
= TREE_TYPE (type
);
217 if (GFC_DESCRIPTOR_TYPE_P (type
)
218 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
221 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
224 if (gfc_has_alloc_comps (type
, decl
))
230 /* Callback for gfc_omp_unshare_expr. */
233 gfc_omp_unshare_expr_r (tree
*tp
, int *walk_subtrees
, void *)
236 enum tree_code code
= TREE_CODE (t
);
238 /* Stop at types, decls, constants like copy_tree_r. */
239 if (TREE_CODE_CLASS (code
) == tcc_type
240 || TREE_CODE_CLASS (code
) == tcc_declaration
241 || TREE_CODE_CLASS (code
) == tcc_constant
244 else if (handled_component_p (t
)
245 || TREE_CODE (t
) == MEM_REF
)
247 *tp
= unshare_expr (t
);
254 /* Unshare in expr anything that the FE which normally doesn't
255 care much about tree sharing (because during gimplification
256 everything is unshared) could cause problems with tree sharing
257 at omp-low.c time. */
260 gfc_omp_unshare_expr (tree expr
)
262 walk_tree (&expr
, gfc_omp_unshare_expr_r
, NULL
, NULL
);
/* Which operation gfc_walk_alloc_comps should apply to each
   allocatable component it visits.  */
enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,	   /* Deallocate the component.  */
  WALK_ALLOC_COMPS_DEFAULT_CTOR,   /* Allocate, don't copy contents.  */
  WALK_ALLOC_COMPS_COPY_CTOR	   /* Allocate and copy from source.  */
};
273 /* Handle allocatable components in OpenMP clauses. */
276 gfc_walk_alloc_comps (tree decl
, tree dest
, tree var
,
277 enum walk_alloc_comps kind
)
279 stmtblock_t block
, tmpblock
;
280 tree type
= TREE_TYPE (decl
), then_b
, tem
, field
;
281 gfc_init_block (&block
);
283 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
285 if (GFC_DESCRIPTOR_TYPE_P (type
))
287 gfc_init_block (&tmpblock
);
288 tem
= gfc_full_array_size (&tmpblock
, decl
,
289 GFC_TYPE_ARRAY_RANK (type
));
290 then_b
= gfc_finish_block (&tmpblock
);
291 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (then_b
));
292 tem
= gfc_omp_unshare_expr (tem
);
293 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
294 gfc_array_index_type
, tem
,
299 if (!TYPE_DOMAIN (type
)
300 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
301 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
302 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
304 tem
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
305 TYPE_SIZE_UNIT (type
),
306 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
307 tem
= size_binop (MINUS_EXPR
, tem
, size_one_node
);
310 tem
= array_type_nelts (type
);
311 tem
= fold_convert (gfc_array_index_type
, tem
);
314 tree nelems
= gfc_evaluate_now (tem
, &block
);
315 tree index
= gfc_create_var (gfc_array_index_type
, "S");
317 gfc_init_block (&tmpblock
);
318 tem
= gfc_conv_array_data (decl
);
319 tree declvar
= build_fold_indirect_ref_loc (input_location
, tem
);
320 tree declvref
= gfc_build_array_ref (declvar
, index
, NULL
);
321 tree destvar
, destvref
= NULL_TREE
;
324 tem
= gfc_conv_array_data (dest
);
325 destvar
= build_fold_indirect_ref_loc (input_location
, tem
);
326 destvref
= gfc_build_array_ref (destvar
, index
, NULL
);
328 gfc_add_expr_to_block (&tmpblock
,
329 gfc_walk_alloc_comps (declvref
, destvref
,
333 gfc_init_loopinfo (&loop
);
335 loop
.from
[0] = gfc_index_zero_node
;
336 loop
.loopvar
[0] = index
;
338 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
339 gfc_add_block_to_block (&block
, &loop
.pre
);
340 return gfc_finish_block (&block
);
342 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var
))
344 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
346 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
347 type
= TREE_TYPE (decl
);
350 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
);
351 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
353 tree ftype
= TREE_TYPE (field
);
354 tree declf
, destf
= NULL_TREE
;
355 bool has_alloc_comps
= gfc_has_alloc_comps (ftype
, field
);
356 if ((!GFC_DESCRIPTOR_TYPE_P (ftype
)
357 || GFC_TYPE_ARRAY_AKIND (ftype
) != GFC_ARRAY_ALLOCATABLE
)
358 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field
)
361 declf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
362 decl
, field
, NULL_TREE
);
364 destf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
365 dest
, field
, NULL_TREE
);
370 case WALK_ALLOC_COMPS_DTOR
:
372 case WALK_ALLOC_COMPS_DEFAULT_CTOR
:
373 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
374 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
376 gfc_add_modify (&block
, unshare_expr (destf
),
377 unshare_expr (declf
));
378 tem
= gfc_duplicate_allocatable_nocopy
379 (destf
, declf
, ftype
,
380 GFC_TYPE_ARRAY_RANK (ftype
));
382 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
383 tem
= gfc_duplicate_allocatable_nocopy (destf
, declf
, ftype
, 0);
385 case WALK_ALLOC_COMPS_COPY_CTOR
:
386 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
387 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
388 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
,
389 GFC_TYPE_ARRAY_RANK (ftype
),
391 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
392 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
, 0,
397 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
400 gfc_init_block (&tmpblock
);
401 gfc_add_expr_to_block (&tmpblock
,
402 gfc_walk_alloc_comps (declf
, destf
,
404 then_b
= gfc_finish_block (&tmpblock
);
405 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
406 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
407 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
408 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
409 tem
= unshare_expr (declf
);
414 tem
= fold_convert (pvoid_type_node
, tem
);
415 tem
= fold_build2_loc (input_location
, NE_EXPR
,
416 logical_type_node
, tem
,
418 then_b
= build3_loc (input_location
, COND_EXPR
, void_type_node
,
420 build_empty_stmt (input_location
));
422 gfc_add_expr_to_block (&block
, then_b
);
424 if (kind
== WALK_ALLOC_COMPS_DTOR
)
426 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
427 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
429 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
430 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
,
431 NULL_TREE
, NULL_TREE
, true,
433 GFC_CAF_COARRAY_NOCOARRAY
);
434 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
436 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
438 tem
= gfc_call_free (unshare_expr (declf
));
439 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
444 return gfc_finish_block (&block
);
447 /* Return code to initialize DECL with its default constructor, or
448 NULL if there's nothing to do. */
451 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
453 tree type
= TREE_TYPE (decl
), size
, ptr
, cond
, then_b
, else_b
;
454 stmtblock_t block
, cond_block
;
456 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_PRIVATE
457 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LASTPRIVATE
458 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
459 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_REDUCTION
);
461 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
462 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
463 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
465 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
468 gfc_start_block (&block
);
469 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
470 OMP_CLAUSE_DECL (clause
),
471 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
472 gfc_add_expr_to_block (&block
, tem
);
473 return gfc_finish_block (&block
);
478 gcc_assert (outer
!= NULL_TREE
);
480 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
481 "not currently allocated" allocation status if outer
482 array is "not currently allocated", otherwise should be allocated. */
483 gfc_start_block (&block
);
485 gfc_init_block (&cond_block
);
487 if (GFC_DESCRIPTOR_TYPE_P (type
))
489 gfc_add_modify (&cond_block
, decl
, outer
);
490 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
491 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
492 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
494 gfc_conv_descriptor_lbound_get (decl
, rank
));
495 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
496 size
, gfc_index_one_node
);
497 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
498 size
= fold_build2_loc (input_location
, MULT_EXPR
,
499 gfc_array_index_type
, size
,
500 gfc_conv_descriptor_stride_get (decl
, rank
));
501 tree esize
= fold_convert (gfc_array_index_type
,
502 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
503 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
505 size
= unshare_expr (size
);
506 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
510 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
511 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
512 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
513 if (GFC_DESCRIPTOR_TYPE_P (type
))
514 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
), ptr
);
516 gfc_add_modify (&cond_block
, unshare_expr (decl
),
517 fold_convert (TREE_TYPE (decl
), ptr
));
518 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
520 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
521 OMP_CLAUSE_DECL (clause
),
522 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
523 gfc_add_expr_to_block (&cond_block
, tem
);
525 then_b
= gfc_finish_block (&cond_block
);
527 /* Reduction clause requires allocated ALLOCATABLE. */
528 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_REDUCTION
)
530 gfc_init_block (&cond_block
);
531 if (GFC_DESCRIPTOR_TYPE_P (type
))
532 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
),
535 gfc_add_modify (&cond_block
, unshare_expr (decl
),
536 build_zero_cst (TREE_TYPE (decl
)));
537 else_b
= gfc_finish_block (&cond_block
);
539 tree tem
= fold_convert (pvoid_type_node
,
540 GFC_DESCRIPTOR_TYPE_P (type
)
541 ? gfc_conv_descriptor_data_get (outer
) : outer
);
542 tem
= unshare_expr (tem
);
543 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
544 tem
, null_pointer_node
);
545 gfc_add_expr_to_block (&block
,
546 build3_loc (input_location
, COND_EXPR
,
547 void_type_node
, cond
, then_b
,
551 gfc_add_expr_to_block (&block
, then_b
);
553 return gfc_finish_block (&block
);
556 /* Build and return code for a copy constructor from SRC to DEST. */
559 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
561 tree type
= TREE_TYPE (dest
), ptr
, size
, call
;
562 tree cond
, then_b
, else_b
;
563 stmtblock_t block
, cond_block
;
565 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
566 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
568 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
569 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
570 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
572 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
574 gfc_start_block (&block
);
575 gfc_add_modify (&block
, dest
, src
);
576 tree tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
577 WALK_ALLOC_COMPS_COPY_CTOR
);
578 gfc_add_expr_to_block (&block
, tem
);
579 return gfc_finish_block (&block
);
582 return build2_v (MODIFY_EXPR
, dest
, src
);
585 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
586 and copied from SRC. */
587 gfc_start_block (&block
);
589 gfc_init_block (&cond_block
);
591 gfc_add_modify (&cond_block
, dest
, src
);
592 if (GFC_DESCRIPTOR_TYPE_P (type
))
594 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
595 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
596 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
598 gfc_conv_descriptor_lbound_get (dest
, rank
));
599 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
600 size
, gfc_index_one_node
);
601 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
602 size
= fold_build2_loc (input_location
, MULT_EXPR
,
603 gfc_array_index_type
, size
,
604 gfc_conv_descriptor_stride_get (dest
, rank
));
605 tree esize
= fold_convert (gfc_array_index_type
,
606 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
607 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
609 size
= unshare_expr (size
);
610 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
614 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
615 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
616 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
617 if (GFC_DESCRIPTOR_TYPE_P (type
))
618 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
), ptr
);
620 gfc_add_modify (&cond_block
, unshare_expr (dest
),
621 fold_convert (TREE_TYPE (dest
), ptr
));
623 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
624 ? gfc_conv_descriptor_data_get (src
) : src
;
625 srcptr
= unshare_expr (srcptr
);
626 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
627 call
= build_call_expr_loc (input_location
,
628 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
630 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
631 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
633 tree tem
= gfc_walk_alloc_comps (src
, dest
,
634 OMP_CLAUSE_DECL (clause
),
635 WALK_ALLOC_COMPS_COPY_CTOR
);
636 gfc_add_expr_to_block (&cond_block
, tem
);
638 then_b
= gfc_finish_block (&cond_block
);
640 gfc_init_block (&cond_block
);
641 if (GFC_DESCRIPTOR_TYPE_P (type
))
642 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
),
645 gfc_add_modify (&cond_block
, unshare_expr (dest
),
646 build_zero_cst (TREE_TYPE (dest
)));
647 else_b
= gfc_finish_block (&cond_block
);
649 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
650 unshare_expr (srcptr
), null_pointer_node
);
651 gfc_add_expr_to_block (&block
,
652 build3_loc (input_location
, COND_EXPR
,
653 void_type_node
, cond
, then_b
, else_b
));
655 return gfc_finish_block (&block
);
658 /* Similarly, except use an intrinsic or pointer assignment operator
662 gfc_omp_clause_assign_op (tree clause
, tree dest
, tree src
)
664 tree type
= TREE_TYPE (dest
), ptr
, size
, call
, nonalloc
;
665 tree cond
, then_b
, else_b
;
666 stmtblock_t block
, cond_block
, cond_block2
, inner_block
;
668 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
669 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
670 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
672 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
674 gfc_start_block (&block
);
675 /* First dealloc any allocatable components in DEST. */
676 tree tem
= gfc_walk_alloc_comps (dest
, NULL_TREE
,
677 OMP_CLAUSE_DECL (clause
),
678 WALK_ALLOC_COMPS_DTOR
);
679 gfc_add_expr_to_block (&block
, tem
);
680 /* Then copy over toplevel data. */
681 gfc_add_modify (&block
, dest
, src
);
682 /* Finally allocate any allocatable components and copy. */
683 tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
684 WALK_ALLOC_COMPS_COPY_CTOR
);
685 gfc_add_expr_to_block (&block
, tem
);
686 return gfc_finish_block (&block
);
689 return build2_v (MODIFY_EXPR
, dest
, src
);
692 gfc_start_block (&block
);
694 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
696 then_b
= gfc_walk_alloc_comps (dest
, NULL_TREE
, OMP_CLAUSE_DECL (clause
),
697 WALK_ALLOC_COMPS_DTOR
);
698 tree tem
= fold_convert (pvoid_type_node
,
699 GFC_DESCRIPTOR_TYPE_P (type
)
700 ? gfc_conv_descriptor_data_get (dest
) : dest
);
701 tem
= unshare_expr (tem
);
702 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
703 tem
, null_pointer_node
);
704 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
705 then_b
, build_empty_stmt (input_location
));
706 gfc_add_expr_to_block (&block
, tem
);
709 gfc_init_block (&cond_block
);
711 if (GFC_DESCRIPTOR_TYPE_P (type
))
713 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
714 size
= gfc_conv_descriptor_ubound_get (src
, rank
);
715 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
717 gfc_conv_descriptor_lbound_get (src
, rank
));
718 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
719 size
, gfc_index_one_node
);
720 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
721 size
= fold_build2_loc (input_location
, MULT_EXPR
,
722 gfc_array_index_type
, size
,
723 gfc_conv_descriptor_stride_get (src
, rank
));
724 tree esize
= fold_convert (gfc_array_index_type
,
725 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
726 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
728 size
= unshare_expr (size
);
729 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
733 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
734 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
736 tree destptr
= GFC_DESCRIPTOR_TYPE_P (type
)
737 ? gfc_conv_descriptor_data_get (dest
) : dest
;
738 destptr
= unshare_expr (destptr
);
739 destptr
= fold_convert (pvoid_type_node
, destptr
);
740 gfc_add_modify (&cond_block
, ptr
, destptr
);
742 nonalloc
= fold_build2_loc (input_location
, EQ_EXPR
, logical_type_node
,
743 destptr
, null_pointer_node
);
745 if (GFC_DESCRIPTOR_TYPE_P (type
))
748 for (i
= 0; i
< GFC_TYPE_ARRAY_RANK (type
); i
++)
750 tree rank
= gfc_rank_cst
[i
];
751 tree tem
= gfc_conv_descriptor_ubound_get (src
, rank
);
752 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
753 gfc_array_index_type
, tem
,
754 gfc_conv_descriptor_lbound_get (src
, rank
));
755 tem
= fold_build2_loc (input_location
, PLUS_EXPR
,
756 gfc_array_index_type
, tem
,
757 gfc_conv_descriptor_lbound_get (dest
, rank
));
758 tem
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
759 tem
, gfc_conv_descriptor_ubound_get (dest
,
761 cond
= fold_build2_loc (input_location
, TRUTH_ORIF_EXPR
,
762 logical_type_node
, cond
, tem
);
766 gfc_init_block (&cond_block2
);
768 if (GFC_DESCRIPTOR_TYPE_P (type
))
770 gfc_init_block (&inner_block
);
771 gfc_allocate_using_malloc (&inner_block
, ptr
, size
, NULL_TREE
);
772 then_b
= gfc_finish_block (&inner_block
);
774 gfc_init_block (&inner_block
);
775 gfc_add_modify (&inner_block
, ptr
,
776 gfc_call_realloc (&inner_block
, ptr
, size
));
777 else_b
= gfc_finish_block (&inner_block
);
779 gfc_add_expr_to_block (&cond_block2
,
780 build3_loc (input_location
, COND_EXPR
,
782 unshare_expr (nonalloc
),
784 gfc_add_modify (&cond_block2
, dest
, src
);
785 gfc_conv_descriptor_data_set (&cond_block2
, unshare_expr (dest
), ptr
);
789 gfc_allocate_using_malloc (&cond_block2
, ptr
, size
, NULL_TREE
);
790 gfc_add_modify (&cond_block2
, unshare_expr (dest
),
791 fold_convert (type
, ptr
));
793 then_b
= gfc_finish_block (&cond_block2
);
794 else_b
= build_empty_stmt (input_location
);
796 gfc_add_expr_to_block (&cond_block
,
797 build3_loc (input_location
, COND_EXPR
,
798 void_type_node
, unshare_expr (cond
),
801 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
802 ? gfc_conv_descriptor_data_get (src
) : src
;
803 srcptr
= unshare_expr (srcptr
);
804 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
805 call
= build_call_expr_loc (input_location
,
806 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
808 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
809 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
811 tree tem
= gfc_walk_alloc_comps (src
, dest
,
812 OMP_CLAUSE_DECL (clause
),
813 WALK_ALLOC_COMPS_COPY_CTOR
);
814 gfc_add_expr_to_block (&cond_block
, tem
);
816 then_b
= gfc_finish_block (&cond_block
);
818 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_COPYIN
)
820 gfc_init_block (&cond_block
);
821 if (GFC_DESCRIPTOR_TYPE_P (type
))
823 tree tmp
= gfc_conv_descriptor_data_get (unshare_expr (dest
));
824 tmp
= gfc_deallocate_with_status (tmp
, NULL_TREE
, NULL_TREE
,
825 NULL_TREE
, NULL_TREE
, true, NULL
,
826 GFC_CAF_COARRAY_NOCOARRAY
);
827 gfc_add_expr_to_block (&cond_block
, tmp
);
831 destptr
= gfc_evaluate_now (destptr
, &cond_block
);
832 gfc_add_expr_to_block (&cond_block
, gfc_call_free (destptr
));
833 gfc_add_modify (&cond_block
, unshare_expr (dest
),
834 build_zero_cst (TREE_TYPE (dest
)));
836 else_b
= gfc_finish_block (&cond_block
);
838 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
839 unshare_expr (srcptr
), null_pointer_node
);
840 gfc_add_expr_to_block (&block
,
841 build3_loc (input_location
, COND_EXPR
,
842 void_type_node
, cond
,
846 gfc_add_expr_to_block (&block
, then_b
);
848 return gfc_finish_block (&block
);
852 gfc_omp_linear_clause_add_loop (stmtblock_t
*block
, tree dest
, tree src
,
853 tree add
, tree nelems
)
855 stmtblock_t tmpblock
;
856 tree desta
, srca
, index
= gfc_create_var (gfc_array_index_type
, "S");
857 nelems
= gfc_evaluate_now (nelems
, block
);
859 gfc_init_block (&tmpblock
);
860 if (TREE_CODE (TREE_TYPE (dest
)) == ARRAY_TYPE
)
862 desta
= gfc_build_array_ref (dest
, index
, NULL
);
863 srca
= gfc_build_array_ref (src
, index
, NULL
);
867 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest
)));
868 tree idx
= fold_build2 (MULT_EXPR
, sizetype
,
869 fold_convert (sizetype
, index
),
870 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest
))));
871 desta
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
872 TREE_TYPE (dest
), dest
,
874 srca
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
875 TREE_TYPE (src
), src
,
878 gfc_add_modify (&tmpblock
, desta
,
879 fold_build2 (PLUS_EXPR
, TREE_TYPE (desta
),
883 gfc_init_loopinfo (&loop
);
885 loop
.from
[0] = gfc_index_zero_node
;
886 loop
.loopvar
[0] = index
;
888 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
889 gfc_add_block_to_block (block
, &loop
.pre
);
892 /* Build and return code for a constructor of DEST that initializes
893 it to SRC plus ADD (ADD is scalar integer). */
896 gfc_omp_clause_linear_ctor (tree clause
, tree dest
, tree src
, tree add
)
898 tree type
= TREE_TYPE (dest
), ptr
, size
, nelems
= NULL_TREE
;
901 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
903 gfc_start_block (&block
);
904 add
= gfc_evaluate_now (add
, &block
);
906 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
907 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
908 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
910 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
911 if (!TYPE_DOMAIN (type
)
912 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
913 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
914 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
916 nelems
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
917 TYPE_SIZE_UNIT (type
),
918 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
919 nelems
= size_binop (MINUS_EXPR
, nelems
, size_one_node
);
922 nelems
= array_type_nelts (type
);
923 nelems
= fold_convert (gfc_array_index_type
, nelems
);
925 gfc_omp_linear_clause_add_loop (&block
, dest
, src
, add
, nelems
);
926 return gfc_finish_block (&block
);
929 /* Allocatable arrays in LINEAR clauses need to be allocated
930 and copied from SRC. */
931 gfc_add_modify (&block
, dest
, src
);
932 if (GFC_DESCRIPTOR_TYPE_P (type
))
934 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
935 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
936 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
938 gfc_conv_descriptor_lbound_get (dest
, rank
));
939 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
940 size
, gfc_index_one_node
);
941 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
942 size
= fold_build2_loc (input_location
, MULT_EXPR
,
943 gfc_array_index_type
, size
,
944 gfc_conv_descriptor_stride_get (dest
, rank
));
945 tree esize
= fold_convert (gfc_array_index_type
,
946 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
947 nelems
= gfc_evaluate_now (unshare_expr (size
), &block
);
948 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
949 nelems
, unshare_expr (esize
));
950 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
952 nelems
= fold_build2_loc (input_location
, MINUS_EXPR
,
953 gfc_array_index_type
, nelems
,
957 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
958 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
959 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
960 if (GFC_DESCRIPTOR_TYPE_P (type
))
962 gfc_conv_descriptor_data_set (&block
, unshare_expr (dest
), ptr
);
963 tree etype
= gfc_get_element_type (type
);
964 ptr
= fold_convert (build_pointer_type (etype
), ptr
);
965 tree srcptr
= gfc_conv_descriptor_data_get (unshare_expr (src
));
966 srcptr
= fold_convert (build_pointer_type (etype
), srcptr
);
967 gfc_omp_linear_clause_add_loop (&block
, ptr
, srcptr
, add
, nelems
);
971 gfc_add_modify (&block
, unshare_expr (dest
),
972 fold_convert (TREE_TYPE (dest
), ptr
));
973 ptr
= fold_convert (TREE_TYPE (dest
), ptr
);
974 tree dstm
= build_fold_indirect_ref (ptr
);
975 tree srcm
= build_fold_indirect_ref (unshare_expr (src
));
976 gfc_add_modify (&block
, dstm
,
977 fold_build2 (PLUS_EXPR
, TREE_TYPE (add
), srcm
, add
));
979 return gfc_finish_block (&block
);
982 /* Build and return code destructing DECL. Return NULL if nothing
986 gfc_omp_clause_dtor (tree clause
, tree decl
)
988 tree type
= TREE_TYPE (decl
), tem
;
990 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
991 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
992 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
994 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
995 return gfc_walk_alloc_comps (decl
, NULL_TREE
,
996 OMP_CLAUSE_DECL (clause
),
997 WALK_ALLOC_COMPS_DTOR
);
1001 if (GFC_DESCRIPTOR_TYPE_P (type
))
1003 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
1004 to be deallocated if they were allocated. */
1005 tem
= gfc_conv_descriptor_data_get (decl
);
1006 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
, NULL_TREE
,
1007 NULL_TREE
, true, NULL
,
1008 GFC_CAF_COARRAY_NOCOARRAY
);
1011 tem
= gfc_call_free (decl
);
1012 tem
= gfc_omp_unshare_expr (tem
);
1014 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1019 gfc_init_block (&block
);
1020 gfc_add_expr_to_block (&block
,
1021 gfc_walk_alloc_comps (decl
, NULL_TREE
,
1022 OMP_CLAUSE_DECL (clause
),
1023 WALK_ALLOC_COMPS_DTOR
));
1024 gfc_add_expr_to_block (&block
, tem
);
1025 then_b
= gfc_finish_block (&block
);
1027 tem
= fold_convert (pvoid_type_node
,
1028 GFC_DESCRIPTOR_TYPE_P (type
)
1029 ? gfc_conv_descriptor_data_get (decl
) : decl
);
1030 tem
= unshare_expr (tem
);
1031 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1032 tem
, null_pointer_node
);
1033 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1034 then_b
, build_empty_stmt (input_location
));
1041 gfc_omp_finish_clause (tree c
, gimple_seq
*pre_p
)
1043 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
1046 tree decl
= OMP_CLAUSE_DECL (c
);
1048 /* Assumed-size arrays can't be mapped implicitly, they have to be
1049 mapped explicitly using array sections. */
1050 if (TREE_CODE (decl
) == PARM_DECL
1051 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
1052 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
1053 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
1054 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
1057 error_at (OMP_CLAUSE_LOCATION (c
),
1058 "implicit mapping of assumed size array %qD", decl
);
1062 tree c2
= NULL_TREE
, c3
= NULL_TREE
, c4
= NULL_TREE
;
1063 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
1065 if (!gfc_omp_privatize_by_reference (decl
)
1066 && !GFC_DECL_GET_SCALAR_POINTER (decl
)
1067 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1068 && !GFC_DECL_CRAY_POINTEE (decl
)
1069 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
1071 tree orig_decl
= decl
;
1072 c4
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1073 OMP_CLAUSE_SET_MAP_KIND (c4
, GOMP_MAP_POINTER
);
1074 OMP_CLAUSE_DECL (c4
) = decl
;
1075 OMP_CLAUSE_SIZE (c4
) = size_int (0);
1076 decl
= build_fold_indirect_ref (decl
);
1077 OMP_CLAUSE_DECL (c
) = decl
;
1078 OMP_CLAUSE_SIZE (c
) = NULL_TREE
;
1079 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1080 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1081 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1083 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1084 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1085 OMP_CLAUSE_DECL (c3
) = unshare_expr (decl
);
1086 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1087 decl
= build_fold_indirect_ref (decl
);
1088 OMP_CLAUSE_DECL (c
) = decl
;
1091 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1094 gfc_start_block (&block
);
1095 tree type
= TREE_TYPE (decl
);
1096 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1097 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
1098 ptr
= build_fold_indirect_ref (ptr
);
1099 OMP_CLAUSE_DECL (c
) = ptr
;
1100 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1101 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_TO_PSET
);
1102 OMP_CLAUSE_DECL (c2
) = decl
;
1103 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (type
);
1104 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1105 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1106 OMP_CLAUSE_DECL (c3
) = gfc_conv_descriptor_data_get (decl
);
1107 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1108 tree size
= create_tmp_var (gfc_array_index_type
);
1109 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
1110 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
1111 if (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
1112 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
)
1114 stmtblock_t cond_block
;
1115 tree tem
, then_b
, else_b
, zero
, cond
;
1117 gfc_init_block (&cond_block
);
1118 tem
= gfc_full_array_size (&cond_block
, decl
,
1119 GFC_TYPE_ARRAY_RANK (type
));
1120 gfc_add_modify (&cond_block
, size
, tem
);
1121 gfc_add_modify (&cond_block
, size
,
1122 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1124 then_b
= gfc_finish_block (&cond_block
);
1125 gfc_init_block (&cond_block
);
1126 zero
= build_int_cst (gfc_array_index_type
, 0);
1127 gfc_add_modify (&cond_block
, size
, zero
);
1128 else_b
= gfc_finish_block (&cond_block
);
1129 tem
= gfc_conv_descriptor_data_get (decl
);
1130 tem
= fold_convert (pvoid_type_node
, tem
);
1131 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1132 logical_type_node
, tem
, null_pointer_node
);
1133 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1134 void_type_node
, cond
,
1139 gfc_add_modify (&block
, size
,
1140 gfc_full_array_size (&block
, decl
,
1141 GFC_TYPE_ARRAY_RANK (type
)));
1142 gfc_add_modify (&block
, size
,
1143 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1146 OMP_CLAUSE_SIZE (c
) = size
;
1147 tree stmt
= gfc_finish_block (&block
);
1148 gimplify_and_add (stmt
, pre_p
);
1151 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
1153 = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
1154 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1157 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (last
);
1158 OMP_CLAUSE_CHAIN (last
) = c2
;
1163 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (last
);
1164 OMP_CLAUSE_CHAIN (last
) = c3
;
1169 OMP_CLAUSE_CHAIN (c4
) = OMP_CLAUSE_CHAIN (last
);
1170 OMP_CLAUSE_CHAIN (last
) = c4
;
1176 /* Return true if DECL is a scalar variable (for the purpose of
1177 implicit firstprivatization). */
1180 gfc_omp_scalar_p (tree decl
)
1182 tree type
= TREE_TYPE (decl
);
1183 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1184 type
= TREE_TYPE (type
);
1185 if (TREE_CODE (type
) == POINTER_TYPE
)
1187 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1188 || GFC_DECL_GET_SCALAR_POINTER (decl
))
1189 type
= TREE_TYPE (type
);
1190 if (GFC_ARRAY_TYPE_P (type
)
1191 || GFC_CLASS_TYPE_P (type
))
1194 if (TYPE_STRING_FLAG (type
))
1196 if (INTEGRAL_TYPE_P (type
)
1197 || SCALAR_FLOAT_TYPE_P (type
)
1198 || COMPLEX_FLOAT_TYPE_P (type
))
1204 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1205 disregarded in OpenMP construct, because it is going to be
1206 remapped during OpenMP lowering. SHARED is true if DECL
1207 is going to be shared, false if it is going to be privatized. */
1210 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
1212 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1213 && DECL_HAS_VALUE_EXPR_P (decl
))
1215 tree value
= DECL_VALUE_EXPR (decl
);
1217 if (TREE_CODE (value
) == COMPONENT_REF
1218 && VAR_P (TREE_OPERAND (value
, 0))
1219 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1221 /* If variable in COMMON or EQUIVALENCE is privatized, return
1222 true, as just that variable is supposed to be privatized,
1223 not the whole COMMON or whole EQUIVALENCE.
1224 For shared variables in COMMON or EQUIVALENCE, let them be
1225 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1226 from the same COMMON or EQUIVALENCE just one sharing of the
1227 whole COMMON or EQUIVALENCE is enough. */
1232 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
1238 /* Return true if DECL that is shared iff SHARED is true should
1239 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1243 gfc_omp_private_debug_clause (tree decl
, bool shared
)
1245 if (GFC_DECL_CRAY_POINTEE (decl
))
1248 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1249 && DECL_HAS_VALUE_EXPR_P (decl
))
1251 tree value
= DECL_VALUE_EXPR (decl
);
1253 if (TREE_CODE (value
) == COMPONENT_REF
1254 && VAR_P (TREE_OPERAND (value
, 0))
1255 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1262 /* Register language specific type size variables as potentially OpenMP
1263 firstprivate variables. */
1266 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
1268 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
1272 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
1273 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
1275 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
1276 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
1277 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
1279 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
1280 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
1286 gfc_trans_add_clause (tree node
, tree tail
)
1288 OMP_CLAUSE_CHAIN (node
) = tail
;
1293 gfc_trans_omp_variable (gfc_symbol
*sym
, bool declare_simd
)
1298 gfc_symbol
*proc_sym
;
1299 gfc_formal_arglist
*f
;
1301 gcc_assert (sym
->attr
.dummy
);
1302 proc_sym
= sym
->ns
->proc_name
;
1303 if (proc_sym
->attr
.entry_master
)
1305 if (gfc_return_by_reference (proc_sym
))
1308 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1311 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1317 return build_int_cst (integer_type_node
, cnt
);
1320 tree t
= gfc_get_symbol_decl (sym
);
1324 bool alternate_entry
;
1327 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1328 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1329 && sym
->result
== sym
;
1330 entry_master
= sym
->attr
.result
1331 && sym
->ns
->proc_name
->attr
.entry_master
1332 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1333 parent_decl
= current_function_decl
1334 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1336 if ((t
== parent_decl
&& return_value
)
1337 || (sym
->ns
&& sym
->ns
->proc_name
1338 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1339 && (alternate_entry
|| entry_master
)))
1344 /* Special case for assigning the return value of a function.
1345 Self recursive functions must have an explicit return value. */
1346 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1347 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1349 /* Similarly for alternate entry points. */
1350 else if (alternate_entry
1351 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1354 gfc_entry_list
*el
= NULL
;
1356 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1359 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1364 else if (entry_master
1365 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1367 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1373 gfc_trans_omp_variable_list (enum omp_clause_code code
,
1374 gfc_omp_namelist
*namelist
, tree list
,
1377 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1378 if (namelist
->sym
->attr
.referenced
|| declare_simd
)
1380 tree t
= gfc_trans_omp_variable (namelist
->sym
, declare_simd
);
1381 if (t
!= error_mark_node
)
1383 tree node
= build_omp_clause (input_location
, code
);
1384 OMP_CLAUSE_DECL (node
) = t
;
1385 list
= gfc_trans_add_clause (node
, list
);
1391 struct omp_udr_find_orig_data
1393 gfc_omp_udr
*omp_udr
;
1398 omp_udr_find_orig (gfc_expr
**e
, int *walk_subtrees ATTRIBUTE_UNUSED
,
1401 struct omp_udr_find_orig_data
*cd
= (struct omp_udr_find_orig_data
*) data
;
1402 if ((*e
)->expr_type
== EXPR_VARIABLE
1403 && (*e
)->symtree
->n
.sym
== cd
->omp_udr
->omp_orig
)
1404 cd
->omp_orig_seen
= true;
1410 gfc_trans_omp_array_reduction_or_udr (tree c
, gfc_omp_namelist
*n
, locus where
)
1412 gfc_symbol
*sym
= n
->sym
;
1413 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
1414 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
1415 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
1416 gfc_symbol omp_var_copy
[4];
1417 gfc_expr
*e1
, *e2
, *e3
, *e4
;
1419 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
1420 locus old_loc
= gfc_current_locus
;
1423 gfc_omp_udr
*udr
= n
->udr
? n
->udr
->udr
: NULL
;
1425 decl
= OMP_CLAUSE_DECL (c
);
1426 gfc_current_locus
= where
;
1427 type
= TREE_TYPE (decl
);
1428 outer_decl
= create_tmp_var_raw (type
);
1429 if (TREE_CODE (decl
) == PARM_DECL
1430 && TREE_CODE (type
) == REFERENCE_TYPE
1431 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
1432 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
1434 decl
= build_fold_indirect_ref (decl
);
1435 type
= TREE_TYPE (type
);
1438 /* Create a fake symbol for init value. */
1439 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
1440 init_val_sym
.ns
= sym
->ns
;
1441 init_val_sym
.name
= sym
->name
;
1442 init_val_sym
.ts
= sym
->ts
;
1443 init_val_sym
.attr
.referenced
= 1;
1444 init_val_sym
.declared_at
= where
;
1445 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
1446 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1447 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
1448 else if (udr
->initializer_ns
)
1449 backend_decl
= NULL
;
1451 switch (sym
->ts
.type
)
1457 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
1460 backend_decl
= NULL_TREE
;
1463 init_val_sym
.backend_decl
= backend_decl
;
1465 /* Create a fake symbol for the outer array reference. */
1468 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
1469 outer_sym
.attr
.dummy
= 0;
1470 outer_sym
.attr
.result
= 0;
1471 outer_sym
.attr
.flavor
= FL_VARIABLE
;
1472 outer_sym
.backend_decl
= outer_decl
;
1473 if (decl
!= OMP_CLAUSE_DECL (c
))
1474 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
1476 /* Create fake symtrees for it. */
1477 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
1478 symtree1
->n
.sym
= sym
;
1479 gcc_assert (symtree1
== root1
);
1481 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
1482 symtree2
->n
.sym
= &init_val_sym
;
1483 gcc_assert (symtree2
== root2
);
1485 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
1486 symtree3
->n
.sym
= &outer_sym
;
1487 gcc_assert (symtree3
== root3
);
1489 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
1492 omp_var_copy
[0] = *udr
->omp_out
;
1493 omp_var_copy
[1] = *udr
->omp_in
;
1494 *udr
->omp_out
= outer_sym
;
1495 *udr
->omp_in
= *sym
;
1496 if (udr
->initializer_ns
)
1498 omp_var_copy
[2] = *udr
->omp_priv
;
1499 omp_var_copy
[3] = *udr
->omp_orig
;
1500 *udr
->omp_priv
= *sym
;
1501 *udr
->omp_orig
= outer_sym
;
1505 /* Create expressions. */
1506 e1
= gfc_get_expr ();
1507 e1
->expr_type
= EXPR_VARIABLE
;
1509 e1
->symtree
= symtree1
;
1511 if (sym
->attr
.dimension
)
1513 e1
->ref
= ref
= gfc_get_ref ();
1514 ref
->type
= REF_ARRAY
;
1515 ref
->u
.ar
.where
= where
;
1516 ref
->u
.ar
.as
= sym
->as
;
1517 ref
->u
.ar
.type
= AR_FULL
;
1518 ref
->u
.ar
.dimen
= 0;
1520 t
= gfc_resolve_expr (e1
);
1524 if (backend_decl
!= NULL_TREE
)
1526 e2
= gfc_get_expr ();
1527 e2
->expr_type
= EXPR_VARIABLE
;
1529 e2
->symtree
= symtree2
;
1531 t
= gfc_resolve_expr (e2
);
1534 else if (udr
->initializer_ns
== NULL
)
1536 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
1537 e2
= gfc_default_initializer (&sym
->ts
);
1539 t
= gfc_resolve_expr (e2
);
1542 else if (n
->udr
->initializer
->op
== EXEC_ASSIGN
)
1544 e2
= gfc_copy_expr (n
->udr
->initializer
->expr2
);
1545 t
= gfc_resolve_expr (e2
);
1548 if (udr
&& udr
->initializer_ns
)
1550 struct omp_udr_find_orig_data cd
;
1552 cd
.omp_orig_seen
= false;
1553 gfc_code_walker (&n
->udr
->initializer
,
1554 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
1555 if (cd
.omp_orig_seen
)
1556 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
1559 e3
= gfc_copy_expr (e1
);
1560 e3
->symtree
= symtree3
;
1561 t
= gfc_resolve_expr (e3
);
1566 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
1570 e4
= gfc_add (e3
, e1
);
1573 e4
= gfc_multiply (e3
, e1
);
1575 case TRUTH_ANDIF_EXPR
:
1576 e4
= gfc_and (e3
, e1
);
1578 case TRUTH_ORIF_EXPR
:
1579 e4
= gfc_or (e3
, e1
);
1582 e4
= gfc_eqv (e3
, e1
);
1585 e4
= gfc_neqv (e3
, e1
);
1603 if (n
->udr
->combiner
->op
== EXEC_ASSIGN
)
1606 e3
= gfc_copy_expr (n
->udr
->combiner
->expr1
);
1607 e4
= gfc_copy_expr (n
->udr
->combiner
->expr2
);
1608 t
= gfc_resolve_expr (e3
);
1610 t
= gfc_resolve_expr (e4
);
1619 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
1620 intrinsic_sym
.ns
= sym
->ns
;
1621 intrinsic_sym
.name
= iname
;
1622 intrinsic_sym
.ts
= sym
->ts
;
1623 intrinsic_sym
.attr
.referenced
= 1;
1624 intrinsic_sym
.attr
.intrinsic
= 1;
1625 intrinsic_sym
.attr
.function
= 1;
1626 intrinsic_sym
.attr
.implicit_type
= 1;
1627 intrinsic_sym
.result
= &intrinsic_sym
;
1628 intrinsic_sym
.declared_at
= where
;
1630 symtree4
= gfc_new_symtree (&root4
, iname
);
1631 symtree4
->n
.sym
= &intrinsic_sym
;
1632 gcc_assert (symtree4
== root4
);
1634 e4
= gfc_get_expr ();
1635 e4
->expr_type
= EXPR_FUNCTION
;
1637 e4
->symtree
= symtree4
;
1638 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
1639 e4
->value
.function
.actual
->expr
= e3
;
1640 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
1641 e4
->value
.function
.actual
->next
->expr
= e1
;
1643 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1645 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1646 e1
= gfc_copy_expr (e1
);
1647 e3
= gfc_copy_expr (e3
);
1648 t
= gfc_resolve_expr (e4
);
1652 /* Create the init statement list. */
1655 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
1657 stmt
= gfc_trans_call (n
->udr
->initializer
, false,
1658 NULL_TREE
, NULL_TREE
, false);
1659 if (TREE_CODE (stmt
) != BIND_EXPR
)
1660 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1663 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
1665 /* Create the merge statement list. */
1668 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
1670 stmt
= gfc_trans_call (n
->udr
->combiner
, false,
1671 NULL_TREE
, NULL_TREE
, false);
1672 if (TREE_CODE (stmt
) != BIND_EXPR
)
1673 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1676 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
1678 /* And stick the placeholder VAR_DECL into the clause as well. */
1679 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
1681 gfc_current_locus
= old_loc
;
1694 gfc_free_array_spec (outer_sym
.as
);
1698 *udr
->omp_out
= omp_var_copy
[0];
1699 *udr
->omp_in
= omp_var_copy
[1];
1700 if (udr
->initializer_ns
)
1702 *udr
->omp_priv
= omp_var_copy
[2];
1703 *udr
->omp_orig
= omp_var_copy
[3];
1709 gfc_trans_omp_reduction_list (gfc_omp_namelist
*namelist
, tree list
,
1710 locus where
, bool mark_addressable
)
1712 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1713 if (namelist
->sym
->attr
.referenced
)
1715 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
1716 if (t
!= error_mark_node
)
1718 tree node
= build_omp_clause (where
.lb
->location
,
1719 OMP_CLAUSE_REDUCTION
);
1720 OMP_CLAUSE_DECL (node
) = t
;
1721 if (mark_addressable
)
1722 TREE_ADDRESSABLE (t
) = 1;
1723 switch (namelist
->u
.reduction_op
)
1725 case OMP_REDUCTION_PLUS
:
1726 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
1728 case OMP_REDUCTION_MINUS
:
1729 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
1731 case OMP_REDUCTION_TIMES
:
1732 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
1734 case OMP_REDUCTION_AND
:
1735 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
1737 case OMP_REDUCTION_OR
:
1738 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
1740 case OMP_REDUCTION_EQV
:
1741 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
1743 case OMP_REDUCTION_NEQV
:
1744 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
1746 case OMP_REDUCTION_MAX
:
1747 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
1749 case OMP_REDUCTION_MIN
:
1750 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
1752 case OMP_REDUCTION_IAND
:
1753 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
1755 case OMP_REDUCTION_IOR
:
1756 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
1758 case OMP_REDUCTION_IEOR
:
1759 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
1761 case OMP_REDUCTION_USER
:
1762 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
1767 if (namelist
->sym
->attr
.dimension
1768 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
1769 || namelist
->sym
->attr
.allocatable
)
1770 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
1771 list
= gfc_trans_add_clause (node
, list
);
1778 gfc_convert_expr_to_tree (stmtblock_t
*block
, gfc_expr
*expr
)
1783 gfc_init_se (&se
, NULL
);
1784 gfc_conv_expr (&se
, expr
);
1785 gfc_add_block_to_block (block
, &se
.pre
);
1786 result
= gfc_evaluate_now (se
.expr
, block
);
1787 gfc_add_block_to_block (block
, &se
.post
);
1792 static vec
<tree
, va_heap
, vl_embed
> *doacross_steps
;
1795 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
1796 locus where
, bool declare_simd
= false)
1798 tree omp_clauses
= NULL_TREE
, chunk_size
, c
;
1800 enum omp_clause_code clause_code
;
1803 if (clauses
== NULL
)
1806 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
1808 gfc_omp_namelist
*n
= clauses
->lists
[list
];
1814 case OMP_LIST_REDUCTION
:
1815 /* An OpenACC async clause indicates the need to set reduction
1816 arguments addressable, to allow asynchronous copy-out. */
1817 omp_clauses
= gfc_trans_omp_reduction_list (n
, omp_clauses
, where
,
1820 case OMP_LIST_PRIVATE
:
1821 clause_code
= OMP_CLAUSE_PRIVATE
;
1823 case OMP_LIST_SHARED
:
1824 clause_code
= OMP_CLAUSE_SHARED
;
1826 case OMP_LIST_FIRSTPRIVATE
:
1827 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
1829 case OMP_LIST_LASTPRIVATE
:
1830 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
1832 case OMP_LIST_COPYIN
:
1833 clause_code
= OMP_CLAUSE_COPYIN
;
1835 case OMP_LIST_COPYPRIVATE
:
1836 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
1838 case OMP_LIST_UNIFORM
:
1839 clause_code
= OMP_CLAUSE_UNIFORM
;
1841 case OMP_LIST_USE_DEVICE
:
1842 case OMP_LIST_USE_DEVICE_PTR
:
1843 clause_code
= OMP_CLAUSE_USE_DEVICE_PTR
;
1845 case OMP_LIST_IS_DEVICE_PTR
:
1846 clause_code
= OMP_CLAUSE_IS_DEVICE_PTR
;
1851 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
1854 case OMP_LIST_ALIGNED
:
1855 for (; n
!= NULL
; n
= n
->next
)
1856 if (n
->sym
->attr
.referenced
|| declare_simd
)
1858 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1859 if (t
!= error_mark_node
)
1861 tree node
= build_omp_clause (input_location
,
1862 OMP_CLAUSE_ALIGNED
);
1863 OMP_CLAUSE_DECL (node
) = t
;
1869 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
1872 gfc_init_se (&se
, NULL
);
1873 gfc_conv_expr (&se
, n
->expr
);
1874 gfc_add_block_to_block (block
, &se
.pre
);
1875 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
1876 gfc_add_block_to_block (block
, &se
.post
);
1878 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
1880 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1884 case OMP_LIST_LINEAR
:
1886 gfc_expr
*last_step_expr
= NULL
;
1887 tree last_step
= NULL_TREE
;
1888 bool last_step_parm
= false;
1890 for (; n
!= NULL
; n
= n
->next
)
1894 last_step_expr
= n
->expr
;
1895 last_step
= NULL_TREE
;
1896 last_step_parm
= false;
1898 if (n
->sym
->attr
.referenced
|| declare_simd
)
1900 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1901 if (t
!= error_mark_node
)
1903 tree node
= build_omp_clause (input_location
,
1905 OMP_CLAUSE_DECL (node
) = t
;
1906 omp_clause_linear_kind kind
;
1907 switch (n
->u
.linear_op
)
1909 case OMP_LINEAR_DEFAULT
:
1910 kind
= OMP_CLAUSE_LINEAR_DEFAULT
;
1912 case OMP_LINEAR_REF
:
1913 kind
= OMP_CLAUSE_LINEAR_REF
;
1915 case OMP_LINEAR_VAL
:
1916 kind
= OMP_CLAUSE_LINEAR_VAL
;
1918 case OMP_LINEAR_UVAL
:
1919 kind
= OMP_CLAUSE_LINEAR_UVAL
;
1924 OMP_CLAUSE_LINEAR_KIND (node
) = kind
;
1925 if (last_step_expr
&& last_step
== NULL_TREE
)
1929 gfc_init_se (&se
, NULL
);
1930 gfc_conv_expr (&se
, last_step_expr
);
1931 gfc_add_block_to_block (block
, &se
.pre
);
1932 last_step
= gfc_evaluate_now (se
.expr
, block
);
1933 gfc_add_block_to_block (block
, &se
.post
);
1935 else if (last_step_expr
->expr_type
== EXPR_VARIABLE
)
1937 gfc_symbol
*s
= last_step_expr
->symtree
->n
.sym
;
1938 last_step
= gfc_trans_omp_variable (s
, true);
1939 last_step_parm
= true;
1943 = gfc_conv_constant_to_tree (last_step_expr
);
1947 OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (node
) = 1;
1948 OMP_CLAUSE_LINEAR_STEP (node
) = last_step
;
1952 tree type
= gfc_typenode_for_spec (&n
->sym
->ts
);
1953 OMP_CLAUSE_LINEAR_STEP (node
)
1954 = fold_convert (type
, last_step
);
1956 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
1957 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
1958 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1964 case OMP_LIST_DEPEND
:
1965 for (; n
!= NULL
; n
= n
->next
)
1967 if (n
->u
.depend_op
== OMP_DEPEND_SINK_FIRST
)
1969 tree vec
= NULL_TREE
;
1973 tree addend
= integer_zero_node
, t
;
1977 addend
= gfc_conv_constant_to_tree (n
->expr
);
1978 if (TREE_CODE (addend
) == INTEGER_CST
1979 && tree_int_cst_sgn (addend
) == -1)
1982 addend
= const_unop (NEGATE_EXPR
,
1983 TREE_TYPE (addend
), addend
);
1986 t
= gfc_trans_omp_variable (n
->sym
, false);
1987 if (t
!= error_mark_node
)
1989 if (i
< vec_safe_length (doacross_steps
)
1990 && !integer_zerop (addend
)
1991 && (*doacross_steps
)[i
])
1993 tree step
= (*doacross_steps
)[i
];
1994 addend
= fold_convert (TREE_TYPE (step
), addend
);
1995 addend
= build2 (TRUNC_DIV_EXPR
,
1996 TREE_TYPE (step
), addend
, step
);
1998 vec
= tree_cons (addend
, t
, vec
);
2000 OMP_CLAUSE_DEPEND_SINK_NEGATIVE (vec
) = 1;
2003 || n
->next
->u
.depend_op
!= OMP_DEPEND_SINK
)
2007 if (vec
== NULL_TREE
)
2010 tree node
= build_omp_clause (input_location
,
2012 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_SINK
;
2013 OMP_CLAUSE_DECL (node
) = nreverse (vec
);
2014 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2018 if (!n
->sym
->attr
.referenced
)
2021 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_DEPEND
);
2022 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2024 tree decl
= gfc_get_symbol_decl (n
->sym
);
2025 if (gfc_omp_privatize_by_reference (decl
))
2026 decl
= build_fold_indirect_ref (decl
);
2027 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2029 decl
= gfc_conv_descriptor_data_get (decl
);
2030 decl
= fold_convert (build_pointer_type (char_type_node
),
2032 decl
= build_fold_indirect_ref (decl
);
2034 else if (DECL_P (decl
))
2035 TREE_ADDRESSABLE (decl
) = 1;
2036 OMP_CLAUSE_DECL (node
) = decl
;
2041 gfc_init_se (&se
, NULL
);
2042 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2044 gfc_conv_expr_reference (&se
, n
->expr
);
2049 gfc_conv_expr_descriptor (&se
, n
->expr
);
2050 ptr
= gfc_conv_array_data (se
.expr
);
2052 gfc_add_block_to_block (block
, &se
.pre
);
2053 gfc_add_block_to_block (block
, &se
.post
);
2054 ptr
= fold_convert (build_pointer_type (char_type_node
),
2056 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2058 switch (n
->u
.depend_op
)
2061 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
2063 case OMP_DEPEND_OUT
:
2064 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
2066 case OMP_DEPEND_INOUT
:
2067 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
2072 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2076 for (; n
!= NULL
; n
= n
->next
)
2078 if (!n
->sym
->attr
.referenced
)
2081 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2082 tree node2
= NULL_TREE
;
2083 tree node3
= NULL_TREE
;
2084 tree node4
= NULL_TREE
;
2085 tree decl
= gfc_get_symbol_decl (n
->sym
);
2087 TREE_ADDRESSABLE (decl
) = 1;
2088 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2090 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2091 && (gfc_omp_privatize_by_reference (decl
)
2092 || GFC_DECL_GET_SCALAR_POINTER (decl
)
2093 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
2094 || GFC_DECL_CRAY_POINTEE (decl
)
2095 || GFC_DESCRIPTOR_TYPE_P
2096 (TREE_TYPE (TREE_TYPE (decl
)))))
2098 tree orig_decl
= decl
;
2099 node4
= build_omp_clause (input_location
,
2101 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2102 OMP_CLAUSE_DECL (node4
) = decl
;
2103 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2104 decl
= build_fold_indirect_ref (decl
);
2105 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
2106 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
2107 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
2109 node3
= build_omp_clause (input_location
,
2111 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2112 OMP_CLAUSE_DECL (node3
) = decl
;
2113 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2114 decl
= build_fold_indirect_ref (decl
);
2117 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2119 tree type
= TREE_TYPE (decl
);
2120 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2121 ptr
= fold_convert (build_pointer_type (char_type_node
),
2123 ptr
= build_fold_indirect_ref (ptr
);
2124 OMP_CLAUSE_DECL (node
) = ptr
;
2125 node2
= build_omp_clause (input_location
,
2127 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2128 OMP_CLAUSE_DECL (node2
) = decl
;
2129 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2130 node3
= build_omp_clause (input_location
,
2132 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2133 OMP_CLAUSE_DECL (node3
)
2134 = gfc_conv_descriptor_data_get (decl
);
2135 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2137 /* We have to check for n->sym->attr.dimension because
2138 of scalar coarrays. */
2139 if (n
->sym
->attr
.pointer
&& n
->sym
->attr
.dimension
)
2141 stmtblock_t cond_block
;
2143 = gfc_create_var (gfc_array_index_type
, NULL
);
2144 tree tem
, then_b
, else_b
, zero
, cond
;
2146 gfc_init_block (&cond_block
);
2148 = gfc_full_array_size (&cond_block
, decl
,
2149 GFC_TYPE_ARRAY_RANK (type
));
2150 gfc_add_modify (&cond_block
, size
, tem
);
2151 then_b
= gfc_finish_block (&cond_block
);
2152 gfc_init_block (&cond_block
);
2153 zero
= build_int_cst (gfc_array_index_type
, 0);
2154 gfc_add_modify (&cond_block
, size
, zero
);
2155 else_b
= gfc_finish_block (&cond_block
);
2156 tem
= gfc_conv_descriptor_data_get (decl
);
2157 tem
= fold_convert (pvoid_type_node
, tem
);
2158 cond
= fold_build2_loc (input_location
, NE_EXPR
,
2160 tem
, null_pointer_node
);
2161 gfc_add_expr_to_block (block
,
2162 build3_loc (input_location
,
2167 OMP_CLAUSE_SIZE (node
) = size
;
2169 else if (n
->sym
->attr
.dimension
)
2170 OMP_CLAUSE_SIZE (node
)
2171 = gfc_full_array_size (block
, decl
,
2172 GFC_TYPE_ARRAY_RANK (type
));
2173 if (n
->sym
->attr
.dimension
)
2176 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2177 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2178 OMP_CLAUSE_SIZE (node
)
2179 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2180 OMP_CLAUSE_SIZE (node
), elemsz
);
2184 OMP_CLAUSE_DECL (node
) = decl
;
2189 gfc_init_se (&se
, NULL
);
2190 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2192 gfc_conv_expr_reference (&se
, n
->expr
);
2193 gfc_add_block_to_block (block
, &se
.pre
);
2195 OMP_CLAUSE_SIZE (node
)
2196 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2200 gfc_conv_expr_descriptor (&se
, n
->expr
);
2201 ptr
= gfc_conv_array_data (se
.expr
);
2202 tree type
= TREE_TYPE (se
.expr
);
2203 gfc_add_block_to_block (block
, &se
.pre
);
2204 OMP_CLAUSE_SIZE (node
)
2205 = gfc_full_array_size (block
, se
.expr
,
2206 GFC_TYPE_ARRAY_RANK (type
));
2208 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2209 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2210 OMP_CLAUSE_SIZE (node
)
2211 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2212 OMP_CLAUSE_SIZE (node
), elemsz
);
2214 gfc_add_block_to_block (block
, &se
.post
);
2215 ptr
= fold_convert (build_pointer_type (char_type_node
),
2217 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2219 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2220 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
2222 node4
= build_omp_clause (input_location
,
2224 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2225 OMP_CLAUSE_DECL (node4
) = decl
;
2226 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2227 decl
= build_fold_indirect_ref (decl
);
2229 ptr
= fold_convert (sizetype
, ptr
);
2230 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2232 tree type
= TREE_TYPE (decl
);
2233 ptr2
= gfc_conv_descriptor_data_get (decl
);
2234 node2
= build_omp_clause (input_location
,
2236 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2237 OMP_CLAUSE_DECL (node2
) = decl
;
2238 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2239 node3
= build_omp_clause (input_location
,
2241 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2242 OMP_CLAUSE_DECL (node3
)
2243 = gfc_conv_descriptor_data_get (decl
);
2247 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2248 ptr2
= build_fold_addr_expr (decl
);
2251 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2254 node3
= build_omp_clause (input_location
,
2256 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2257 OMP_CLAUSE_DECL (node3
) = decl
;
2259 ptr2
= fold_convert (sizetype
, ptr2
);
2260 OMP_CLAUSE_SIZE (node3
)
2261 = fold_build2 (MINUS_EXPR
, sizetype
, ptr
, ptr2
);
2263 switch (n
->u
.map_op
)
2266 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALLOC
);
2269 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TO
);
2272 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FROM
);
2274 case OMP_MAP_TOFROM
:
2275 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TOFROM
);
2277 case OMP_MAP_ALWAYS_TO
:
2278 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TO
);
2280 case OMP_MAP_ALWAYS_FROM
:
2281 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_FROM
);
2283 case OMP_MAP_ALWAYS_TOFROM
:
2284 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TOFROM
);
2286 case OMP_MAP_RELEASE
:
2287 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_RELEASE
);
2289 case OMP_MAP_DELETE
:
2290 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DELETE
);
2292 case OMP_MAP_FORCE_ALLOC
:
2293 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_ALLOC
);
2295 case OMP_MAP_FORCE_TO
:
2296 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TO
);
2298 case OMP_MAP_FORCE_FROM
:
2299 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_FROM
);
2301 case OMP_MAP_FORCE_TOFROM
:
2302 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TOFROM
);
2304 case OMP_MAP_FORCE_PRESENT
:
2305 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_PRESENT
);
2307 case OMP_MAP_FORCE_DEVICEPTR
:
2308 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEVICEPTR
);
2313 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2315 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
2317 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
2319 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
2324 case OMP_LIST_CACHE
:
2325 for (; n
!= NULL
; n
= n
->next
)
2327 if (!n
->sym
->attr
.referenced
)
2333 clause_code
= OMP_CLAUSE_TO
;
2336 clause_code
= OMP_CLAUSE_FROM
;
2338 case OMP_LIST_CACHE
:
2339 clause_code
= OMP_CLAUSE__CACHE_
;
2344 tree node
= build_omp_clause (input_location
, clause_code
);
2345 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2347 tree decl
= gfc_get_symbol_decl (n
->sym
);
2348 if (gfc_omp_privatize_by_reference (decl
))
2349 decl
= build_fold_indirect_ref (decl
);
2350 else if (DECL_P (decl
))
2351 TREE_ADDRESSABLE (decl
) = 1;
2352 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2354 tree type
= TREE_TYPE (decl
);
2355 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2356 ptr
= fold_convert (build_pointer_type (char_type_node
),
2358 ptr
= build_fold_indirect_ref (ptr
);
2359 OMP_CLAUSE_DECL (node
) = ptr
;
2360 OMP_CLAUSE_SIZE (node
)
2361 = gfc_full_array_size (block
, decl
,
2362 GFC_TYPE_ARRAY_RANK (type
));
2364 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2365 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2366 OMP_CLAUSE_SIZE (node
)
2367 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2368 OMP_CLAUSE_SIZE (node
), elemsz
);
2371 OMP_CLAUSE_DECL (node
) = decl
;
2376 gfc_init_se (&se
, NULL
);
2377 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2379 gfc_conv_expr_reference (&se
, n
->expr
);
2381 gfc_add_block_to_block (block
, &se
.pre
);
2382 OMP_CLAUSE_SIZE (node
)
2383 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2387 gfc_conv_expr_descriptor (&se
, n
->expr
);
2388 ptr
= gfc_conv_array_data (se
.expr
);
2389 tree type
= TREE_TYPE (se
.expr
);
2390 gfc_add_block_to_block (block
, &se
.pre
);
2391 OMP_CLAUSE_SIZE (node
)
2392 = gfc_full_array_size (block
, se
.expr
,
2393 GFC_TYPE_ARRAY_RANK (type
));
2395 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2396 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2397 OMP_CLAUSE_SIZE (node
)
2398 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2399 OMP_CLAUSE_SIZE (node
), elemsz
);
2401 gfc_add_block_to_block (block
, &se
.post
);
2402 ptr
= fold_convert (build_pointer_type (char_type_node
),
2404 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2406 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2414 if (clauses
->if_expr
)
2418 gfc_init_se (&se
, NULL
);
2419 gfc_conv_expr (&se
, clauses
->if_expr
);
2420 gfc_add_block_to_block (block
, &se
.pre
);
2421 if_var
= gfc_evaluate_now (se
.expr
, block
);
2422 gfc_add_block_to_block (block
, &se
.post
);
2424 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
2425 OMP_CLAUSE_IF_MODIFIER (c
) = ERROR_MARK
;
2426 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2427 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2429 for (ifc
= 0; ifc
< OMP_IF_LAST
; ifc
++)
2430 if (clauses
->if_exprs
[ifc
])
2434 gfc_init_se (&se
, NULL
);
2435 gfc_conv_expr (&se
, clauses
->if_exprs
[ifc
]);
2436 gfc_add_block_to_block (block
, &se
.pre
);
2437 if_var
= gfc_evaluate_now (se
.expr
, block
);
2438 gfc_add_block_to_block (block
, &se
.post
);
2440 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
2443 case OMP_IF_PARALLEL
:
2444 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_PARALLEL
;
2447 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASK
;
2449 case OMP_IF_TASKLOOP
:
2450 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASKLOOP
;
2453 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET
;
2455 case OMP_IF_TARGET_DATA
:
2456 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_DATA
;
2458 case OMP_IF_TARGET_UPDATE
:
2459 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_UPDATE
;
2461 case OMP_IF_TARGET_ENTER_DATA
:
2462 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_ENTER_DATA
;
2464 case OMP_IF_TARGET_EXIT_DATA
:
2465 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_EXIT_DATA
;
2470 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2471 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2474 if (clauses
->final_expr
)
2478 gfc_init_se (&se
, NULL
);
2479 gfc_conv_expr (&se
, clauses
->final_expr
);
2480 gfc_add_block_to_block (block
, &se
.pre
);
2481 final_var
= gfc_evaluate_now (se
.expr
, block
);
2482 gfc_add_block_to_block (block
, &se
.post
);
2484 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FINAL
);
2485 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
2486 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2489 if (clauses
->num_threads
)
2493 gfc_init_se (&se
, NULL
);
2494 gfc_conv_expr (&se
, clauses
->num_threads
);
2495 gfc_add_block_to_block (block
, &se
.pre
);
2496 num_threads
= gfc_evaluate_now (se
.expr
, block
);
2497 gfc_add_block_to_block (block
, &se
.post
);
2499 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_THREADS
);
2500 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
2501 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2504 chunk_size
= NULL_TREE
;
2505 if (clauses
->chunk_size
)
2507 gfc_init_se (&se
, NULL
);
2508 gfc_conv_expr (&se
, clauses
->chunk_size
);
2509 gfc_add_block_to_block (block
, &se
.pre
);
2510 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2511 gfc_add_block_to_block (block
, &se
.post
);
2514 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
2516 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SCHEDULE
);
2517 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2518 switch (clauses
->sched_kind
)
2520 case OMP_SCHED_STATIC
:
2521 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
2523 case OMP_SCHED_DYNAMIC
:
2524 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
2526 case OMP_SCHED_GUIDED
:
2527 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
2529 case OMP_SCHED_RUNTIME
:
2530 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
2532 case OMP_SCHED_AUTO
:
2533 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
2538 if (clauses
->sched_monotonic
)
2539 OMP_CLAUSE_SCHEDULE_KIND (c
)
2540 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
2541 | OMP_CLAUSE_SCHEDULE_MONOTONIC
);
2542 else if (clauses
->sched_nonmonotonic
)
2543 OMP_CLAUSE_SCHEDULE_KIND (c
)
2544 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
2545 | OMP_CLAUSE_SCHEDULE_NONMONOTONIC
);
2546 if (clauses
->sched_simd
)
2547 OMP_CLAUSE_SCHEDULE_SIMD (c
) = 1;
2548 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2551 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
2553 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULT
);
2554 switch (clauses
->default_sharing
)
2556 case OMP_DEFAULT_NONE
:
2557 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
2559 case OMP_DEFAULT_SHARED
:
2560 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
2562 case OMP_DEFAULT_PRIVATE
:
2563 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
2565 case OMP_DEFAULT_FIRSTPRIVATE
:
2566 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
2568 case OMP_DEFAULT_PRESENT
:
2569 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRESENT
;
2574 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2577 if (clauses
->nowait
)
2579 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOWAIT
);
2580 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2583 if (clauses
->ordered
)
2585 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ORDERED
);
2586 OMP_CLAUSE_ORDERED_EXPR (c
)
2587 = clauses
->orderedc
? build_int_cst (integer_type_node
,
2588 clauses
->orderedc
) : NULL_TREE
;
2589 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2592 if (clauses
->untied
)
2594 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_UNTIED
);
2595 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2598 if (clauses
->mergeable
)
2600 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_MERGEABLE
);
2601 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2604 if (clauses
->collapse
)
2606 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_COLLAPSE
);
2607 OMP_CLAUSE_COLLAPSE_EXPR (c
)
2608 = build_int_cst (integer_type_node
, clauses
->collapse
);
2609 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2612 if (clauses
->inbranch
)
2614 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INBRANCH
);
2615 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2618 if (clauses
->notinbranch
)
2620 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOTINBRANCH
);
2621 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2624 switch (clauses
->cancel
)
2626 case OMP_CANCEL_UNKNOWN
:
2628 case OMP_CANCEL_PARALLEL
:
2629 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PARALLEL
);
2630 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2632 case OMP_CANCEL_SECTIONS
:
2633 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SECTIONS
);
2634 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2637 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FOR
);
2638 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2640 case OMP_CANCEL_TASKGROUP
:
2641 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TASKGROUP
);
2642 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2646 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
2648 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PROC_BIND
);
2649 switch (clauses
->proc_bind
)
2651 case OMP_PROC_BIND_MASTER
:
2652 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
2654 case OMP_PROC_BIND_SPREAD
:
2655 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
2657 case OMP_PROC_BIND_CLOSE
:
2658 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
2663 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2666 if (clauses
->safelen_expr
)
2670 gfc_init_se (&se
, NULL
);
2671 gfc_conv_expr (&se
, clauses
->safelen_expr
);
2672 gfc_add_block_to_block (block
, &se
.pre
);
2673 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
2674 gfc_add_block_to_block (block
, &se
.post
);
2676 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SAFELEN
);
2677 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
2678 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2681 if (clauses
->simdlen_expr
)
2685 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2686 OMP_CLAUSE_SIMDLEN_EXPR (c
)
2687 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
2688 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2694 gfc_init_se (&se
, NULL
);
2695 gfc_conv_expr (&se
, clauses
->simdlen_expr
);
2696 gfc_add_block_to_block (block
, &se
.pre
);
2697 simdlen_var
= gfc_evaluate_now (se
.expr
, block
);
2698 gfc_add_block_to_block (block
, &se
.post
);
2700 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2701 OMP_CLAUSE_SIMDLEN_EXPR (c
) = simdlen_var
;
2702 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2706 if (clauses
->num_teams
)
2710 gfc_init_se (&se
, NULL
);
2711 gfc_conv_expr (&se
, clauses
->num_teams
);
2712 gfc_add_block_to_block (block
, &se
.pre
);
2713 num_teams
= gfc_evaluate_now (se
.expr
, block
);
2714 gfc_add_block_to_block (block
, &se
.post
);
2716 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TEAMS
);
2717 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
2718 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2721 if (clauses
->device
)
2725 gfc_init_se (&se
, NULL
);
2726 gfc_conv_expr (&se
, clauses
->device
);
2727 gfc_add_block_to_block (block
, &se
.pre
);
2728 device
= gfc_evaluate_now (se
.expr
, block
);
2729 gfc_add_block_to_block (block
, &se
.post
);
2731 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEVICE
);
2732 OMP_CLAUSE_DEVICE_ID (c
) = device
;
2733 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2736 if (clauses
->thread_limit
)
2740 gfc_init_se (&se
, NULL
);
2741 gfc_conv_expr (&se
, clauses
->thread_limit
);
2742 gfc_add_block_to_block (block
, &se
.pre
);
2743 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
2744 gfc_add_block_to_block (block
, &se
.post
);
2746 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREAD_LIMIT
);
2747 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
2748 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2751 chunk_size
= NULL_TREE
;
2752 if (clauses
->dist_chunk_size
)
2754 gfc_init_se (&se
, NULL
);
2755 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
2756 gfc_add_block_to_block (block
, &se
.pre
);
2757 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2758 gfc_add_block_to_block (block
, &se
.post
);
2761 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
2763 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DIST_SCHEDULE
);
2764 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2765 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2768 if (clauses
->grainsize
)
2772 gfc_init_se (&se
, NULL
);
2773 gfc_conv_expr (&se
, clauses
->grainsize
);
2774 gfc_add_block_to_block (block
, &se
.pre
);
2775 grainsize
= gfc_evaluate_now (se
.expr
, block
);
2776 gfc_add_block_to_block (block
, &se
.post
);
2778 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GRAINSIZE
);
2779 OMP_CLAUSE_GRAINSIZE_EXPR (c
) = grainsize
;
2780 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2783 if (clauses
->num_tasks
)
2787 gfc_init_se (&se
, NULL
);
2788 gfc_conv_expr (&se
, clauses
->num_tasks
);
2789 gfc_add_block_to_block (block
, &se
.pre
);
2790 num_tasks
= gfc_evaluate_now (se
.expr
, block
);
2791 gfc_add_block_to_block (block
, &se
.post
);
2793 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TASKS
);
2794 OMP_CLAUSE_NUM_TASKS_EXPR (c
) = num_tasks
;
2795 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2798 if (clauses
->priority
)
2802 gfc_init_se (&se
, NULL
);
2803 gfc_conv_expr (&se
, clauses
->priority
);
2804 gfc_add_block_to_block (block
, &se
.pre
);
2805 priority
= gfc_evaluate_now (se
.expr
, block
);
2806 gfc_add_block_to_block (block
, &se
.post
);
2808 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PRIORITY
);
2809 OMP_CLAUSE_PRIORITY_EXPR (c
) = priority
;
2810 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2817 gfc_init_se (&se
, NULL
);
2818 gfc_conv_expr (&se
, clauses
->hint
);
2819 gfc_add_block_to_block (block
, &se
.pre
);
2820 hint
= gfc_evaluate_now (se
.expr
, block
);
2821 gfc_add_block_to_block (block
, &se
.post
);
2823 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_HINT
);
2824 OMP_CLAUSE_HINT_EXPR (c
) = hint
;
2825 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2830 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMD
);
2831 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2833 if (clauses
->threads
)
2835 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREADS
);
2836 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2838 if (clauses
->nogroup
)
2840 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOGROUP
);
2841 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2843 if (clauses
->defaultmap
)
2845 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULTMAP
);
2846 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2848 if (clauses
->depend_source
)
2850 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEPEND
);
2851 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_SOURCE
;
2852 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2857 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ASYNC
);
2858 if (clauses
->async_expr
)
2859 OMP_CLAUSE_ASYNC_EXPR (c
)
2860 = gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
2862 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
2863 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2867 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SEQ
);
2868 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2870 if (clauses
->par_auto
)
2872 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_AUTO
);
2873 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2875 if (clauses
->independent
)
2877 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INDEPENDENT
);
2878 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2880 if (clauses
->wait_list
)
2884 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2886 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WAIT
);
2887 OMP_CLAUSE_DECL (c
) = gfc_convert_expr_to_tree (block
, el
->expr
);
2888 OMP_CLAUSE_CHAIN (c
) = omp_clauses
;
2892 if (clauses
->num_gangs_expr
)
2895 = gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
2896 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_GANGS
);
2897 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
2898 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2900 if (clauses
->num_workers_expr
)
2902 tree num_workers_var
2903 = gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
2904 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_WORKERS
);
2905 OMP_CLAUSE_NUM_WORKERS_EXPR (c
) = num_workers_var
;
2906 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2908 if (clauses
->vector_length_expr
)
2910 tree vector_length_var
2911 = gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
2912 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR_LENGTH
);
2913 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
) = vector_length_var
;
2914 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2916 if (clauses
->tile_list
)
2918 vec
<tree
, va_gc
> *tvec
;
2921 vec_alloc (tvec
, 4);
2923 for (el
= clauses
->tile_list
; el
; el
= el
->next
)
2924 vec_safe_push (tvec
, gfc_convert_expr_to_tree (block
, el
->expr
));
2926 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TILE
);
2927 OMP_CLAUSE_TILE_LIST (c
) = build_tree_list_vec (tvec
);
2928 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2931 if (clauses
->vector
)
2933 if (clauses
->vector_expr
)
2936 = gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
2937 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2938 OMP_CLAUSE_VECTOR_EXPR (c
) = vector_var
;
2939 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2943 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2944 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2947 if (clauses
->worker
)
2949 if (clauses
->worker_expr
)
2952 = gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
2953 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2954 OMP_CLAUSE_WORKER_EXPR (c
) = worker_var
;
2955 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2959 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2960 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2966 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GANG
);
2967 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2968 if (clauses
->gang_num_expr
)
2970 arg
= gfc_convert_expr_to_tree (block
, clauses
->gang_num_expr
);
2971 OMP_CLAUSE_GANG_EXPR (c
) = arg
;
2973 if (clauses
->gang_static
)
2975 arg
= clauses
->gang_static_expr
2976 ? gfc_convert_expr_to_tree (block
, clauses
->gang_static_expr
)
2977 : integer_minus_one_node
;
2978 OMP_CLAUSE_GANG_STATIC_EXPR (c
) = arg
;
2982 return nreverse (omp_clauses
);
2985 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
2988 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
2993 stmt
= gfc_trans_code (code
);
2994 if (TREE_CODE (stmt
) != BIND_EXPR
)
2996 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
2998 tree block
= poplevel (1, 0);
2999 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
3009 /* Trans OpenACC directives. */
3010 /* parallel, kernels, data and host_data. */
3012 gfc_trans_oacc_construct (gfc_code
*code
)
3015 tree stmt
, oacc_clauses
;
3016 enum tree_code construct_code
;
3020 case EXEC_OACC_PARALLEL
:
3021 construct_code
= OACC_PARALLEL
;
3023 case EXEC_OACC_KERNELS
:
3024 construct_code
= OACC_KERNELS
;
3026 case EXEC_OACC_DATA
:
3027 construct_code
= OACC_DATA
;
3029 case EXEC_OACC_HOST_DATA
:
3030 construct_code
= OACC_HOST_DATA
;
3036 gfc_start_block (&block
);
3037 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3039 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3040 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
3042 gfc_add_expr_to_block (&block
, stmt
);
3043 return gfc_finish_block (&block
);
3046 /* update, enter_data, exit_data, cache. */
3048 gfc_trans_oacc_executable_directive (gfc_code
*code
)
3051 tree stmt
, oacc_clauses
;
3052 enum tree_code construct_code
;
3056 case EXEC_OACC_UPDATE
:
3057 construct_code
= OACC_UPDATE
;
3059 case EXEC_OACC_ENTER_DATA
:
3060 construct_code
= OACC_ENTER_DATA
;
3062 case EXEC_OACC_EXIT_DATA
:
3063 construct_code
= OACC_EXIT_DATA
;
3065 case EXEC_OACC_CACHE
:
3066 construct_code
= OACC_CACHE
;
3072 gfc_start_block (&block
);
3073 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3075 stmt
= build1_loc (input_location
, construct_code
, void_type_node
,
3077 gfc_add_expr_to_block (&block
, stmt
);
3078 return gfc_finish_block (&block
);
3082 gfc_trans_oacc_wait_directive (gfc_code
*code
)
3086 vec
<tree
, va_gc
> *args
;
3089 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
3090 location_t loc
= input_location
;
3092 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
3095 vec_alloc (args
, nparms
+ 2);
3096 stmt
= builtin_decl_explicit (BUILT_IN_GOACC_WAIT
);
3098 gfc_start_block (&block
);
3100 if (clauses
->async_expr
)
3101 t
= gfc_convert_expr_to_tree (&block
, clauses
->async_expr
);
3103 t
= build_int_cst (integer_type_node
, -2);
3105 args
->quick_push (t
);
3106 args
->quick_push (build_int_cst (integer_type_node
, nparms
));
3108 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
3109 args
->quick_push (gfc_convert_expr_to_tree (&block
, el
->expr
));
3111 stmt
= build_call_expr_loc_vec (loc
, stmt
, args
);
3112 gfc_add_expr_to_block (&block
, stmt
);
3116 return gfc_finish_block (&block
);
3119 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
3120 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
3123 gfc_trans_omp_atomic (gfc_code
*code
)
3125 gfc_code
*atomic_code
= code
;
3129 gfc_expr
*expr2
, *e
;
3132 tree lhsaddr
, type
, rhs
, x
;
3133 enum tree_code op
= ERROR_MARK
;
3134 enum tree_code aop
= OMP_ATOMIC
;
3135 bool var_on_left
= false;
3136 bool seq_cst
= (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SEQ_CST
) != 0;
3138 code
= code
->block
->next
;
3139 gcc_assert (code
->op
== EXEC_ASSIGN
);
3140 var
= code
->expr1
->symtree
->n
.sym
;
3142 gfc_init_se (&lse
, NULL
);
3143 gfc_init_se (&rse
, NULL
);
3144 gfc_init_se (&vse
, NULL
);
3145 gfc_start_block (&block
);
3147 expr2
= code
->expr2
;
3148 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3149 != GFC_OMP_ATOMIC_WRITE
)
3150 && (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
) == 0
3151 && expr2
->expr_type
== EXPR_FUNCTION
3152 && expr2
->value
.function
.isym
3153 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3154 expr2
= expr2
->value
.function
.actual
->expr
;
3156 switch (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3158 case GFC_OMP_ATOMIC_READ
:
3159 gfc_conv_expr (&vse
, code
->expr1
);
3160 gfc_add_block_to_block (&block
, &vse
.pre
);
3162 gfc_conv_expr (&lse
, expr2
);
3163 gfc_add_block_to_block (&block
, &lse
.pre
);
3164 type
= TREE_TYPE (lse
.expr
);
3165 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
3167 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
3168 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3169 x
= convert (TREE_TYPE (vse
.expr
), x
);
3170 gfc_add_modify (&block
, vse
.expr
, x
);
3172 gfc_add_block_to_block (&block
, &lse
.pre
);
3173 gfc_add_block_to_block (&block
, &rse
.pre
);
3175 return gfc_finish_block (&block
);
3176 case GFC_OMP_ATOMIC_CAPTURE
:
3177 aop
= OMP_ATOMIC_CAPTURE_NEW
;
3178 if (expr2
->expr_type
== EXPR_VARIABLE
)
3180 aop
= OMP_ATOMIC_CAPTURE_OLD
;
3181 gfc_conv_expr (&vse
, code
->expr1
);
3182 gfc_add_block_to_block (&block
, &vse
.pre
);
3184 gfc_conv_expr (&lse
, expr2
);
3185 gfc_add_block_to_block (&block
, &lse
.pre
);
3186 gfc_init_se (&lse
, NULL
);
3188 var
= code
->expr1
->symtree
->n
.sym
;
3189 expr2
= code
->expr2
;
3190 if (expr2
->expr_type
== EXPR_FUNCTION
3191 && expr2
->value
.function
.isym
3192 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3193 expr2
= expr2
->value
.function
.actual
->expr
;
3200 gfc_conv_expr (&lse
, code
->expr1
);
3201 gfc_add_block_to_block (&block
, &lse
.pre
);
3202 type
= TREE_TYPE (lse
.expr
);
3203 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
3205 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3206 == GFC_OMP_ATOMIC_WRITE
)
3207 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
3209 gfc_conv_expr (&rse
, expr2
);
3210 gfc_add_block_to_block (&block
, &rse
.pre
);
3212 else if (expr2
->expr_type
== EXPR_OP
)
3215 switch (expr2
->value
.op
.op
)
3217 case INTRINSIC_PLUS
:
3220 case INTRINSIC_TIMES
:
3223 case INTRINSIC_MINUS
:
3226 case INTRINSIC_DIVIDE
:
3227 if (expr2
->ts
.type
== BT_INTEGER
)
3228 op
= TRUNC_DIV_EXPR
;
3233 op
= TRUTH_ANDIF_EXPR
;
3236 op
= TRUTH_ORIF_EXPR
;
3241 case INTRINSIC_NEQV
:
3247 e
= expr2
->value
.op
.op1
;
3248 if (e
->expr_type
== EXPR_FUNCTION
3249 && e
->value
.function
.isym
3250 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3251 e
= e
->value
.function
.actual
->expr
;
3252 if (e
->expr_type
== EXPR_VARIABLE
3253 && e
->symtree
!= NULL
3254 && e
->symtree
->n
.sym
== var
)
3256 expr2
= expr2
->value
.op
.op2
;
3261 e
= expr2
->value
.op
.op2
;
3262 if (e
->expr_type
== EXPR_FUNCTION
3263 && e
->value
.function
.isym
3264 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3265 e
= e
->value
.function
.actual
->expr
;
3266 gcc_assert (e
->expr_type
== EXPR_VARIABLE
3267 && e
->symtree
!= NULL
3268 && e
->symtree
->n
.sym
== var
);
3269 expr2
= expr2
->value
.op
.op1
;
3270 var_on_left
= false;
3272 gfc_conv_expr (&rse
, expr2
);
3273 gfc_add_block_to_block (&block
, &rse
.pre
);
3277 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
3278 switch (expr2
->value
.function
.isym
->id
)
3298 e
= expr2
->value
.function
.actual
->expr
;
3299 gcc_assert (e
->expr_type
== EXPR_VARIABLE
3300 && e
->symtree
!= NULL
3301 && e
->symtree
->n
.sym
== var
);
3303 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
3304 gfc_add_block_to_block (&block
, &rse
.pre
);
3305 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
3307 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
3308 gfc_actual_arglist
*arg
;
3310 gfc_add_modify (&block
, accum
, rse
.expr
);
3311 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
3314 gfc_init_block (&rse
.pre
);
3315 gfc_conv_expr (&rse
, arg
->expr
);
3316 gfc_add_block_to_block (&block
, &rse
.pre
);
3317 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
3319 gfc_add_modify (&block
, accum
, x
);
3325 expr2
= expr2
->value
.function
.actual
->next
->expr
;
3328 lhsaddr
= save_expr (lhsaddr
);
3329 if (TREE_CODE (lhsaddr
) != SAVE_EXPR
3330 && (TREE_CODE (lhsaddr
) != ADDR_EXPR
3331 || !VAR_P (TREE_OPERAND (lhsaddr
, 0))))
3333 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
3334 it even after unsharing function body. */
3335 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
3336 DECL_CONTEXT (var
) = current_function_decl
;
3337 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
,
3338 NULL_TREE
, NULL_TREE
);
3341 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
3343 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3344 == GFC_OMP_ATOMIC_WRITE
)
3345 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
3349 x
= convert (TREE_TYPE (rhs
),
3350 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
3352 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
3354 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
3357 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
3358 && TREE_CODE (type
) != COMPLEX_TYPE
)
3359 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
3360 TREE_TYPE (TREE_TYPE (rhs
)), x
);
3362 gfc_add_block_to_block (&block
, &lse
.pre
);
3363 gfc_add_block_to_block (&block
, &rse
.pre
);
3365 if (aop
== OMP_ATOMIC
)
3367 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
3368 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3369 gfc_add_expr_to_block (&block
, x
);
3373 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
3376 expr2
= code
->expr2
;
3377 if (expr2
->expr_type
== EXPR_FUNCTION
3378 && expr2
->value
.function
.isym
3379 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3380 expr2
= expr2
->value
.function
.actual
->expr
;
3382 gcc_assert (expr2
->expr_type
== EXPR_VARIABLE
);
3383 gfc_conv_expr (&vse
, code
->expr1
);
3384 gfc_add_block_to_block (&block
, &vse
.pre
);
3386 gfc_init_se (&lse
, NULL
);
3387 gfc_conv_expr (&lse
, expr2
);
3388 gfc_add_block_to_block (&block
, &lse
.pre
);
3390 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
3391 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3392 x
= convert (TREE_TYPE (vse
.expr
), x
);
3393 gfc_add_modify (&block
, vse
.expr
, x
);
3396 return gfc_finish_block (&block
);
3400 gfc_trans_omp_barrier (void)
3402 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
3403 return build_call_expr_loc (input_location
, decl
, 0);
3407 gfc_trans_omp_cancel (gfc_code
*code
)
3410 tree ifc
= boolean_true_node
;
3412 switch (code
->ext
.omp_clauses
->cancel
)
3414 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3415 case OMP_CANCEL_DO
: mask
= 2; break;
3416 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3417 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3418 default: gcc_unreachable ();
3420 gfc_start_block (&block
);
3421 if (code
->ext
.omp_clauses
->if_expr
)
3426 gfc_init_se (&se
, NULL
);
3427 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->if_expr
);
3428 gfc_add_block_to_block (&block
, &se
.pre
);
3429 if_var
= gfc_evaluate_now (se
.expr
, &block
);
3430 gfc_add_block_to_block (&block
, &se
.post
);
3431 tree type
= TREE_TYPE (if_var
);
3432 ifc
= fold_build2_loc (input_location
, NE_EXPR
,
3433 boolean_type_node
, if_var
,
3434 build_zero_cst (type
));
3436 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
3437 tree c_bool_type
= TREE_TYPE (TREE_TYPE (decl
));
3438 ifc
= fold_convert (c_bool_type
, ifc
);
3439 gfc_add_expr_to_block (&block
,
3440 build_call_expr_loc (input_location
, decl
, 2,
3441 build_int_cst (integer_type_node
,
3443 return gfc_finish_block (&block
);
3447 gfc_trans_omp_cancellation_point (gfc_code
*code
)
3450 switch (code
->ext
.omp_clauses
->cancel
)
3452 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3453 case OMP_CANCEL_DO
: mask
= 2; break;
3454 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3455 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3456 default: gcc_unreachable ();
3458 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT
);
3459 return build_call_expr_loc (input_location
, decl
, 1,
3460 build_int_cst (integer_type_node
, mask
));
3464 gfc_trans_omp_critical (gfc_code
*code
)
3466 tree name
= NULL_TREE
, stmt
;
3467 if (code
->ext
.omp_clauses
!= NULL
)
3468 name
= get_identifier (code
->ext
.omp_clauses
->critical_name
);
3469 stmt
= gfc_trans_code (code
->block
->next
);
3470 return build3_loc (input_location
, OMP_CRITICAL
, void_type_node
, stmt
,
3474 typedef struct dovar_init_d
{
3481 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
3482 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
3485 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
, orig_decls
;
3486 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
3489 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
3490 int i
, collapse
= clauses
->collapse
;
3491 vec
<dovar_init
> inits
= vNULL
;
3494 vec
<tree
, va_heap
, vl_embed
> *saved_doacross_steps
= doacross_steps
;
3495 gfc_expr_list
*tile
= do_clauses
? do_clauses
->tile_list
: clauses
->tile_list
;
3497 /* Both collapsed and tiled loops are lowered the same way. In
3498 OpenACC, those clauses are not compatible, so prioritize the tile
3499 clause, if present. */
3503 for (gfc_expr_list
*el
= tile
; el
; el
= el
->next
)
3507 doacross_steps
= NULL
;
3508 if (clauses
->orderedc
)
3509 collapse
= clauses
->orderedc
;
3513 code
= code
->block
->next
;
3514 gcc_assert (code
->op
== EXEC_DO
);
3516 init
= make_tree_vec (collapse
);
3517 cond
= make_tree_vec (collapse
);
3518 incr
= make_tree_vec (collapse
);
3519 orig_decls
= clauses
->orderedc
? make_tree_vec (collapse
) : NULL_TREE
;
3523 gfc_start_block (&block
);
3527 /* simd schedule modifier is only useful for composite do simd and other
3528 constructs including that, where gfc_trans_omp_do is only called
3529 on the simd construct and DO's clauses are translated elsewhere. */
3530 do_clauses
->sched_simd
= false;
3532 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
3534 for (i
= 0; i
< collapse
; i
++)
3537 int dovar_found
= 0;
3542 gfc_omp_namelist
*n
= NULL
;
3543 if (op
!= EXEC_OMP_DISTRIBUTE
)
3544 for (n
= clauses
->lists
[(op
== EXEC_OMP_SIMD
&& collapse
== 1)
3545 ? OMP_LIST_LINEAR
: OMP_LIST_LASTPRIVATE
];
3546 n
!= NULL
; n
= n
->next
)
3547 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3551 else if (n
== NULL
&& op
!= EXEC_OMP_SIMD
)
3552 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
3553 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3559 /* Evaluate all the expressions in the iterator. */
3560 gfc_init_se (&se
, NULL
);
3561 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
3562 gfc_add_block_to_block (pblock
, &se
.pre
);
3564 type
= TREE_TYPE (dovar
);
3565 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
3567 gfc_init_se (&se
, NULL
);
3568 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
3569 gfc_add_block_to_block (pblock
, &se
.pre
);
3570 from
= gfc_evaluate_now (se
.expr
, pblock
);
3572 gfc_init_se (&se
, NULL
);
3573 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
3574 gfc_add_block_to_block (pblock
, &se
.pre
);
3575 to
= gfc_evaluate_now (se
.expr
, pblock
);
3577 gfc_init_se (&se
, NULL
);
3578 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
3579 gfc_add_block_to_block (pblock
, &se
.pre
);
3580 step
= gfc_evaluate_now (se
.expr
, pblock
);
3583 /* Special case simple loops. */
3586 if (integer_onep (step
))
3588 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
3593 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
3599 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
3600 /* The condition should not be folded. */
3601 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
3602 ? LE_EXPR
: GE_EXPR
,
3603 logical_type_node
, dovar
, to
);
3604 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3606 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3609 TREE_VEC_ELT (incr
, i
));
3613 /* STEP is not 1 or -1. Use:
3614 for (count = 0; count < (to + step - from) / step; count++)
3616 dovar = from + count * step;
3620 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
3621 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
3622 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
3624 tmp
= gfc_evaluate_now (tmp
, pblock
);
3625 count
= gfc_create_var (type
, "count");
3626 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
3627 build_int_cst (type
, 0));
3628 /* The condition should not be folded. */
3629 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
3632 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3634 build_int_cst (type
, 1));
3635 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3636 MODIFY_EXPR
, type
, count
,
3637 TREE_VEC_ELT (incr
, i
));
3639 /* Initialize DOVAR. */
3640 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
3641 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
3642 dovar_init e
= {dovar
, tmp
};
3643 inits
.safe_push (e
);
3644 if (clauses
->orderedc
)
3646 if (doacross_steps
== NULL
)
3647 vec_safe_grow_cleared (doacross_steps
, clauses
->orderedc
);
3648 (*doacross_steps
)[i
] = step
;
3652 TREE_VEC_ELT (orig_decls
, i
) = dovar_decl
;
3654 if (dovar_found
== 2
3655 && op
== EXEC_OMP_SIMD
3659 for (tmp
= omp_clauses
; tmp
; tmp
= OMP_CLAUSE_CHAIN (tmp
))
3660 if (OMP_CLAUSE_CODE (tmp
) == OMP_CLAUSE_LINEAR
3661 && OMP_CLAUSE_DECL (tmp
) == dovar
)
3663 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3669 if (op
== EXEC_OMP_SIMD
)
3673 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3674 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
3675 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3678 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3683 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3684 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
3685 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3687 if (dovar_found
== 2)
3694 /* If dovar is lastprivate, but different counter is used,
3695 dovar += step needs to be added to
3696 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
3697 will have the value on entry of the last loop, rather
3698 than value after iterator increment. */
3699 if (clauses
->orderedc
)
3701 if (clauses
->collapse
<= 1 || i
>= clauses
->collapse
)
3704 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3705 type
, count
, build_one_cst (type
));
3706 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
,
3708 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
3713 tmp
= gfc_evaluate_now (step
, pblock
);
3714 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
3717 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
3719 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3720 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3721 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3723 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
3726 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
3727 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3729 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
3733 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
3735 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3736 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3737 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3739 tree l
= build_omp_clause (input_location
,
3740 OMP_CLAUSE_LASTPRIVATE
);
3741 OMP_CLAUSE_DECL (l
) = dovar_decl
;
3742 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
3743 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
3745 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
3749 gcc_assert (simple
|| c
!= NULL
);
3753 if (op
!= EXEC_OMP_SIMD
)
3754 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3755 else if (collapse
== 1)
3757 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3758 OMP_CLAUSE_LINEAR_STEP (tmp
) = build_int_cst (type
, 1);
3759 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3760 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
3763 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3764 OMP_CLAUSE_DECL (tmp
) = count
;
3765 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3768 if (i
+ 1 < collapse
)
3769 code
= code
->block
->next
;
3772 if (pblock
!= &block
)
3775 gfc_start_block (&block
);
3778 gfc_start_block (&body
);
3780 FOR_EACH_VEC_ELT (inits
, ix
, di
)
3781 gfc_add_modify (&body
, di
->var
, di
->init
);
3784 /* Cycle statement is implemented with a goto. Exit statement must not be
3785 present for this loop. */
3786 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3788 /* Put these labels where they can be found later. */
3790 code
->cycle_label
= cycle_label
;
3791 code
->exit_label
= NULL_TREE
;
3793 /* Main loop body. */
3794 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
3795 gfc_add_expr_to_block (&body
, tmp
);
3797 /* Label for cycle statements (if needed). */
3798 if (TREE_USED (cycle_label
))
3800 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3801 gfc_add_expr_to_block (&body
, tmp
);
3804 /* End of loop body. */
3807 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
3808 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
3809 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
3810 case EXEC_OMP_TASKLOOP
: stmt
= make_node (OMP_TASKLOOP
); break;
3811 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
3812 default: gcc_unreachable ();
3815 TREE_TYPE (stmt
) = void_type_node
;
3816 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
3817 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
3818 OMP_FOR_INIT (stmt
) = init
;
3819 OMP_FOR_COND (stmt
) = cond
;
3820 OMP_FOR_INCR (stmt
) = incr
;
3822 OMP_FOR_ORIG_DECLS (stmt
) = orig_decls
;
3823 gfc_add_expr_to_block (&block
, stmt
);
3825 vec_free (doacross_steps
);
3826 doacross_steps
= saved_doacross_steps
;
3828 return gfc_finish_block (&block
);
3831 /* parallel loop and kernels loop. */
3833 gfc_trans_oacc_combined_directive (gfc_code
*code
)
3835 stmtblock_t block
, *pblock
= NULL
;
3836 gfc_omp_clauses construct_clauses
, loop_clauses
;
3837 tree stmt
, oacc_clauses
= NULL_TREE
;
3838 enum tree_code construct_code
;
3842 case EXEC_OACC_PARALLEL_LOOP
:
3843 construct_code
= OACC_PARALLEL
;
3845 case EXEC_OACC_KERNELS_LOOP
:
3846 construct_code
= OACC_KERNELS
;
3852 gfc_start_block (&block
);
3854 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
3855 if (code
->ext
.omp_clauses
!= NULL
)
3857 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
3858 sizeof (construct_clauses
));
3859 loop_clauses
.collapse
= construct_clauses
.collapse
;
3860 loop_clauses
.gang
= construct_clauses
.gang
;
3861 loop_clauses
.gang_static
= construct_clauses
.gang_static
;
3862 loop_clauses
.gang_num_expr
= construct_clauses
.gang_num_expr
;
3863 loop_clauses
.gang_static_expr
= construct_clauses
.gang_static_expr
;
3864 loop_clauses
.vector
= construct_clauses
.vector
;
3865 loop_clauses
.vector_expr
= construct_clauses
.vector_expr
;
3866 loop_clauses
.worker
= construct_clauses
.worker
;
3867 loop_clauses
.worker_expr
= construct_clauses
.worker_expr
;
3868 loop_clauses
.seq
= construct_clauses
.seq
;
3869 loop_clauses
.par_auto
= construct_clauses
.par_auto
;
3870 loop_clauses
.independent
= construct_clauses
.independent
;
3871 loop_clauses
.tile_list
= construct_clauses
.tile_list
;
3872 loop_clauses
.lists
[OMP_LIST_PRIVATE
]
3873 = construct_clauses
.lists
[OMP_LIST_PRIVATE
];
3874 loop_clauses
.lists
[OMP_LIST_REDUCTION
]
3875 = construct_clauses
.lists
[OMP_LIST_REDUCTION
];
3876 construct_clauses
.gang
= false;
3877 construct_clauses
.gang_static
= false;
3878 construct_clauses
.gang_num_expr
= NULL
;
3879 construct_clauses
.gang_static_expr
= NULL
;
3880 construct_clauses
.vector
= false;
3881 construct_clauses
.vector_expr
= NULL
;
3882 construct_clauses
.worker
= false;
3883 construct_clauses
.worker_expr
= NULL
;
3884 construct_clauses
.seq
= false;
3885 construct_clauses
.par_auto
= false;
3886 construct_clauses
.independent
= false;
3887 construct_clauses
.independent
= false;
3888 construct_clauses
.tile_list
= NULL
;
3889 construct_clauses
.lists
[OMP_LIST_PRIVATE
] = NULL
;
3890 if (construct_code
== OACC_KERNELS
)
3891 construct_clauses
.lists
[OMP_LIST_REDUCTION
] = NULL
;
3892 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
3895 if (!loop_clauses
.seq
)
3899 stmt
= gfc_trans_omp_do (code
, EXEC_OACC_LOOP
, pblock
, &loop_clauses
, NULL
);
3900 if (TREE_CODE (stmt
) != BIND_EXPR
)
3901 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3904 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
3906 gfc_add_expr_to_block (&block
, stmt
);
3907 return gfc_finish_block (&block
);
3911 gfc_trans_omp_flush (void)
3913 tree decl
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
3914 return build_call_expr_loc (input_location
, decl
, 0);
3918 gfc_trans_omp_master (gfc_code
*code
)
3920 tree stmt
= gfc_trans_code (code
->block
->next
);
3921 if (IS_EMPTY_STMT (stmt
))
3923 return build1_v (OMP_MASTER
, stmt
);
3927 gfc_trans_omp_ordered (gfc_code
*code
)
3931 if (!code
->ext
.omp_clauses
->simd
)
3932 return gfc_trans_code (code
->block
? code
->block
->next
: NULL
);
3933 code
->ext
.omp_clauses
->threads
= 0;
3935 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, code
->ext
.omp_clauses
,
3937 return build2_loc (input_location
, OMP_ORDERED
, void_type_node
,
3938 code
->block
? gfc_trans_code (code
->block
->next
)
3939 : NULL_TREE
, omp_clauses
);
3943 gfc_trans_omp_parallel (gfc_code
*code
)
3946 tree stmt
, omp_clauses
;
3948 gfc_start_block (&block
);
3949 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3952 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3953 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3954 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3956 gfc_add_expr_to_block (&block
, stmt
);
3957 return gfc_finish_block (&block
);
3964 GFC_OMP_SPLIT_PARALLEL
,
3965 GFC_OMP_SPLIT_DISTRIBUTE
,
3966 GFC_OMP_SPLIT_TEAMS
,
3967 GFC_OMP_SPLIT_TARGET
,
3968 GFC_OMP_SPLIT_TASKLOOP
,
3974 GFC_OMP_MASK_SIMD
= (1 << GFC_OMP_SPLIT_SIMD
),
3975 GFC_OMP_MASK_DO
= (1 << GFC_OMP_SPLIT_DO
),
3976 GFC_OMP_MASK_PARALLEL
= (1 << GFC_OMP_SPLIT_PARALLEL
),
3977 GFC_OMP_MASK_DISTRIBUTE
= (1 << GFC_OMP_SPLIT_DISTRIBUTE
),
3978 GFC_OMP_MASK_TEAMS
= (1 << GFC_OMP_SPLIT_TEAMS
),
3979 GFC_OMP_MASK_TARGET
= (1 << GFC_OMP_SPLIT_TARGET
),
3980 GFC_OMP_MASK_TASKLOOP
= (1 << GFC_OMP_SPLIT_TASKLOOP
)
3984 gfc_split_omp_clauses (gfc_code
*code
,
3985 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
3987 int mask
= 0, innermost
= 0;
3988 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
3991 case EXEC_OMP_DISTRIBUTE
:
3992 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3994 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
3995 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3996 innermost
= GFC_OMP_SPLIT_DO
;
3998 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
3999 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
4000 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4001 innermost
= GFC_OMP_SPLIT_SIMD
;
4003 case EXEC_OMP_DISTRIBUTE_SIMD
:
4004 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
4005 innermost
= GFC_OMP_SPLIT_SIMD
;
4008 innermost
= GFC_OMP_SPLIT_DO
;
4010 case EXEC_OMP_DO_SIMD
:
4011 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4012 innermost
= GFC_OMP_SPLIT_SIMD
;
4014 case EXEC_OMP_PARALLEL
:
4015 innermost
= GFC_OMP_SPLIT_PARALLEL
;
4017 case EXEC_OMP_PARALLEL_DO
:
4018 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4019 innermost
= GFC_OMP_SPLIT_DO
;
4021 case EXEC_OMP_PARALLEL_DO_SIMD
:
4022 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4023 innermost
= GFC_OMP_SPLIT_SIMD
;
4026 innermost
= GFC_OMP_SPLIT_SIMD
;
4028 case EXEC_OMP_TARGET
:
4029 innermost
= GFC_OMP_SPLIT_TARGET
;
4031 case EXEC_OMP_TARGET_PARALLEL
:
4032 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
;
4033 innermost
= GFC_OMP_SPLIT_PARALLEL
;
4035 case EXEC_OMP_TARGET_PARALLEL_DO
:
4036 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4037 innermost
= GFC_OMP_SPLIT_DO
;
4039 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
4040 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
4041 | GFC_OMP_MASK_SIMD
;
4042 innermost
= GFC_OMP_SPLIT_SIMD
;
4044 case EXEC_OMP_TARGET_SIMD
:
4045 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_SIMD
;
4046 innermost
= GFC_OMP_SPLIT_SIMD
;
4048 case EXEC_OMP_TARGET_TEAMS
:
4049 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
4050 innermost
= GFC_OMP_SPLIT_TEAMS
;
4052 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4053 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
4054 | GFC_OMP_MASK_DISTRIBUTE
;
4055 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
4057 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4058 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4059 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4060 innermost
= GFC_OMP_SPLIT_DO
;
4062 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4063 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4064 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4065 innermost
= GFC_OMP_SPLIT_SIMD
;
4067 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4068 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
4069 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
4070 innermost
= GFC_OMP_SPLIT_SIMD
;
4072 case EXEC_OMP_TASKLOOP
:
4073 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
4075 case EXEC_OMP_TASKLOOP_SIMD
:
4076 mask
= GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
4077 innermost
= GFC_OMP_SPLIT_SIMD
;
4079 case EXEC_OMP_TEAMS
:
4080 innermost
= GFC_OMP_SPLIT_TEAMS
;
4082 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4083 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
4084 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
4086 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4087 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4088 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4089 innermost
= GFC_OMP_SPLIT_DO
;
4091 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4092 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4093 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4094 innermost
= GFC_OMP_SPLIT_SIMD
;
4096 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4097 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
4098 innermost
= GFC_OMP_SPLIT_SIMD
;
4105 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
4108 if (code
->ext
.omp_clauses
!= NULL
)
4110 if (mask
& GFC_OMP_MASK_TARGET
)
4112 /* First the clauses that are unique to some constructs. */
4113 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
4114 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
4115 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_IS_DEVICE_PTR
]
4116 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IS_DEVICE_PTR
];
4117 clausesa
[GFC_OMP_SPLIT_TARGET
].device
4118 = code
->ext
.omp_clauses
->device
;
4119 clausesa
[GFC_OMP_SPLIT_TARGET
].defaultmap
4120 = code
->ext
.omp_clauses
->defaultmap
;
4121 clausesa
[GFC_OMP_SPLIT_TARGET
].if_exprs
[OMP_IF_TARGET
]
4122 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TARGET
];
4123 /* And this is copied to all. */
4124 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
4125 = code
->ext
.omp_clauses
->if_expr
;
4127 if (mask
& GFC_OMP_MASK_TEAMS
)
4129 /* First the clauses that are unique to some constructs. */
4130 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
4131 = code
->ext
.omp_clauses
->num_teams
;
4132 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
4133 = code
->ext
.omp_clauses
->thread_limit
;
4134 /* Shared and default clauses are allowed on parallel, teams
4136 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
4137 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4138 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
4139 = code
->ext
.omp_clauses
->default_sharing
;
4141 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4143 /* First the clauses that are unique to some constructs. */
4144 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
4145 = code
->ext
.omp_clauses
->dist_sched_kind
;
4146 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
4147 = code
->ext
.omp_clauses
->dist_chunk_size
;
4148 /* Duplicate collapse. */
4149 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
4150 = code
->ext
.omp_clauses
->collapse
;
4152 if (mask
& GFC_OMP_MASK_PARALLEL
)
4154 /* First the clauses that are unique to some constructs. */
4155 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
4156 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
4157 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
4158 = code
->ext
.omp_clauses
->num_threads
;
4159 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
4160 = code
->ext
.omp_clauses
->proc_bind
;
4161 /* Shared and default clauses are allowed on parallel, teams
4163 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
4164 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4165 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
4166 = code
->ext
.omp_clauses
->default_sharing
;
4167 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_exprs
[OMP_IF_PARALLEL
]
4168 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_PARALLEL
];
4169 /* And this is copied to all. */
4170 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
4171 = code
->ext
.omp_clauses
->if_expr
;
4173 if (mask
& GFC_OMP_MASK_DO
)
4175 /* First the clauses that are unique to some constructs. */
4176 clausesa
[GFC_OMP_SPLIT_DO
].ordered
4177 = code
->ext
.omp_clauses
->ordered
;
4178 clausesa
[GFC_OMP_SPLIT_DO
].orderedc
4179 = code
->ext
.omp_clauses
->orderedc
;
4180 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
4181 = code
->ext
.omp_clauses
->sched_kind
;
4182 if (innermost
== GFC_OMP_SPLIT_SIMD
)
4183 clausesa
[GFC_OMP_SPLIT_DO
].sched_simd
4184 = code
->ext
.omp_clauses
->sched_simd
;
4185 clausesa
[GFC_OMP_SPLIT_DO
].sched_monotonic
4186 = code
->ext
.omp_clauses
->sched_monotonic
;
4187 clausesa
[GFC_OMP_SPLIT_DO
].sched_nonmonotonic
4188 = code
->ext
.omp_clauses
->sched_nonmonotonic
;
4189 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
4190 = code
->ext
.omp_clauses
->chunk_size
;
4191 clausesa
[GFC_OMP_SPLIT_DO
].nowait
4192 = code
->ext
.omp_clauses
->nowait
;
4193 /* Duplicate collapse. */
4194 clausesa
[GFC_OMP_SPLIT_DO
].collapse
4195 = code
->ext
.omp_clauses
->collapse
;
4197 if (mask
& GFC_OMP_MASK_SIMD
)
4199 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
4200 = code
->ext
.omp_clauses
->safelen_expr
;
4201 clausesa
[GFC_OMP_SPLIT_SIMD
].simdlen_expr
4202 = code
->ext
.omp_clauses
->simdlen_expr
;
4203 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
4204 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
4205 /* Duplicate collapse. */
4206 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
4207 = code
->ext
.omp_clauses
->collapse
;
4209 if (mask
& GFC_OMP_MASK_TASKLOOP
)
4211 /* First the clauses that are unique to some constructs. */
4212 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].nogroup
4213 = code
->ext
.omp_clauses
->nogroup
;
4214 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].grainsize
4215 = code
->ext
.omp_clauses
->grainsize
;
4216 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].num_tasks
4217 = code
->ext
.omp_clauses
->num_tasks
;
4218 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].priority
4219 = code
->ext
.omp_clauses
->priority
;
4220 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].final_expr
4221 = code
->ext
.omp_clauses
->final_expr
;
4222 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].untied
4223 = code
->ext
.omp_clauses
->untied
;
4224 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].mergeable
4225 = code
->ext
.omp_clauses
->mergeable
;
4226 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_exprs
[OMP_IF_TASKLOOP
]
4227 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TASKLOOP
];
4228 /* And this is copied to all. */
4229 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_expr
4230 = code
->ext
.omp_clauses
->if_expr
;
4231 /* Shared and default clauses are allowed on parallel, teams
4233 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_SHARED
]
4234 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4235 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].default_sharing
4236 = code
->ext
.omp_clauses
->default_sharing
;
4237 /* Duplicate collapse. */
4238 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].collapse
4239 = code
->ext
.omp_clauses
->collapse
;
4241 /* Private clause is supported on all constructs,
4242 it is enough to put it on the innermost one. For
4243 !$ omp parallel do put it on parallel though,
4244 as that's what we did for OpenMP 3.1. */
4245 clausesa
[innermost
== GFC_OMP_SPLIT_DO
4246 ? (int) GFC_OMP_SPLIT_PARALLEL
4247 : innermost
].lists
[OMP_LIST_PRIVATE
]
4248 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
4249 /* Firstprivate clause is supported on all constructs but
4250 simd. Put it on the outermost of those and duplicate
4251 on parallel and teams. */
4252 if (mask
& GFC_OMP_MASK_TARGET
)
4253 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_FIRSTPRIVATE
]
4254 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4255 if (mask
& GFC_OMP_MASK_TEAMS
)
4256 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
4257 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4258 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4259 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
4260 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4261 if (mask
& GFC_OMP_MASK_PARALLEL
)
4262 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
4263 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4264 else if (mask
& GFC_OMP_MASK_DO
)
4265 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
4266 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4267 /* Lastprivate is allowed on distribute, do and simd.
4268 In parallel do{, simd} we actually want to put it on
4269 parallel rather than do. */
4270 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4271 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_LASTPRIVATE
]
4272 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4273 if (mask
& GFC_OMP_MASK_PARALLEL
)
4274 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
4275 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4276 else if (mask
& GFC_OMP_MASK_DO
)
4277 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
4278 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4279 if (mask
& GFC_OMP_MASK_SIMD
)
4280 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
4281 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4282 /* Reduction is allowed on simd, do, parallel and teams.
4283 Duplicate it on all of them, but omit on do if
4284 parallel is present. */
4285 if (mask
& GFC_OMP_MASK_TEAMS
)
4286 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_REDUCTION
]
4287 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4288 if (mask
& GFC_OMP_MASK_PARALLEL
)
4289 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_REDUCTION
]
4290 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4291 else if (mask
& GFC_OMP_MASK_DO
)
4292 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_REDUCTION
]
4293 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4294 if (mask
& GFC_OMP_MASK_SIMD
)
4295 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_REDUCTION
]
4296 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4297 /* Linear clause is supported on do and simd,
4298 put it on the innermost one. */
4299 clausesa
[innermost
].lists
[OMP_LIST_LINEAR
]
4300 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
4302 if ((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
4303 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
4304 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
4308 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
4309 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
4312 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4313 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
4316 gfc_start_block (&block
);
4318 gfc_init_block (&block
);
4320 if (clausesa
== NULL
)
4322 clausesa
= clausesa_buf
;
4323 gfc_split_omp_clauses (code
, clausesa
);
4327 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
4328 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
4329 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
4332 if (TREE_CODE (body
) != BIND_EXPR
)
4333 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
4337 else if (TREE_CODE (body
) != BIND_EXPR
)
4338 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
4341 stmt
= make_node (OMP_FOR
);
4342 TREE_TYPE (stmt
) = void_type_node
;
4343 OMP_FOR_BODY (stmt
) = body
;
4344 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
4348 gfc_add_expr_to_block (&block
, stmt
);
4349 return gfc_finish_block (&block
);
4353 gfc_trans_omp_parallel_do (gfc_code
*code
, stmtblock_t
*pblock
,
4354 gfc_omp_clauses
*clausesa
)
4356 stmtblock_t block
, *new_pblock
= pblock
;
4357 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4358 tree stmt
, omp_clauses
= NULL_TREE
;
4361 gfc_start_block (&block
);
4363 gfc_init_block (&block
);
4365 if (clausesa
== NULL
)
4367 clausesa
= clausesa_buf
;
4368 gfc_split_omp_clauses (code
, clausesa
);
4371 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
4375 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
4376 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
4377 new_pblock
= &block
;
4381 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DO
, new_pblock
,
4382 &clausesa
[GFC_OMP_SPLIT_DO
], omp_clauses
);
4385 if (TREE_CODE (stmt
) != BIND_EXPR
)
4386 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4390 else if (TREE_CODE (stmt
) != BIND_EXPR
)
4391 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
4392 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4394 OMP_PARALLEL_COMBINED (stmt
) = 1;
4395 gfc_add_expr_to_block (&block
, stmt
);
4396 return gfc_finish_block (&block
);
4400 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
4401 gfc_omp_clauses
*clausesa
)
4404 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4405 tree stmt
, omp_clauses
= NULL_TREE
;
4408 gfc_start_block (&block
);
4410 gfc_init_block (&block
);
4412 if (clausesa
== NULL
)
4414 clausesa
= clausesa_buf
;
4415 gfc_split_omp_clauses (code
, clausesa
);
4419 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
4423 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
4426 if (TREE_CODE (stmt
) != BIND_EXPR
)
4427 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4431 else if (TREE_CODE (stmt
) != BIND_EXPR
)
4432 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
4435 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4437 OMP_PARALLEL_COMBINED (stmt
) = 1;
4439 gfc_add_expr_to_block (&block
, stmt
);
4440 return gfc_finish_block (&block
);
4444 gfc_trans_omp_parallel_sections (gfc_code
*code
)
4447 gfc_omp_clauses section_clauses
;
4448 tree stmt
, omp_clauses
;
4450 memset (§ion_clauses
, 0, sizeof (section_clauses
));
4451 section_clauses
.nowait
= true;
4453 gfc_start_block (&block
);
4454 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4457 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
4458 if (TREE_CODE (stmt
) != BIND_EXPR
)
4459 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4462 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4464 OMP_PARALLEL_COMBINED (stmt
) = 1;
4465 gfc_add_expr_to_block (&block
, stmt
);
4466 return gfc_finish_block (&block
);
4470 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
4473 gfc_omp_clauses workshare_clauses
;
4474 tree stmt
, omp_clauses
;
4476 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
4477 workshare_clauses
.nowait
= true;
4479 gfc_start_block (&block
);
4480 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4483 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
4484 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4485 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4487 OMP_PARALLEL_COMBINED (stmt
) = 1;
4488 gfc_add_expr_to_block (&block
, stmt
);
4489 return gfc_finish_block (&block
);
4493 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4495 stmtblock_t block
, body
;
4496 tree omp_clauses
, stmt
;
4497 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
4499 gfc_start_block (&block
);
4501 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
4503 gfc_init_block (&body
);
4504 for (code
= code
->block
; code
; code
= code
->block
)
4506 /* Last section is special because of lastprivate, so even if it
4507 is empty, chain it in. */
4508 stmt
= gfc_trans_omp_code (code
->next
,
4509 has_lastprivate
&& code
->block
== NULL
);
4510 if (! IS_EMPTY_STMT (stmt
))
4512 stmt
= build1_v (OMP_SECTION
, stmt
);
4513 gfc_add_expr_to_block (&body
, stmt
);
4516 stmt
= gfc_finish_block (&body
);
4518 stmt
= build2_loc (input_location
, OMP_SECTIONS
, void_type_node
, stmt
,
4520 gfc_add_expr_to_block (&block
, stmt
);
4522 return gfc_finish_block (&block
);
4526 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4528 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, clauses
, code
->loc
);
4529 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4530 stmt
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, stmt
,
4536 gfc_trans_omp_task (gfc_code
*code
)
4539 tree stmt
, omp_clauses
;
4541 gfc_start_block (&block
);
4542 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4545 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4546 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4547 stmt
= build2_loc (input_location
, OMP_TASK
, void_type_node
, stmt
,
4549 gfc_add_expr_to_block (&block
, stmt
);
4550 return gfc_finish_block (&block
);
4554 gfc_trans_omp_taskgroup (gfc_code
*code
)
4556 tree stmt
= gfc_trans_code (code
->block
->next
);
4557 return build1_loc (input_location
, OMP_TASKGROUP
, void_type_node
, stmt
);
4561 gfc_trans_omp_taskwait (void)
4563 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
4564 return build_call_expr_loc (input_location
, decl
, 0);
4568 gfc_trans_omp_taskyield (void)
4570 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
4571 return build_call_expr_loc (input_location
, decl
, 0);
4575 gfc_trans_omp_distribute (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
4578 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4579 tree stmt
, omp_clauses
= NULL_TREE
;
4581 gfc_start_block (&block
);
4582 if (clausesa
== NULL
)
4584 clausesa
= clausesa_buf
;
4585 gfc_split_omp_clauses (code
, clausesa
);
4589 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4593 case EXEC_OMP_DISTRIBUTE
:
4594 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4595 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4596 /* This is handled in gfc_trans_omp_do. */
4599 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4600 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4601 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4602 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
4603 if (TREE_CODE (stmt
) != BIND_EXPR
)
4604 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4608 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4609 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4610 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4611 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
4612 if (TREE_CODE (stmt
) != BIND_EXPR
)
4613 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4617 case EXEC_OMP_DISTRIBUTE_SIMD
:
4618 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4619 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4620 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4621 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4622 if (TREE_CODE (stmt
) != BIND_EXPR
)
4623 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4632 tree distribute
= make_node (OMP_DISTRIBUTE
);
4633 TREE_TYPE (distribute
) = void_type_node
;
4634 OMP_FOR_BODY (distribute
) = stmt
;
4635 OMP_FOR_CLAUSES (distribute
) = omp_clauses
;
4638 gfc_add_expr_to_block (&block
, stmt
);
4639 return gfc_finish_block (&block
);
4643 gfc_trans_omp_teams (gfc_code
*code
, gfc_omp_clauses
*clausesa
,
4647 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4649 bool combined
= true;
4651 gfc_start_block (&block
);
4652 if (clausesa
== NULL
)
4654 clausesa
= clausesa_buf
;
4655 gfc_split_omp_clauses (code
, clausesa
);
4659 = chainon (omp_clauses
,
4660 gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TEAMS
],
4664 case EXEC_OMP_TARGET_TEAMS
:
4665 case EXEC_OMP_TEAMS
:
4666 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4669 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4670 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4671 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DISTRIBUTE
, NULL
,
4672 &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4676 stmt
= gfc_trans_omp_distribute (code
, clausesa
);
4681 stmt
= build2_loc (input_location
, OMP_TEAMS
, void_type_node
, stmt
,
4684 OMP_TEAMS_COMBINED (stmt
) = 1;
4686 gfc_add_expr_to_block (&block
, stmt
);
4687 return gfc_finish_block (&block
);
4691 gfc_trans_omp_target (gfc_code
*code
)
4694 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
4695 tree stmt
, omp_clauses
= NULL_TREE
;
4697 gfc_start_block (&block
);
4698 gfc_split_omp_clauses (code
, clausesa
);
4701 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TARGET
],
4705 case EXEC_OMP_TARGET
:
4707 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4708 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4710 case EXEC_OMP_TARGET_PARALLEL
:
4714 gfc_start_block (&iblock
);
4716 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
4718 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4719 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4721 gfc_add_expr_to_block (&iblock
, stmt
);
4722 stmt
= gfc_finish_block (&iblock
);
4723 if (TREE_CODE (stmt
) != BIND_EXPR
)
4724 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4729 case EXEC_OMP_TARGET_PARALLEL_DO
:
4730 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
4731 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
4732 if (TREE_CODE (stmt
) != BIND_EXPR
)
4733 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4737 case EXEC_OMP_TARGET_SIMD
:
4738 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4739 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4740 if (TREE_CODE (stmt
) != BIND_EXPR
)
4741 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4747 && (clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
4748 || clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
))
4750 gfc_omp_clauses clausesb
;
4752 /* For combined !$omp target teams, the num_teams and
4753 thread_limit clauses are evaluated before entering the
4754 target construct. */
4755 memset (&clausesb
, '\0', sizeof (clausesb
));
4756 clausesb
.num_teams
= clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
;
4757 clausesb
.thread_limit
= clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
;
4758 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
= NULL
;
4759 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
= NULL
;
4761 = gfc_trans_omp_clauses (&block
, &clausesb
, code
->loc
);
4763 stmt
= gfc_trans_omp_teams (code
, clausesa
, teams_clauses
);
4768 stmt
= gfc_trans_omp_teams (code
, clausesa
, NULL_TREE
);
4770 if (TREE_CODE (stmt
) != BIND_EXPR
)
4771 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4778 stmt
= build2_loc (input_location
, OMP_TARGET
, void_type_node
, stmt
,
4780 if (code
->op
!= EXEC_OMP_TARGET
)
4781 OMP_TARGET_COMBINED (stmt
) = 1;
4783 gfc_add_expr_to_block (&block
, stmt
);
4784 return gfc_finish_block (&block
);
4788 gfc_trans_omp_taskloop (gfc_code
*code
)
4791 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
4792 tree stmt
, omp_clauses
= NULL_TREE
;
4794 gfc_start_block (&block
);
4795 gfc_split_omp_clauses (code
, clausesa
);
4798 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TASKLOOP
],
4802 case EXEC_OMP_TASKLOOP
:
4803 /* This is handled in gfc_trans_omp_do. */
4806 case EXEC_OMP_TASKLOOP_SIMD
:
4807 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4808 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4809 if (TREE_CODE (stmt
) != BIND_EXPR
)
4810 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4819 tree taskloop
= make_node (OMP_TASKLOOP
);
4820 TREE_TYPE (taskloop
) = void_type_node
;
4821 OMP_FOR_BODY (taskloop
) = stmt
;
4822 OMP_FOR_CLAUSES (taskloop
) = omp_clauses
;
4825 gfc_add_expr_to_block (&block
, stmt
);
4826 return gfc_finish_block (&block
);
4830 gfc_trans_omp_target_data (gfc_code
*code
)
4833 tree stmt
, omp_clauses
;
4835 gfc_start_block (&block
);
4836 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4838 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4839 stmt
= build2_loc (input_location
, OMP_TARGET_DATA
, void_type_node
, stmt
,
4841 gfc_add_expr_to_block (&block
, stmt
);
4842 return gfc_finish_block (&block
);
4846 gfc_trans_omp_target_enter_data (gfc_code
*code
)
4849 tree stmt
, omp_clauses
;
4851 gfc_start_block (&block
);
4852 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4854 stmt
= build1_loc (input_location
, OMP_TARGET_ENTER_DATA
, void_type_node
,
4856 gfc_add_expr_to_block (&block
, stmt
);
4857 return gfc_finish_block (&block
);
4861 gfc_trans_omp_target_exit_data (gfc_code
*code
)
4864 tree stmt
, omp_clauses
;
4866 gfc_start_block (&block
);
4867 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4869 stmt
= build1_loc (input_location
, OMP_TARGET_EXIT_DATA
, void_type_node
,
4871 gfc_add_expr_to_block (&block
, stmt
);
4872 return gfc_finish_block (&block
);
4876 gfc_trans_omp_target_update (gfc_code
*code
)
4879 tree stmt
, omp_clauses
;
4881 gfc_start_block (&block
);
4882 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4884 stmt
= build1_loc (input_location
, OMP_TARGET_UPDATE
, void_type_node
,
4886 gfc_add_expr_to_block (&block
, stmt
);
4887 return gfc_finish_block (&block
);
4891 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4893 tree res
, tmp
, stmt
;
4894 stmtblock_t block
, *pblock
= NULL
;
4895 stmtblock_t singleblock
;
4896 int saved_ompws_flags
;
4897 bool singleblock_in_progress
= false;
4898 /* True if previous gfc_code in workshare construct is not workshared. */
4899 bool prev_singleunit
;
4901 code
= code
->block
->next
;
4905 gfc_start_block (&block
);
4908 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
4909 prev_singleunit
= false;
4911 /* Translate statements one by one to trees until we reach
4912 the end of the workshare construct. Adjacent gfc_codes that
4913 are a single unit of work are clustered and encapsulated in a
4914 single OMP_SINGLE construct. */
4915 for (; code
; code
= code
->next
)
4917 if (code
->here
!= 0)
4919 res
= gfc_trans_label_here (code
);
4920 gfc_add_expr_to_block (pblock
, res
);
4923 /* No dependence analysis, use for clauses with wait.
4924 If this is the last gfc_code, use default omp_clauses. */
4925 if (code
->next
== NULL
&& clauses
->nowait
)
4926 ompws_flags
|= OMPWS_NOWAIT
;
4928 /* By default, every gfc_code is a single unit of work. */
4929 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
4930 ompws_flags
&= ~(OMPWS_SCALARIZER_WS
| OMPWS_SCALARIZER_BODY
);
4939 res
= gfc_trans_assign (code
);
4942 case EXEC_POINTER_ASSIGN
:
4943 res
= gfc_trans_pointer_assign (code
);
4946 case EXEC_INIT_ASSIGN
:
4947 res
= gfc_trans_init_assign (code
);
4951 res
= gfc_trans_forall (code
);
4955 res
= gfc_trans_where (code
);
4958 case EXEC_OMP_ATOMIC
:
4959 res
= gfc_trans_omp_directive (code
);
4962 case EXEC_OMP_PARALLEL
:
4963 case EXEC_OMP_PARALLEL_DO
:
4964 case EXEC_OMP_PARALLEL_SECTIONS
:
4965 case EXEC_OMP_PARALLEL_WORKSHARE
:
4966 case EXEC_OMP_CRITICAL
:
4967 saved_ompws_flags
= ompws_flags
;
4969 res
= gfc_trans_omp_directive (code
);
4970 ompws_flags
= saved_ompws_flags
;
4974 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
4977 gfc_set_backend_locus (&code
->loc
);
4979 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
4981 if (prev_singleunit
)
4983 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
4984 /* Add current gfc_code to single block. */
4985 gfc_add_expr_to_block (&singleblock
, res
);
4988 /* Finish single block and add it to pblock. */
4989 tmp
= gfc_finish_block (&singleblock
);
4990 tmp
= build2_loc (input_location
, OMP_SINGLE
,
4991 void_type_node
, tmp
, NULL_TREE
);
4992 gfc_add_expr_to_block (pblock
, tmp
);
4993 /* Add current gfc_code to pblock. */
4994 gfc_add_expr_to_block (pblock
, res
);
4995 singleblock_in_progress
= false;
5000 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
5002 /* Start single block. */
5003 gfc_init_block (&singleblock
);
5004 gfc_add_expr_to_block (&singleblock
, res
);
5005 singleblock_in_progress
= true;
5008 /* Add the new statement to the block. */
5009 gfc_add_expr_to_block (pblock
, res
);
5011 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
5015 /* Finish remaining SINGLE block, if we were in the middle of one. */
5016 if (singleblock_in_progress
)
5018 /* Finish single block and add it to pblock. */
5019 tmp
= gfc_finish_block (&singleblock
);
5020 tmp
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, tmp
,
5022 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
5024 gfc_add_expr_to_block (pblock
, tmp
);
5027 stmt
= gfc_finish_block (pblock
);
5028 if (TREE_CODE (stmt
) != BIND_EXPR
)
5030 if (!IS_EMPTY_STMT (stmt
))
5032 tree bindblock
= poplevel (1, 0);
5033 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
5041 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
5042 stmt
= gfc_trans_omp_barrier ();
5049 gfc_trans_oacc_declare (gfc_code
*code
)
5052 tree stmt
, oacc_clauses
;
5053 enum tree_code construct_code
;
5055 construct_code
= OACC_DATA
;
5057 gfc_start_block (&block
);
5059 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.oacc_declare
->clauses
,
5061 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
5062 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
5064 gfc_add_expr_to_block (&block
, stmt
);
5066 return gfc_finish_block (&block
);
5070 gfc_trans_oacc_directive (gfc_code
*code
)
5074 case EXEC_OACC_PARALLEL_LOOP
:
5075 case EXEC_OACC_KERNELS_LOOP
:
5076 return gfc_trans_oacc_combined_directive (code
);
5077 case EXEC_OACC_PARALLEL
:
5078 case EXEC_OACC_KERNELS
:
5079 case EXEC_OACC_DATA
:
5080 case EXEC_OACC_HOST_DATA
:
5081 return gfc_trans_oacc_construct (code
);
5082 case EXEC_OACC_LOOP
:
5083 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
5085 case EXEC_OACC_UPDATE
:
5086 case EXEC_OACC_CACHE
:
5087 case EXEC_OACC_ENTER_DATA
:
5088 case EXEC_OACC_EXIT_DATA
:
5089 return gfc_trans_oacc_executable_directive (code
);
5090 case EXEC_OACC_WAIT
:
5091 return gfc_trans_oacc_wait_directive (code
);
5092 case EXEC_OACC_ATOMIC
:
5093 return gfc_trans_omp_atomic (code
);
5094 case EXEC_OACC_DECLARE
:
5095 return gfc_trans_oacc_declare (code
);
5102 gfc_trans_omp_directive (gfc_code
*code
)
5106 case EXEC_OMP_ATOMIC
:
5107 return gfc_trans_omp_atomic (code
);
5108 case EXEC_OMP_BARRIER
:
5109 return gfc_trans_omp_barrier ();
5110 case EXEC_OMP_CANCEL
:
5111 return gfc_trans_omp_cancel (code
);
5112 case EXEC_OMP_CANCELLATION_POINT
:
5113 return gfc_trans_omp_cancellation_point (code
);
5114 case EXEC_OMP_CRITICAL
:
5115 return gfc_trans_omp_critical (code
);
5116 case EXEC_OMP_DISTRIBUTE
:
5119 case EXEC_OMP_TASKLOOP
:
5120 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
5122 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
5123 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
5124 case EXEC_OMP_DISTRIBUTE_SIMD
:
5125 return gfc_trans_omp_distribute (code
, NULL
);
5126 case EXEC_OMP_DO_SIMD
:
5127 return gfc_trans_omp_do_simd (code
, NULL
, NULL
, NULL_TREE
);
5128 case EXEC_OMP_FLUSH
:
5129 return gfc_trans_omp_flush ();
5130 case EXEC_OMP_MASTER
:
5131 return gfc_trans_omp_master (code
);
5132 case EXEC_OMP_ORDERED
:
5133 return gfc_trans_omp_ordered (code
);
5134 case EXEC_OMP_PARALLEL
:
5135 return gfc_trans_omp_parallel (code
);
5136 case EXEC_OMP_PARALLEL_DO
:
5137 return gfc_trans_omp_parallel_do (code
, NULL
, NULL
);
5138 case EXEC_OMP_PARALLEL_DO_SIMD
:
5139 return gfc_trans_omp_parallel_do_simd (code
, NULL
, NULL
);
5140 case EXEC_OMP_PARALLEL_SECTIONS
:
5141 return gfc_trans_omp_parallel_sections (code
);
5142 case EXEC_OMP_PARALLEL_WORKSHARE
:
5143 return gfc_trans_omp_parallel_workshare (code
);
5144 case EXEC_OMP_SECTIONS
:
5145 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
5146 case EXEC_OMP_SINGLE
:
5147 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
5148 case EXEC_OMP_TARGET
:
5149 case EXEC_OMP_TARGET_PARALLEL
:
5150 case EXEC_OMP_TARGET_PARALLEL_DO
:
5151 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
5152 case EXEC_OMP_TARGET_SIMD
:
5153 case EXEC_OMP_TARGET_TEAMS
:
5154 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
5155 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5156 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5157 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
5158 return gfc_trans_omp_target (code
);
5159 case EXEC_OMP_TARGET_DATA
:
5160 return gfc_trans_omp_target_data (code
);
5161 case EXEC_OMP_TARGET_ENTER_DATA
:
5162 return gfc_trans_omp_target_enter_data (code
);
5163 case EXEC_OMP_TARGET_EXIT_DATA
:
5164 return gfc_trans_omp_target_exit_data (code
);
5165 case EXEC_OMP_TARGET_UPDATE
:
5166 return gfc_trans_omp_target_update (code
);
5168 return gfc_trans_omp_task (code
);
5169 case EXEC_OMP_TASKGROUP
:
5170 return gfc_trans_omp_taskgroup (code
);
5171 case EXEC_OMP_TASKLOOP_SIMD
:
5172 return gfc_trans_omp_taskloop (code
);
5173 case EXEC_OMP_TASKWAIT
:
5174 return gfc_trans_omp_taskwait ();
5175 case EXEC_OMP_TASKYIELD
:
5176 return gfc_trans_omp_taskyield ();
5177 case EXEC_OMP_TEAMS
:
5178 case EXEC_OMP_TEAMS_DISTRIBUTE
:
5179 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5180 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5181 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
5182 return gfc_trans_omp_teams (code
, NULL
, NULL_TREE
);
5183 case EXEC_OMP_WORKSHARE
:
5184 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);
5191 gfc_trans_omp_declare_simd (gfc_namespace
*ns
)
5196 gfc_omp_declare_simd
*ods
;
5197 for (ods
= ns
->omp_declare_simd
; ods
; ods
= ods
->next
)
5199 tree c
= gfc_trans_omp_clauses (NULL
, ods
->clauses
, ods
->where
, true);
5200 tree fndecl
= ns
->proc_name
->backend_decl
;
5202 c
= tree_cons (NULL_TREE
, c
, NULL_TREE
);
5203 c
= build_tree_list (get_identifier ("omp declare simd"), c
);
5204 TREE_CHAIN (c
) = DECL_ATTRIBUTES (fndecl
);
5205 DECL_ATTRIBUTES (fndecl
) = c
;