/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
28 #include "gimple-expr.h"
30 #include "stringpool.h"
31 #include "fold-const.h"
32 #include "gimplify.h" /* For create_tmp_var_raw. */
33 #include "trans-stmt.h"
34 #include "trans-types.h"
35 #include "trans-array.h"
36 #include "trans-const.h"
38 #include "gomp-constants.h"
39 #include "omp-general.h"
42 #define GCC_DIAG_STYLE __gcc_tdiag__
43 #include "diagnostic-core.h"
45 #define GCC_DIAG_STYLE __gcc_gfc__
49 /* True if OpenMP should privatize what this DECL points to rather
50 than the DECL itself. */
53 gfc_omp_privatize_by_reference (const_tree decl
)
55 tree type
= TREE_TYPE (decl
);
57 if (TREE_CODE (type
) == REFERENCE_TYPE
58 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
61 if (TREE_CODE (type
) == POINTER_TYPE
)
63 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
64 that have POINTER_TYPE type and aren't scalar pointers, scalar
65 allocatables, Cray pointees or C pointers are supposed to be
66 privatized by reference. */
67 if (GFC_DECL_GET_SCALAR_POINTER (decl
)
68 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
69 || GFC_DECL_CRAY_POINTEE (decl
)
70 || GFC_DECL_ASSOCIATE_VAR_P (decl
)
71 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
74 if (!DECL_ARTIFICIAL (decl
)
75 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
78 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
80 if (DECL_LANG_SPECIFIC (decl
)
81 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
88 /* True if OpenMP sharing attribute of DECL is predetermined. */
90 enum omp_clause_default_kind
91 gfc_omp_predetermined_sharing (tree decl
)
93 /* Associate names preserve the association established during ASSOCIATE.
94 As they are implemented either as pointers to the selector or array
95 descriptor and shouldn't really change in the ASSOCIATE region,
96 this decl can be either shared or firstprivate. If it is a pointer,
97 use firstprivate, as it is cheaper that way, otherwise make it shared. */
98 if (GFC_DECL_ASSOCIATE_VAR_P (decl
))
100 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
101 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
103 return OMP_CLAUSE_DEFAULT_SHARED
;
106 if (DECL_ARTIFICIAL (decl
)
107 && ! GFC_DECL_RESULT (decl
)
108 && ! (DECL_LANG_SPECIFIC (decl
)
109 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
110 return OMP_CLAUSE_DEFAULT_SHARED
;
112 /* Cray pointees shouldn't be listed in any clauses and should be
113 gimplified to dereference of the corresponding Cray pointer.
114 Make them all private, so that they are emitted in the debug
116 if (GFC_DECL_CRAY_POINTEE (decl
))
117 return OMP_CLAUSE_DEFAULT_PRIVATE
;
119 /* Assumed-size arrays are predetermined shared. */
120 if (TREE_CODE (decl
) == PARM_DECL
121 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
122 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
123 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
124 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
126 return OMP_CLAUSE_DEFAULT_SHARED
;
128 /* Dummy procedures aren't considered variables by OpenMP, thus are
129 disallowed in OpenMP clauses. They are represented as PARM_DECLs
130 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
131 to avoid complaining about their uses with default(none). */
132 if (TREE_CODE (decl
) == PARM_DECL
133 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
134 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
135 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
137 /* COMMON and EQUIVALENCE decls are shared. They
138 are only referenced through DECL_VALUE_EXPR of the variables
139 contained in them. If those are privatized, they will not be
140 gimplified to the COMMON or EQUIVALENCE decls. */
141 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
142 return OMP_CLAUSE_DEFAULT_SHARED
;
144 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
145 return OMP_CLAUSE_DEFAULT_SHARED
;
147 /* These are either array or derived parameters, or vtables.
148 In the former cases, the OpenMP standard doesn't consider them to be
149 variables at all (they can't be redefined), but they can nevertheless appear
150 in parallel/task regions and for default(none) purposes treat them as shared.
151 For vtables likely the same handling is desirable. */
152 if (VAR_P (decl
) && TREE_READONLY (decl
) && TREE_STATIC (decl
))
153 return OMP_CLAUSE_DEFAULT_SHARED
;
155 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
158 /* Return decl that should be used when reporting DEFAULT(NONE)
162 gfc_omp_report_decl (tree decl
)
164 if (DECL_ARTIFICIAL (decl
)
165 && DECL_LANG_SPECIFIC (decl
)
166 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
167 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
172 /* Return true if TYPE has any allocatable components. */
175 gfc_has_alloc_comps (tree type
, tree decl
)
179 if (POINTER_TYPE_P (type
))
181 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
182 type
= TREE_TYPE (type
);
183 else if (GFC_DECL_GET_SCALAR_POINTER (decl
))
187 if (GFC_DESCRIPTOR_TYPE_P (type
) || GFC_ARRAY_TYPE_P (type
))
188 type
= gfc_get_element_type (type
);
190 if (TREE_CODE (type
) != RECORD_TYPE
)
193 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
195 ftype
= TREE_TYPE (field
);
196 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
198 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
199 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
201 if (gfc_has_alloc_comps (ftype
, field
))
207 /* Return true if DECL in private clause needs
208 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
210 gfc_omp_private_outer_ref (tree decl
)
212 tree type
= TREE_TYPE (decl
);
214 if (gfc_omp_privatize_by_reference (decl
))
215 type
= TREE_TYPE (type
);
217 if (GFC_DESCRIPTOR_TYPE_P (type
)
218 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
221 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
224 if (gfc_has_alloc_comps (type
, decl
))
230 /* Callback for gfc_omp_unshare_expr. */
233 gfc_omp_unshare_expr_r (tree
*tp
, int *walk_subtrees
, void *)
236 enum tree_code code
= TREE_CODE (t
);
238 /* Stop at types, decls, constants like copy_tree_r. */
239 if (TREE_CODE_CLASS (code
) == tcc_type
240 || TREE_CODE_CLASS (code
) == tcc_declaration
241 || TREE_CODE_CLASS (code
) == tcc_constant
244 else if (handled_component_p (t
)
245 || TREE_CODE (t
) == MEM_REF
)
247 *tp
= unshare_expr (t
);
254 /* Unshare in expr anything that the FE which normally doesn't
255 care much about tree sharing (because during gimplification
256 everything is unshared) could cause problems with tree sharing
257 at omp-low.c time. */
260 gfc_omp_unshare_expr (tree expr
)
262 walk_tree (&expr
, gfc_omp_unshare_expr_r
, NULL
, NULL
);
/* Kind of traversal performed by gfc_walk_alloc_comps over allocatable
   components: destruction, default construction, or copy construction.  */

enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,
  WALK_ALLOC_COMPS_DEFAULT_CTOR,
  WALK_ALLOC_COMPS_COPY_CTOR
};
273 /* Handle allocatable components in OpenMP clauses. */
276 gfc_walk_alloc_comps (tree decl
, tree dest
, tree var
,
277 enum walk_alloc_comps kind
)
279 stmtblock_t block
, tmpblock
;
280 tree type
= TREE_TYPE (decl
), then_b
, tem
, field
;
281 gfc_init_block (&block
);
283 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
285 if (GFC_DESCRIPTOR_TYPE_P (type
))
287 gfc_init_block (&tmpblock
);
288 tem
= gfc_full_array_size (&tmpblock
, decl
,
289 GFC_TYPE_ARRAY_RANK (type
));
290 then_b
= gfc_finish_block (&tmpblock
);
291 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (then_b
));
292 tem
= gfc_omp_unshare_expr (tem
);
293 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
294 gfc_array_index_type
, tem
,
299 if (!TYPE_DOMAIN (type
)
300 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
301 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
302 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
304 tem
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
305 TYPE_SIZE_UNIT (type
),
306 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
307 tem
= size_binop (MINUS_EXPR
, tem
, size_one_node
);
310 tem
= array_type_nelts (type
);
311 tem
= fold_convert (gfc_array_index_type
, tem
);
314 tree nelems
= gfc_evaluate_now (tem
, &block
);
315 tree index
= gfc_create_var (gfc_array_index_type
, "S");
317 gfc_init_block (&tmpblock
);
318 tem
= gfc_conv_array_data (decl
);
319 tree declvar
= build_fold_indirect_ref_loc (input_location
, tem
);
320 tree declvref
= gfc_build_array_ref (declvar
, index
, NULL
);
321 tree destvar
, destvref
= NULL_TREE
;
324 tem
= gfc_conv_array_data (dest
);
325 destvar
= build_fold_indirect_ref_loc (input_location
, tem
);
326 destvref
= gfc_build_array_ref (destvar
, index
, NULL
);
328 gfc_add_expr_to_block (&tmpblock
,
329 gfc_walk_alloc_comps (declvref
, destvref
,
333 gfc_init_loopinfo (&loop
);
335 loop
.from
[0] = gfc_index_zero_node
;
336 loop
.loopvar
[0] = index
;
338 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
339 gfc_add_block_to_block (&block
, &loop
.pre
);
340 return gfc_finish_block (&block
);
342 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var
))
344 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
346 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
347 type
= TREE_TYPE (decl
);
350 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
);
351 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
353 tree ftype
= TREE_TYPE (field
);
354 tree declf
, destf
= NULL_TREE
;
355 bool has_alloc_comps
= gfc_has_alloc_comps (ftype
, field
);
356 if ((!GFC_DESCRIPTOR_TYPE_P (ftype
)
357 || GFC_TYPE_ARRAY_AKIND (ftype
) != GFC_ARRAY_ALLOCATABLE
)
358 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field
)
361 declf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
362 decl
, field
, NULL_TREE
);
364 destf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
365 dest
, field
, NULL_TREE
);
370 case WALK_ALLOC_COMPS_DTOR
:
372 case WALK_ALLOC_COMPS_DEFAULT_CTOR
:
373 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
374 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
376 gfc_add_modify (&block
, unshare_expr (destf
),
377 unshare_expr (declf
));
378 tem
= gfc_duplicate_allocatable_nocopy
379 (destf
, declf
, ftype
,
380 GFC_TYPE_ARRAY_RANK (ftype
));
382 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
383 tem
= gfc_duplicate_allocatable_nocopy (destf
, declf
, ftype
, 0);
385 case WALK_ALLOC_COMPS_COPY_CTOR
:
386 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
387 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
388 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
,
389 GFC_TYPE_ARRAY_RANK (ftype
),
391 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
392 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
, 0,
397 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
400 gfc_init_block (&tmpblock
);
401 gfc_add_expr_to_block (&tmpblock
,
402 gfc_walk_alloc_comps (declf
, destf
,
404 then_b
= gfc_finish_block (&tmpblock
);
405 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
406 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
407 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
408 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
409 tem
= unshare_expr (declf
);
414 tem
= fold_convert (pvoid_type_node
, tem
);
415 tem
= fold_build2_loc (input_location
, NE_EXPR
,
416 boolean_type_node
, tem
,
418 then_b
= build3_loc (input_location
, COND_EXPR
, void_type_node
,
420 build_empty_stmt (input_location
));
422 gfc_add_expr_to_block (&block
, then_b
);
424 if (kind
== WALK_ALLOC_COMPS_DTOR
)
426 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
427 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
429 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
430 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
,
431 NULL_TREE
, NULL_TREE
, true,
433 GFC_CAF_COARRAY_NOCOARRAY
);
434 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
436 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
438 tem
= gfc_call_free (unshare_expr (declf
));
439 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
444 return gfc_finish_block (&block
);
447 /* Return code to initialize DECL with its default constructor, or
448 NULL if there's nothing to do. */
451 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
453 tree type
= TREE_TYPE (decl
), size
, ptr
, cond
, then_b
, else_b
;
454 stmtblock_t block
, cond_block
;
456 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_PRIVATE
457 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LASTPRIVATE
458 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
459 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_REDUCTION
);
461 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
462 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
463 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
465 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
468 gfc_start_block (&block
);
469 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
470 OMP_CLAUSE_DECL (clause
),
471 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
472 gfc_add_expr_to_block (&block
, tem
);
473 return gfc_finish_block (&block
);
478 gcc_assert (outer
!= NULL_TREE
);
480 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
481 "not currently allocated" allocation status if outer
482 array is "not currently allocated", otherwise should be allocated. */
483 gfc_start_block (&block
);
485 gfc_init_block (&cond_block
);
487 if (GFC_DESCRIPTOR_TYPE_P (type
))
489 gfc_add_modify (&cond_block
, decl
, outer
);
490 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
491 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
492 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
494 gfc_conv_descriptor_lbound_get (decl
, rank
));
495 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
496 size
, gfc_index_one_node
);
497 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
498 size
= fold_build2_loc (input_location
, MULT_EXPR
,
499 gfc_array_index_type
, size
,
500 gfc_conv_descriptor_stride_get (decl
, rank
));
501 tree esize
= fold_convert (gfc_array_index_type
,
502 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
503 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
505 size
= unshare_expr (size
);
506 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
510 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
511 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
512 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
513 if (GFC_DESCRIPTOR_TYPE_P (type
))
514 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
), ptr
);
516 gfc_add_modify (&cond_block
, unshare_expr (decl
),
517 fold_convert (TREE_TYPE (decl
), ptr
));
518 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
520 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
521 OMP_CLAUSE_DECL (clause
),
522 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
523 gfc_add_expr_to_block (&cond_block
, tem
);
525 then_b
= gfc_finish_block (&cond_block
);
527 /* Reduction clause requires allocated ALLOCATABLE. */
528 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_REDUCTION
)
530 gfc_init_block (&cond_block
);
531 if (GFC_DESCRIPTOR_TYPE_P (type
))
532 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
),
535 gfc_add_modify (&cond_block
, unshare_expr (decl
),
536 build_zero_cst (TREE_TYPE (decl
)));
537 else_b
= gfc_finish_block (&cond_block
);
539 tree tem
= fold_convert (pvoid_type_node
,
540 GFC_DESCRIPTOR_TYPE_P (type
)
541 ? gfc_conv_descriptor_data_get (outer
) : outer
);
542 tem
= unshare_expr (tem
);
543 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
544 tem
, null_pointer_node
);
545 gfc_add_expr_to_block (&block
,
546 build3_loc (input_location
, COND_EXPR
,
547 void_type_node
, cond
, then_b
,
551 gfc_add_expr_to_block (&block
, then_b
);
553 return gfc_finish_block (&block
);
556 /* Build and return code for a copy constructor from SRC to DEST. */
559 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
561 tree type
= TREE_TYPE (dest
), ptr
, size
, call
;
562 tree cond
, then_b
, else_b
;
563 stmtblock_t block
, cond_block
;
565 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
566 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
568 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
569 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
570 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
572 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
574 gfc_start_block (&block
);
575 gfc_add_modify (&block
, dest
, src
);
576 tree tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
577 WALK_ALLOC_COMPS_COPY_CTOR
);
578 gfc_add_expr_to_block (&block
, tem
);
579 return gfc_finish_block (&block
);
582 return build2_v (MODIFY_EXPR
, dest
, src
);
585 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
586 and copied from SRC. */
587 gfc_start_block (&block
);
589 gfc_init_block (&cond_block
);
591 gfc_add_modify (&cond_block
, dest
, src
);
592 if (GFC_DESCRIPTOR_TYPE_P (type
))
594 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
595 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
596 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
598 gfc_conv_descriptor_lbound_get (dest
, rank
));
599 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
600 size
, gfc_index_one_node
);
601 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
602 size
= fold_build2_loc (input_location
, MULT_EXPR
,
603 gfc_array_index_type
, size
,
604 gfc_conv_descriptor_stride_get (dest
, rank
));
605 tree esize
= fold_convert (gfc_array_index_type
,
606 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
607 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
609 size
= unshare_expr (size
);
610 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
614 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
615 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
616 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
617 if (GFC_DESCRIPTOR_TYPE_P (type
))
618 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
), ptr
);
620 gfc_add_modify (&cond_block
, unshare_expr (dest
),
621 fold_convert (TREE_TYPE (dest
), ptr
));
623 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
624 ? gfc_conv_descriptor_data_get (src
) : src
;
625 srcptr
= unshare_expr (srcptr
);
626 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
627 call
= build_call_expr_loc (input_location
,
628 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
630 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
631 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
633 tree tem
= gfc_walk_alloc_comps (src
, dest
,
634 OMP_CLAUSE_DECL (clause
),
635 WALK_ALLOC_COMPS_COPY_CTOR
);
636 gfc_add_expr_to_block (&cond_block
, tem
);
638 then_b
= gfc_finish_block (&cond_block
);
640 gfc_init_block (&cond_block
);
641 if (GFC_DESCRIPTOR_TYPE_P (type
))
642 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
),
645 gfc_add_modify (&cond_block
, unshare_expr (dest
),
646 build_zero_cst (TREE_TYPE (dest
)));
647 else_b
= gfc_finish_block (&cond_block
);
649 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
650 unshare_expr (srcptr
), null_pointer_node
);
651 gfc_add_expr_to_block (&block
,
652 build3_loc (input_location
, COND_EXPR
,
653 void_type_node
, cond
, then_b
, else_b
));
655 return gfc_finish_block (&block
);
658 /* Similarly, except use an intrinsic or pointer assignment operator
662 gfc_omp_clause_assign_op (tree clause
, tree dest
, tree src
)
664 tree type
= TREE_TYPE (dest
), ptr
, size
, call
, nonalloc
;
665 tree cond
, then_b
, else_b
;
666 stmtblock_t block
, cond_block
, cond_block2
, inner_block
;
668 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
669 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
670 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
672 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
674 gfc_start_block (&block
);
675 /* First dealloc any allocatable components in DEST. */
676 tree tem
= gfc_walk_alloc_comps (dest
, NULL_TREE
,
677 OMP_CLAUSE_DECL (clause
),
678 WALK_ALLOC_COMPS_DTOR
);
679 gfc_add_expr_to_block (&block
, tem
);
680 /* Then copy over toplevel data. */
681 gfc_add_modify (&block
, dest
, src
);
682 /* Finally allocate any allocatable components and copy. */
683 tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
684 WALK_ALLOC_COMPS_COPY_CTOR
);
685 gfc_add_expr_to_block (&block
, tem
);
686 return gfc_finish_block (&block
);
689 return build2_v (MODIFY_EXPR
, dest
, src
);
692 gfc_start_block (&block
);
694 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
696 then_b
= gfc_walk_alloc_comps (dest
, NULL_TREE
, OMP_CLAUSE_DECL (clause
),
697 WALK_ALLOC_COMPS_DTOR
);
698 tree tem
= fold_convert (pvoid_type_node
,
699 GFC_DESCRIPTOR_TYPE_P (type
)
700 ? gfc_conv_descriptor_data_get (dest
) : dest
);
701 tem
= unshare_expr (tem
);
702 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
703 tem
, null_pointer_node
);
704 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
705 then_b
, build_empty_stmt (input_location
));
706 gfc_add_expr_to_block (&block
, tem
);
709 gfc_init_block (&cond_block
);
711 if (GFC_DESCRIPTOR_TYPE_P (type
))
713 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
714 size
= gfc_conv_descriptor_ubound_get (src
, rank
);
715 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
717 gfc_conv_descriptor_lbound_get (src
, rank
));
718 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
719 size
, gfc_index_one_node
);
720 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
721 size
= fold_build2_loc (input_location
, MULT_EXPR
,
722 gfc_array_index_type
, size
,
723 gfc_conv_descriptor_stride_get (src
, rank
));
724 tree esize
= fold_convert (gfc_array_index_type
,
725 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
726 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
728 size
= unshare_expr (size
);
729 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
733 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
734 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
736 tree destptr
= GFC_DESCRIPTOR_TYPE_P (type
)
737 ? gfc_conv_descriptor_data_get (dest
) : dest
;
738 destptr
= unshare_expr (destptr
);
739 destptr
= fold_convert (pvoid_type_node
, destptr
);
740 gfc_add_modify (&cond_block
, ptr
, destptr
);
742 nonalloc
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
,
743 destptr
, null_pointer_node
);
745 if (GFC_DESCRIPTOR_TYPE_P (type
))
748 for (i
= 0; i
< GFC_TYPE_ARRAY_RANK (type
); i
++)
750 tree rank
= gfc_rank_cst
[i
];
751 tree tem
= gfc_conv_descriptor_ubound_get (src
, rank
);
752 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
753 gfc_array_index_type
, tem
,
754 gfc_conv_descriptor_lbound_get (src
, rank
));
755 tem
= fold_build2_loc (input_location
, PLUS_EXPR
,
756 gfc_array_index_type
, tem
,
757 gfc_conv_descriptor_lbound_get (dest
, rank
));
758 tem
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
759 tem
, gfc_conv_descriptor_ubound_get (dest
,
761 cond
= fold_build2_loc (input_location
, TRUTH_ORIF_EXPR
,
762 boolean_type_node
, cond
, tem
);
766 gfc_init_block (&cond_block2
);
768 if (GFC_DESCRIPTOR_TYPE_P (type
))
770 gfc_init_block (&inner_block
);
771 gfc_allocate_using_malloc (&inner_block
, ptr
, size
, NULL_TREE
);
772 then_b
= gfc_finish_block (&inner_block
);
774 gfc_init_block (&inner_block
);
775 gfc_add_modify (&inner_block
, ptr
,
776 gfc_call_realloc (&inner_block
, ptr
, size
));
777 else_b
= gfc_finish_block (&inner_block
);
779 gfc_add_expr_to_block (&cond_block2
,
780 build3_loc (input_location
, COND_EXPR
,
782 unshare_expr (nonalloc
),
784 gfc_add_modify (&cond_block2
, dest
, src
);
785 gfc_conv_descriptor_data_set (&cond_block2
, unshare_expr (dest
), ptr
);
789 gfc_allocate_using_malloc (&cond_block2
, ptr
, size
, NULL_TREE
);
790 gfc_add_modify (&cond_block2
, unshare_expr (dest
),
791 fold_convert (type
, ptr
));
793 then_b
= gfc_finish_block (&cond_block2
);
794 else_b
= build_empty_stmt (input_location
);
796 gfc_add_expr_to_block (&cond_block
,
797 build3_loc (input_location
, COND_EXPR
,
798 void_type_node
, unshare_expr (cond
),
801 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
802 ? gfc_conv_descriptor_data_get (src
) : src
;
803 srcptr
= unshare_expr (srcptr
);
804 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
805 call
= build_call_expr_loc (input_location
,
806 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
808 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
809 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
811 tree tem
= gfc_walk_alloc_comps (src
, dest
,
812 OMP_CLAUSE_DECL (clause
),
813 WALK_ALLOC_COMPS_COPY_CTOR
);
814 gfc_add_expr_to_block (&cond_block
, tem
);
816 then_b
= gfc_finish_block (&cond_block
);
818 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_COPYIN
)
820 gfc_init_block (&cond_block
);
821 if (GFC_DESCRIPTOR_TYPE_P (type
))
823 tree tmp
= gfc_conv_descriptor_data_get (unshare_expr (dest
));
824 tmp
= gfc_deallocate_with_status (tmp
, NULL_TREE
, NULL_TREE
,
825 NULL_TREE
, NULL_TREE
, true, NULL
,
826 GFC_CAF_COARRAY_NOCOARRAY
);
827 gfc_add_expr_to_block (&cond_block
, tmp
);
831 destptr
= gfc_evaluate_now (destptr
, &cond_block
);
832 gfc_add_expr_to_block (&cond_block
, gfc_call_free (destptr
));
833 gfc_add_modify (&cond_block
, unshare_expr (dest
),
834 build_zero_cst (TREE_TYPE (dest
)));
836 else_b
= gfc_finish_block (&cond_block
);
838 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
839 unshare_expr (srcptr
), null_pointer_node
);
840 gfc_add_expr_to_block (&block
,
841 build3_loc (input_location
, COND_EXPR
,
842 void_type_node
, cond
,
846 gfc_add_expr_to_block (&block
, then_b
);
848 return gfc_finish_block (&block
);
852 gfc_omp_linear_clause_add_loop (stmtblock_t
*block
, tree dest
, tree src
,
853 tree add
, tree nelems
)
855 stmtblock_t tmpblock
;
856 tree desta
, srca
, index
= gfc_create_var (gfc_array_index_type
, "S");
857 nelems
= gfc_evaluate_now (nelems
, block
);
859 gfc_init_block (&tmpblock
);
860 if (TREE_CODE (TREE_TYPE (dest
)) == ARRAY_TYPE
)
862 desta
= gfc_build_array_ref (dest
, index
, NULL
);
863 srca
= gfc_build_array_ref (src
, index
, NULL
);
867 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest
)));
868 tree idx
= fold_build2 (MULT_EXPR
, sizetype
,
869 fold_convert (sizetype
, index
),
870 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest
))));
871 desta
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
872 TREE_TYPE (dest
), dest
,
874 srca
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
875 TREE_TYPE (src
), src
,
878 gfc_add_modify (&tmpblock
, desta
,
879 fold_build2 (PLUS_EXPR
, TREE_TYPE (desta
),
883 gfc_init_loopinfo (&loop
);
885 loop
.from
[0] = gfc_index_zero_node
;
886 loop
.loopvar
[0] = index
;
888 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
889 gfc_add_block_to_block (block
, &loop
.pre
);
892 /* Build and return code for a constructor of DEST that initializes
893 it to SRC plus ADD (ADD is scalar integer). */
896 gfc_omp_clause_linear_ctor (tree clause
, tree dest
, tree src
, tree add
)
898 tree type
= TREE_TYPE (dest
), ptr
, size
, nelems
= NULL_TREE
;
901 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
903 gfc_start_block (&block
);
904 add
= gfc_evaluate_now (add
, &block
);
906 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
907 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
908 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
910 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
911 if (!TYPE_DOMAIN (type
)
912 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
913 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
914 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
916 nelems
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
917 TYPE_SIZE_UNIT (type
),
918 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
919 nelems
= size_binop (MINUS_EXPR
, nelems
, size_one_node
);
922 nelems
= array_type_nelts (type
);
923 nelems
= fold_convert (gfc_array_index_type
, nelems
);
925 gfc_omp_linear_clause_add_loop (&block
, dest
, src
, add
, nelems
);
926 return gfc_finish_block (&block
);
929 /* Allocatable arrays in LINEAR clauses need to be allocated
930 and copied from SRC. */
931 gfc_add_modify (&block
, dest
, src
);
932 if (GFC_DESCRIPTOR_TYPE_P (type
))
934 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
935 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
936 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
938 gfc_conv_descriptor_lbound_get (dest
, rank
));
939 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
940 size
, gfc_index_one_node
);
941 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
942 size
= fold_build2_loc (input_location
, MULT_EXPR
,
943 gfc_array_index_type
, size
,
944 gfc_conv_descriptor_stride_get (dest
, rank
));
945 tree esize
= fold_convert (gfc_array_index_type
,
946 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
947 nelems
= gfc_evaluate_now (unshare_expr (size
), &block
);
948 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
949 nelems
, unshare_expr (esize
));
950 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
952 nelems
= fold_build2_loc (input_location
, MINUS_EXPR
,
953 gfc_array_index_type
, nelems
,
957 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
958 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
959 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
960 if (GFC_DESCRIPTOR_TYPE_P (type
))
962 gfc_conv_descriptor_data_set (&block
, unshare_expr (dest
), ptr
);
963 tree etype
= gfc_get_element_type (type
);
964 ptr
= fold_convert (build_pointer_type (etype
), ptr
);
965 tree srcptr
= gfc_conv_descriptor_data_get (unshare_expr (src
));
966 srcptr
= fold_convert (build_pointer_type (etype
), srcptr
);
967 gfc_omp_linear_clause_add_loop (&block
, ptr
, srcptr
, add
, nelems
);
971 gfc_add_modify (&block
, unshare_expr (dest
),
972 fold_convert (TREE_TYPE (dest
), ptr
));
973 ptr
= fold_convert (TREE_TYPE (dest
), ptr
);
974 tree dstm
= build_fold_indirect_ref (ptr
);
975 tree srcm
= build_fold_indirect_ref (unshare_expr (src
));
976 gfc_add_modify (&block
, dstm
,
977 fold_build2 (PLUS_EXPR
, TREE_TYPE (add
), srcm
, add
));
979 return gfc_finish_block (&block
);
982 /* Build and return code destructing DECL. Return NULL if nothing
986 gfc_omp_clause_dtor (tree clause
, tree decl
)
988 tree type
= TREE_TYPE (decl
), tem
;
990 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
991 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
992 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
994 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
995 return gfc_walk_alloc_comps (decl
, NULL_TREE
,
996 OMP_CLAUSE_DECL (clause
),
997 WALK_ALLOC_COMPS_DTOR
);
1001 if (GFC_DESCRIPTOR_TYPE_P (type
))
1003 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
1004 to be deallocated if they were allocated. */
1005 tem
= gfc_conv_descriptor_data_get (decl
);
1006 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
, NULL_TREE
,
1007 NULL_TREE
, true, NULL
,
1008 GFC_CAF_COARRAY_NOCOARRAY
);
1011 tem
= gfc_call_free (decl
);
1012 tem
= gfc_omp_unshare_expr (tem
);
1014 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1019 gfc_init_block (&block
);
1020 gfc_add_expr_to_block (&block
,
1021 gfc_walk_alloc_comps (decl
, NULL_TREE
,
1022 OMP_CLAUSE_DECL (clause
),
1023 WALK_ALLOC_COMPS_DTOR
));
1024 gfc_add_expr_to_block (&block
, tem
);
1025 then_b
= gfc_finish_block (&block
);
1027 tem
= fold_convert (pvoid_type_node
,
1028 GFC_DESCRIPTOR_TYPE_P (type
)
1029 ? gfc_conv_descriptor_data_get (decl
) : decl
);
1030 tem
= unshare_expr (tem
);
1031 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1032 tem
, null_pointer_node
);
1033 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1034 then_b
, build_empty_stmt (input_location
));
1041 gfc_omp_finish_clause (tree c
, gimple_seq
*pre_p
)
1043 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
1046 tree decl
= OMP_CLAUSE_DECL (c
);
1048 /* Assumed-size arrays can't be mapped implicitly, they have to be
1049 mapped explicitly using array sections. */
1050 if (TREE_CODE (decl
) == PARM_DECL
1051 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
1052 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
1053 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
1054 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
1057 error_at (OMP_CLAUSE_LOCATION (c
),
1058 "implicit mapping of assumed size array %qD", decl
);
1062 tree c2
= NULL_TREE
, c3
= NULL_TREE
, c4
= NULL_TREE
;
1063 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
1065 if (!gfc_omp_privatize_by_reference (decl
)
1066 && !GFC_DECL_GET_SCALAR_POINTER (decl
)
1067 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1068 && !GFC_DECL_CRAY_POINTEE (decl
)
1069 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
1071 tree orig_decl
= decl
;
1072 c4
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1073 OMP_CLAUSE_SET_MAP_KIND (c4
, GOMP_MAP_POINTER
);
1074 OMP_CLAUSE_DECL (c4
) = decl
;
1075 OMP_CLAUSE_SIZE (c4
) = size_int (0);
1076 decl
= build_fold_indirect_ref (decl
);
1077 OMP_CLAUSE_DECL (c
) = decl
;
1078 OMP_CLAUSE_SIZE (c
) = NULL_TREE
;
1079 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1080 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1081 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1083 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1084 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1085 OMP_CLAUSE_DECL (c3
) = unshare_expr (decl
);
1086 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1087 decl
= build_fold_indirect_ref (decl
);
1088 OMP_CLAUSE_DECL (c
) = decl
;
1091 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1094 gfc_start_block (&block
);
1095 tree type
= TREE_TYPE (decl
);
1096 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1097 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
1098 ptr
= build_fold_indirect_ref (ptr
);
1099 OMP_CLAUSE_DECL (c
) = ptr
;
1100 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1101 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_TO_PSET
);
1102 OMP_CLAUSE_DECL (c2
) = decl
;
1103 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (type
);
1104 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1105 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1106 OMP_CLAUSE_DECL (c3
) = gfc_conv_descriptor_data_get (decl
);
1107 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1108 tree size
= create_tmp_var (gfc_array_index_type
);
1109 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
1110 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
1111 if (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
1112 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
)
1114 stmtblock_t cond_block
;
1115 tree tem
, then_b
, else_b
, zero
, cond
;
1117 gfc_init_block (&cond_block
);
1118 tem
= gfc_full_array_size (&cond_block
, decl
,
1119 GFC_TYPE_ARRAY_RANK (type
));
1120 gfc_add_modify (&cond_block
, size
, tem
);
1121 gfc_add_modify (&cond_block
, size
,
1122 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1124 then_b
= gfc_finish_block (&cond_block
);
1125 gfc_init_block (&cond_block
);
1126 zero
= build_int_cst (gfc_array_index_type
, 0);
1127 gfc_add_modify (&cond_block
, size
, zero
);
1128 else_b
= gfc_finish_block (&cond_block
);
1129 tem
= gfc_conv_descriptor_data_get (decl
);
1130 tem
= fold_convert (pvoid_type_node
, tem
);
1131 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1132 boolean_type_node
, tem
, null_pointer_node
);
1133 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1134 void_type_node
, cond
,
1139 gfc_add_modify (&block
, size
,
1140 gfc_full_array_size (&block
, decl
,
1141 GFC_TYPE_ARRAY_RANK (type
)));
1142 gfc_add_modify (&block
, size
,
1143 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1146 OMP_CLAUSE_SIZE (c
) = size
;
1147 tree stmt
= gfc_finish_block (&block
);
1148 gimplify_and_add (stmt
, pre_p
);
1151 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
1153 = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
1154 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1157 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (last
);
1158 OMP_CLAUSE_CHAIN (last
) = c2
;
1163 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (last
);
1164 OMP_CLAUSE_CHAIN (last
) = c3
;
1169 OMP_CLAUSE_CHAIN (c4
) = OMP_CLAUSE_CHAIN (last
);
1170 OMP_CLAUSE_CHAIN (last
) = c4
;
1176 /* Return true if DECL is a scalar variable (for the purpose of
1177 implicit firstprivatization). */
1180 gfc_omp_scalar_p (tree decl
)
1182 tree type
= TREE_TYPE (decl
);
1183 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1184 type
= TREE_TYPE (type
);
1185 if (TREE_CODE (type
) == POINTER_TYPE
)
1187 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1188 || GFC_DECL_GET_SCALAR_POINTER (decl
))
1189 type
= TREE_TYPE (type
);
1190 if (GFC_ARRAY_TYPE_P (type
)
1191 || GFC_CLASS_TYPE_P (type
))
1194 if (TYPE_STRING_FLAG (type
))
1196 if (INTEGRAL_TYPE_P (type
)
1197 || SCALAR_FLOAT_TYPE_P (type
)
1198 || COMPLEX_FLOAT_TYPE_P (type
))
1204 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1205 disregarded in OpenMP construct, because it is going to be
1206 remapped during OpenMP lowering. SHARED is true if DECL
1207 is going to be shared, false if it is going to be privatized. */
1210 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
1212 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1213 && DECL_HAS_VALUE_EXPR_P (decl
))
1215 tree value
= DECL_VALUE_EXPR (decl
);
1217 if (TREE_CODE (value
) == COMPONENT_REF
1218 && VAR_P (TREE_OPERAND (value
, 0))
1219 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1221 /* If variable in COMMON or EQUIVALENCE is privatized, return
1222 true, as just that variable is supposed to be privatized,
1223 not the whole COMMON or whole EQUIVALENCE.
1224 For shared variables in COMMON or EQUIVALENCE, let them be
1225 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1226 from the same COMMON or EQUIVALENCE just one sharing of the
1227 whole COMMON or EQUIVALENCE is enough. */
1232 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
1238 /* Return true if DECL that is shared iff SHARED is true should
1239 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1243 gfc_omp_private_debug_clause (tree decl
, bool shared
)
1245 if (GFC_DECL_CRAY_POINTEE (decl
))
1248 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1249 && DECL_HAS_VALUE_EXPR_P (decl
))
1251 tree value
= DECL_VALUE_EXPR (decl
);
1253 if (TREE_CODE (value
) == COMPONENT_REF
1254 && VAR_P (TREE_OPERAND (value
, 0))
1255 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1262 /* Register language specific type size variables as potentially OpenMP
1263 firstprivate variables. */
1266 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
1268 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
1272 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
1273 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
1275 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
1276 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
1277 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
1279 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
1280 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
1286 gfc_trans_add_clause (tree node
, tree tail
)
1288 OMP_CLAUSE_CHAIN (node
) = tail
;
1293 gfc_trans_omp_variable (gfc_symbol
*sym
, bool declare_simd
)
1298 gfc_symbol
*proc_sym
;
1299 gfc_formal_arglist
*f
;
1301 gcc_assert (sym
->attr
.dummy
);
1302 proc_sym
= sym
->ns
->proc_name
;
1303 if (proc_sym
->attr
.entry_master
)
1305 if (gfc_return_by_reference (proc_sym
))
1308 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1311 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1317 return build_int_cst (integer_type_node
, cnt
);
1320 tree t
= gfc_get_symbol_decl (sym
);
1324 bool alternate_entry
;
1327 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1328 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1329 && sym
->result
== sym
;
1330 entry_master
= sym
->attr
.result
1331 && sym
->ns
->proc_name
->attr
.entry_master
1332 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1333 parent_decl
= current_function_decl
1334 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1336 if ((t
== parent_decl
&& return_value
)
1337 || (sym
->ns
&& sym
->ns
->proc_name
1338 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1339 && (alternate_entry
|| entry_master
)))
1344 /* Special case for assigning the return value of a function.
1345 Self recursive functions must have an explicit return value. */
1346 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1347 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1349 /* Similarly for alternate entry points. */
1350 else if (alternate_entry
1351 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1354 gfc_entry_list
*el
= NULL
;
1356 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1359 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1364 else if (entry_master
1365 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1367 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1373 gfc_trans_omp_variable_list (enum omp_clause_code code
,
1374 gfc_omp_namelist
*namelist
, tree list
,
1377 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1378 if (namelist
->sym
->attr
.referenced
|| declare_simd
)
1380 tree t
= gfc_trans_omp_variable (namelist
->sym
, declare_simd
);
1381 if (t
!= error_mark_node
)
1383 tree node
= build_omp_clause (input_location
, code
);
1384 OMP_CLAUSE_DECL (node
) = t
;
1385 list
= gfc_trans_add_clause (node
, list
);
1391 struct omp_udr_find_orig_data
1393 gfc_omp_udr
*omp_udr
;
1398 omp_udr_find_orig (gfc_expr
**e
, int *walk_subtrees ATTRIBUTE_UNUSED
,
1401 struct omp_udr_find_orig_data
*cd
= (struct omp_udr_find_orig_data
*) data
;
1402 if ((*e
)->expr_type
== EXPR_VARIABLE
1403 && (*e
)->symtree
->n
.sym
== cd
->omp_udr
->omp_orig
)
1404 cd
->omp_orig_seen
= true;
1410 gfc_trans_omp_array_reduction_or_udr (tree c
, gfc_omp_namelist
*n
, locus where
)
1412 gfc_symbol
*sym
= n
->sym
;
1413 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
1414 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
1415 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
1416 gfc_symbol omp_var_copy
[4];
1417 gfc_expr
*e1
, *e2
, *e3
, *e4
;
1419 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
1420 locus old_loc
= gfc_current_locus
;
1423 gfc_omp_udr
*udr
= n
->udr
? n
->udr
->udr
: NULL
;
1425 decl
= OMP_CLAUSE_DECL (c
);
1426 gfc_current_locus
= where
;
1427 type
= TREE_TYPE (decl
);
1428 outer_decl
= create_tmp_var_raw (type
);
1429 if (TREE_CODE (decl
) == PARM_DECL
1430 && TREE_CODE (type
) == REFERENCE_TYPE
1431 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
1432 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
1434 decl
= build_fold_indirect_ref (decl
);
1435 type
= TREE_TYPE (type
);
1438 /* Create a fake symbol for init value. */
1439 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
1440 init_val_sym
.ns
= sym
->ns
;
1441 init_val_sym
.name
= sym
->name
;
1442 init_val_sym
.ts
= sym
->ts
;
1443 init_val_sym
.attr
.referenced
= 1;
1444 init_val_sym
.declared_at
= where
;
1445 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
1446 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1447 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
1448 else if (udr
->initializer_ns
)
1449 backend_decl
= NULL
;
1451 switch (sym
->ts
.type
)
1457 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
1460 backend_decl
= NULL_TREE
;
1463 init_val_sym
.backend_decl
= backend_decl
;
1465 /* Create a fake symbol for the outer array reference. */
1468 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
1469 outer_sym
.attr
.dummy
= 0;
1470 outer_sym
.attr
.result
= 0;
1471 outer_sym
.attr
.flavor
= FL_VARIABLE
;
1472 outer_sym
.backend_decl
= outer_decl
;
1473 if (decl
!= OMP_CLAUSE_DECL (c
))
1474 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
1476 /* Create fake symtrees for it. */
1477 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
1478 symtree1
->n
.sym
= sym
;
1479 gcc_assert (symtree1
== root1
);
1481 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
1482 symtree2
->n
.sym
= &init_val_sym
;
1483 gcc_assert (symtree2
== root2
);
1485 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
1486 symtree3
->n
.sym
= &outer_sym
;
1487 gcc_assert (symtree3
== root3
);
1489 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
1492 omp_var_copy
[0] = *udr
->omp_out
;
1493 omp_var_copy
[1] = *udr
->omp_in
;
1494 *udr
->omp_out
= outer_sym
;
1495 *udr
->omp_in
= *sym
;
1496 if (udr
->initializer_ns
)
1498 omp_var_copy
[2] = *udr
->omp_priv
;
1499 omp_var_copy
[3] = *udr
->omp_orig
;
1500 *udr
->omp_priv
= *sym
;
1501 *udr
->omp_orig
= outer_sym
;
1505 /* Create expressions. */
1506 e1
= gfc_get_expr ();
1507 e1
->expr_type
= EXPR_VARIABLE
;
1509 e1
->symtree
= symtree1
;
1511 if (sym
->attr
.dimension
)
1513 e1
->ref
= ref
= gfc_get_ref ();
1514 ref
->type
= REF_ARRAY
;
1515 ref
->u
.ar
.where
= where
;
1516 ref
->u
.ar
.as
= sym
->as
;
1517 ref
->u
.ar
.type
= AR_FULL
;
1518 ref
->u
.ar
.dimen
= 0;
1520 t
= gfc_resolve_expr (e1
);
1524 if (backend_decl
!= NULL_TREE
)
1526 e2
= gfc_get_expr ();
1527 e2
->expr_type
= EXPR_VARIABLE
;
1529 e2
->symtree
= symtree2
;
1531 t
= gfc_resolve_expr (e2
);
1534 else if (udr
->initializer_ns
== NULL
)
1536 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
1537 e2
= gfc_default_initializer (&sym
->ts
);
1539 t
= gfc_resolve_expr (e2
);
1542 else if (n
->udr
->initializer
->op
== EXEC_ASSIGN
)
1544 e2
= gfc_copy_expr (n
->udr
->initializer
->expr2
);
1545 t
= gfc_resolve_expr (e2
);
1548 if (udr
&& udr
->initializer_ns
)
1550 struct omp_udr_find_orig_data cd
;
1552 cd
.omp_orig_seen
= false;
1553 gfc_code_walker (&n
->udr
->initializer
,
1554 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
1555 if (cd
.omp_orig_seen
)
1556 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
1559 e3
= gfc_copy_expr (e1
);
1560 e3
->symtree
= symtree3
;
1561 t
= gfc_resolve_expr (e3
);
1566 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
1570 e4
= gfc_add (e3
, e1
);
1573 e4
= gfc_multiply (e3
, e1
);
1575 case TRUTH_ANDIF_EXPR
:
1576 e4
= gfc_and (e3
, e1
);
1578 case TRUTH_ORIF_EXPR
:
1579 e4
= gfc_or (e3
, e1
);
1582 e4
= gfc_eqv (e3
, e1
);
1585 e4
= gfc_neqv (e3
, e1
);
1603 if (n
->udr
->combiner
->op
== EXEC_ASSIGN
)
1606 e3
= gfc_copy_expr (n
->udr
->combiner
->expr1
);
1607 e4
= gfc_copy_expr (n
->udr
->combiner
->expr2
);
1608 t
= gfc_resolve_expr (e3
);
1610 t
= gfc_resolve_expr (e4
);
1619 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
1620 intrinsic_sym
.ns
= sym
->ns
;
1621 intrinsic_sym
.name
= iname
;
1622 intrinsic_sym
.ts
= sym
->ts
;
1623 intrinsic_sym
.attr
.referenced
= 1;
1624 intrinsic_sym
.attr
.intrinsic
= 1;
1625 intrinsic_sym
.attr
.function
= 1;
1626 intrinsic_sym
.result
= &intrinsic_sym
;
1627 intrinsic_sym
.declared_at
= where
;
1629 symtree4
= gfc_new_symtree (&root4
, iname
);
1630 symtree4
->n
.sym
= &intrinsic_sym
;
1631 gcc_assert (symtree4
== root4
);
1633 e4
= gfc_get_expr ();
1634 e4
->expr_type
= EXPR_FUNCTION
;
1636 e4
->symtree
= symtree4
;
1637 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
1638 e4
->value
.function
.actual
->expr
= e3
;
1639 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
1640 e4
->value
.function
.actual
->next
->expr
= e1
;
1642 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1644 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1645 e1
= gfc_copy_expr (e1
);
1646 e3
= gfc_copy_expr (e3
);
1647 t
= gfc_resolve_expr (e4
);
1651 /* Create the init statement list. */
1654 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
1656 stmt
= gfc_trans_call (n
->udr
->initializer
, false,
1657 NULL_TREE
, NULL_TREE
, false);
1658 if (TREE_CODE (stmt
) != BIND_EXPR
)
1659 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1662 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
1664 /* Create the merge statement list. */
1667 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
1669 stmt
= gfc_trans_call (n
->udr
->combiner
, false,
1670 NULL_TREE
, NULL_TREE
, false);
1671 if (TREE_CODE (stmt
) != BIND_EXPR
)
1672 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1675 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
1677 /* And stick the placeholder VAR_DECL into the clause as well. */
1678 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
1680 gfc_current_locus
= old_loc
;
1693 gfc_free_array_spec (outer_sym
.as
);
1697 *udr
->omp_out
= omp_var_copy
[0];
1698 *udr
->omp_in
= omp_var_copy
[1];
1699 if (udr
->initializer_ns
)
1701 *udr
->omp_priv
= omp_var_copy
[2];
1702 *udr
->omp_orig
= omp_var_copy
[3];
1708 gfc_trans_omp_reduction_list (gfc_omp_namelist
*namelist
, tree list
,
1709 locus where
, bool mark_addressable
)
1711 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1712 if (namelist
->sym
->attr
.referenced
)
1714 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
1715 if (t
!= error_mark_node
)
1717 tree node
= build_omp_clause (where
.lb
->location
,
1718 OMP_CLAUSE_REDUCTION
);
1719 OMP_CLAUSE_DECL (node
) = t
;
1720 if (mark_addressable
)
1721 TREE_ADDRESSABLE (t
) = 1;
1722 switch (namelist
->u
.reduction_op
)
1724 case OMP_REDUCTION_PLUS
:
1725 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
1727 case OMP_REDUCTION_MINUS
:
1728 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
1730 case OMP_REDUCTION_TIMES
:
1731 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
1733 case OMP_REDUCTION_AND
:
1734 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
1736 case OMP_REDUCTION_OR
:
1737 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
1739 case OMP_REDUCTION_EQV
:
1740 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
1742 case OMP_REDUCTION_NEQV
:
1743 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
1745 case OMP_REDUCTION_MAX
:
1746 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
1748 case OMP_REDUCTION_MIN
:
1749 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
1751 case OMP_REDUCTION_IAND
:
1752 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
1754 case OMP_REDUCTION_IOR
:
1755 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
1757 case OMP_REDUCTION_IEOR
:
1758 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
1760 case OMP_REDUCTION_USER
:
1761 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
1766 if (namelist
->sym
->attr
.dimension
1767 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
1768 || namelist
->sym
->attr
.allocatable
)
1769 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
1770 list
= gfc_trans_add_clause (node
, list
);
1777 gfc_convert_expr_to_tree (stmtblock_t
*block
, gfc_expr
*expr
)
1782 gfc_init_se (&se
, NULL
);
1783 gfc_conv_expr (&se
, expr
);
1784 gfc_add_block_to_block (block
, &se
.pre
);
1785 result
= gfc_evaluate_now (se
.expr
, block
);
1786 gfc_add_block_to_block (block
, &se
.post
);
1791 static vec
<tree
, va_heap
, vl_embed
> *doacross_steps
;
1794 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
1795 locus where
, bool declare_simd
= false)
1797 tree omp_clauses
= NULL_TREE
, chunk_size
, c
;
1799 enum omp_clause_code clause_code
;
1802 if (clauses
== NULL
)
1805 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
1807 gfc_omp_namelist
*n
= clauses
->lists
[list
];
1813 case OMP_LIST_REDUCTION
:
1814 /* An OpenACC async clause indicates the need to set reduction
1815 arguments addressable, to allow asynchronous copy-out. */
1816 omp_clauses
= gfc_trans_omp_reduction_list (n
, omp_clauses
, where
,
1819 case OMP_LIST_PRIVATE
:
1820 clause_code
= OMP_CLAUSE_PRIVATE
;
1822 case OMP_LIST_SHARED
:
1823 clause_code
= OMP_CLAUSE_SHARED
;
1825 case OMP_LIST_FIRSTPRIVATE
:
1826 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
1828 case OMP_LIST_LASTPRIVATE
:
1829 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
1831 case OMP_LIST_COPYIN
:
1832 clause_code
= OMP_CLAUSE_COPYIN
;
1834 case OMP_LIST_COPYPRIVATE
:
1835 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
1837 case OMP_LIST_UNIFORM
:
1838 clause_code
= OMP_CLAUSE_UNIFORM
;
1840 case OMP_LIST_USE_DEVICE
:
1841 case OMP_LIST_USE_DEVICE_PTR
:
1842 clause_code
= OMP_CLAUSE_USE_DEVICE_PTR
;
1844 case OMP_LIST_IS_DEVICE_PTR
:
1845 clause_code
= OMP_CLAUSE_IS_DEVICE_PTR
;
1850 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
1853 case OMP_LIST_ALIGNED
:
1854 for (; n
!= NULL
; n
= n
->next
)
1855 if (n
->sym
->attr
.referenced
|| declare_simd
)
1857 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1858 if (t
!= error_mark_node
)
1860 tree node
= build_omp_clause (input_location
,
1861 OMP_CLAUSE_ALIGNED
);
1862 OMP_CLAUSE_DECL (node
) = t
;
1868 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
1871 gfc_init_se (&se
, NULL
);
1872 gfc_conv_expr (&se
, n
->expr
);
1873 gfc_add_block_to_block (block
, &se
.pre
);
1874 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
1875 gfc_add_block_to_block (block
, &se
.post
);
1877 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
1879 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1883 case OMP_LIST_LINEAR
:
1885 gfc_expr
*last_step_expr
= NULL
;
1886 tree last_step
= NULL_TREE
;
1887 bool last_step_parm
= false;
1889 for (; n
!= NULL
; n
= n
->next
)
1893 last_step_expr
= n
->expr
;
1894 last_step
= NULL_TREE
;
1895 last_step_parm
= false;
1897 if (n
->sym
->attr
.referenced
|| declare_simd
)
1899 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1900 if (t
!= error_mark_node
)
1902 tree node
= build_omp_clause (input_location
,
1904 OMP_CLAUSE_DECL (node
) = t
;
1905 omp_clause_linear_kind kind
;
1906 switch (n
->u
.linear_op
)
1908 case OMP_LINEAR_DEFAULT
:
1909 kind
= OMP_CLAUSE_LINEAR_DEFAULT
;
1911 case OMP_LINEAR_REF
:
1912 kind
= OMP_CLAUSE_LINEAR_REF
;
1914 case OMP_LINEAR_VAL
:
1915 kind
= OMP_CLAUSE_LINEAR_VAL
;
1917 case OMP_LINEAR_UVAL
:
1918 kind
= OMP_CLAUSE_LINEAR_UVAL
;
1923 OMP_CLAUSE_LINEAR_KIND (node
) = kind
;
1924 if (last_step_expr
&& last_step
== NULL_TREE
)
1928 gfc_init_se (&se
, NULL
);
1929 gfc_conv_expr (&se
, last_step_expr
);
1930 gfc_add_block_to_block (block
, &se
.pre
);
1931 last_step
= gfc_evaluate_now (se
.expr
, block
);
1932 gfc_add_block_to_block (block
, &se
.post
);
1934 else if (last_step_expr
->expr_type
== EXPR_VARIABLE
)
1936 gfc_symbol
*s
= last_step_expr
->symtree
->n
.sym
;
1937 last_step
= gfc_trans_omp_variable (s
, true);
1938 last_step_parm
= true;
1942 = gfc_conv_constant_to_tree (last_step_expr
);
1946 OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (node
) = 1;
1947 OMP_CLAUSE_LINEAR_STEP (node
) = last_step
;
1951 tree type
= gfc_typenode_for_spec (&n
->sym
->ts
);
1952 OMP_CLAUSE_LINEAR_STEP (node
)
1953 = fold_convert (type
, last_step
);
1955 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
1956 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
1957 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1963 case OMP_LIST_DEPEND
:
1964 for (; n
!= NULL
; n
= n
->next
)
1966 if (n
->u
.depend_op
== OMP_DEPEND_SINK_FIRST
)
1968 tree vec
= NULL_TREE
;
1972 tree addend
= integer_zero_node
, t
;
1976 addend
= gfc_conv_constant_to_tree (n
->expr
);
1977 if (TREE_CODE (addend
) == INTEGER_CST
1978 && tree_int_cst_sgn (addend
) == -1)
1981 addend
= const_unop (NEGATE_EXPR
,
1982 TREE_TYPE (addend
), addend
);
1985 t
= gfc_trans_omp_variable (n
->sym
, false);
1986 if (t
!= error_mark_node
)
1988 if (i
< vec_safe_length (doacross_steps
)
1989 && !integer_zerop (addend
)
1990 && (*doacross_steps
)[i
])
1992 tree step
= (*doacross_steps
)[i
];
1993 addend
= fold_convert (TREE_TYPE (step
), addend
);
1994 addend
= build2 (TRUNC_DIV_EXPR
,
1995 TREE_TYPE (step
), addend
, step
);
1997 vec
= tree_cons (addend
, t
, vec
);
1999 OMP_CLAUSE_DEPEND_SINK_NEGATIVE (vec
) = 1;
2002 || n
->next
->u
.depend_op
!= OMP_DEPEND_SINK
)
2006 if (vec
== NULL_TREE
)
2009 tree node
= build_omp_clause (input_location
,
2011 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_SINK
;
2012 OMP_CLAUSE_DECL (node
) = nreverse (vec
);
2013 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2017 if (!n
->sym
->attr
.referenced
)
2020 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_DEPEND
);
2021 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2023 tree decl
= gfc_get_symbol_decl (n
->sym
);
2024 if (gfc_omp_privatize_by_reference (decl
))
2025 decl
= build_fold_indirect_ref (decl
);
2026 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2028 decl
= gfc_conv_descriptor_data_get (decl
);
2029 decl
= fold_convert (build_pointer_type (char_type_node
),
2031 decl
= build_fold_indirect_ref (decl
);
2033 else if (DECL_P (decl
))
2034 TREE_ADDRESSABLE (decl
) = 1;
2035 OMP_CLAUSE_DECL (node
) = decl
;
2040 gfc_init_se (&se
, NULL
);
2041 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2043 gfc_conv_expr_reference (&se
, n
->expr
);
2048 gfc_conv_expr_descriptor (&se
, n
->expr
);
2049 ptr
= gfc_conv_array_data (se
.expr
);
2051 gfc_add_block_to_block (block
, &se
.pre
);
2052 gfc_add_block_to_block (block
, &se
.post
);
2053 ptr
= fold_convert (build_pointer_type (char_type_node
),
2055 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2057 switch (n
->u
.depend_op
)
2060 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
2062 case OMP_DEPEND_OUT
:
2063 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
2065 case OMP_DEPEND_INOUT
:
2066 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
2071 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2075 for (; n
!= NULL
; n
= n
->next
)
2077 if (!n
->sym
->attr
.referenced
)
2080 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2081 tree node2
= NULL_TREE
;
2082 tree node3
= NULL_TREE
;
2083 tree node4
= NULL_TREE
;
2084 tree decl
= gfc_get_symbol_decl (n
->sym
);
2086 TREE_ADDRESSABLE (decl
) = 1;
2087 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2089 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2090 && (gfc_omp_privatize_by_reference (decl
)
2091 || GFC_DECL_GET_SCALAR_POINTER (decl
)
2092 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
2093 || GFC_DECL_CRAY_POINTEE (decl
)
2094 || GFC_DESCRIPTOR_TYPE_P
2095 (TREE_TYPE (TREE_TYPE (decl
)))))
2097 tree orig_decl
= decl
;
2098 node4
= build_omp_clause (input_location
,
2100 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2101 OMP_CLAUSE_DECL (node4
) = decl
;
2102 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2103 decl
= build_fold_indirect_ref (decl
);
2104 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
2105 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
2106 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
2108 node3
= build_omp_clause (input_location
,
2110 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2111 OMP_CLAUSE_DECL (node3
) = decl
;
2112 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2113 decl
= build_fold_indirect_ref (decl
);
2116 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2118 tree type
= TREE_TYPE (decl
);
2119 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2120 ptr
= fold_convert (build_pointer_type (char_type_node
),
2122 ptr
= build_fold_indirect_ref (ptr
);
2123 OMP_CLAUSE_DECL (node
) = ptr
;
2124 node2
= build_omp_clause (input_location
,
2126 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2127 OMP_CLAUSE_DECL (node2
) = decl
;
2128 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2129 node3
= build_omp_clause (input_location
,
2131 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2132 OMP_CLAUSE_DECL (node3
)
2133 = gfc_conv_descriptor_data_get (decl
);
2134 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2136 /* We have to check for n->sym->attr.dimension because
2137 of scalar coarrays. */
2138 if (n
->sym
->attr
.pointer
&& n
->sym
->attr
.dimension
)
2140 stmtblock_t cond_block
;
2142 = gfc_create_var (gfc_array_index_type
, NULL
);
2143 tree tem
, then_b
, else_b
, zero
, cond
;
2145 gfc_init_block (&cond_block
);
2147 = gfc_full_array_size (&cond_block
, decl
,
2148 GFC_TYPE_ARRAY_RANK (type
));
2149 gfc_add_modify (&cond_block
, size
, tem
);
2150 then_b
= gfc_finish_block (&cond_block
);
2151 gfc_init_block (&cond_block
);
2152 zero
= build_int_cst (gfc_array_index_type
, 0);
2153 gfc_add_modify (&cond_block
, size
, zero
);
2154 else_b
= gfc_finish_block (&cond_block
);
2155 tem
= gfc_conv_descriptor_data_get (decl
);
2156 tem
= fold_convert (pvoid_type_node
, tem
);
2157 cond
= fold_build2_loc (input_location
, NE_EXPR
,
2159 tem
, null_pointer_node
);
2160 gfc_add_expr_to_block (block
,
2161 build3_loc (input_location
,
2166 OMP_CLAUSE_SIZE (node
) = size
;
2168 else if (n
->sym
->attr
.dimension
)
2169 OMP_CLAUSE_SIZE (node
)
2170 = gfc_full_array_size (block
, decl
,
2171 GFC_TYPE_ARRAY_RANK (type
));
2172 if (n
->sym
->attr
.dimension
)
2175 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2176 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2177 OMP_CLAUSE_SIZE (node
)
2178 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2179 OMP_CLAUSE_SIZE (node
), elemsz
);
2183 OMP_CLAUSE_DECL (node
) = decl
;
2188 gfc_init_se (&se
, NULL
);
2189 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2191 gfc_conv_expr_reference (&se
, n
->expr
);
2192 gfc_add_block_to_block (block
, &se
.pre
);
2194 OMP_CLAUSE_SIZE (node
)
2195 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2199 gfc_conv_expr_descriptor (&se
, n
->expr
);
2200 ptr
= gfc_conv_array_data (se
.expr
);
2201 tree type
= TREE_TYPE (se
.expr
);
2202 gfc_add_block_to_block (block
, &se
.pre
);
2203 OMP_CLAUSE_SIZE (node
)
2204 = gfc_full_array_size (block
, se
.expr
,
2205 GFC_TYPE_ARRAY_RANK (type
));
2207 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2208 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2209 OMP_CLAUSE_SIZE (node
)
2210 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2211 OMP_CLAUSE_SIZE (node
), elemsz
);
2213 gfc_add_block_to_block (block
, &se
.post
);
2214 ptr
= fold_convert (build_pointer_type (char_type_node
),
2216 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2218 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2219 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
2221 node4
= build_omp_clause (input_location
,
2223 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2224 OMP_CLAUSE_DECL (node4
) = decl
;
2225 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2226 decl
= build_fold_indirect_ref (decl
);
2228 ptr
= fold_convert (sizetype
, ptr
);
2229 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2231 tree type
= TREE_TYPE (decl
);
2232 ptr2
= gfc_conv_descriptor_data_get (decl
);
2233 node2
= build_omp_clause (input_location
,
2235 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2236 OMP_CLAUSE_DECL (node2
) = decl
;
2237 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2238 node3
= build_omp_clause (input_location
,
2240 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2241 OMP_CLAUSE_DECL (node3
)
2242 = gfc_conv_descriptor_data_get (decl
);
2246 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2247 ptr2
= build_fold_addr_expr (decl
);
2250 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2253 node3
= build_omp_clause (input_location
,
2255 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2256 OMP_CLAUSE_DECL (node3
) = decl
;
2258 ptr2
= fold_convert (sizetype
, ptr2
);
2259 OMP_CLAUSE_SIZE (node3
)
2260 = fold_build2 (MINUS_EXPR
, sizetype
, ptr
, ptr2
);
2262 switch (n
->u
.map_op
)
2265 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALLOC
);
2268 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TO
);
2271 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FROM
);
2273 case OMP_MAP_TOFROM
:
2274 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TOFROM
);
2276 case OMP_MAP_ALWAYS_TO
:
2277 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TO
);
2279 case OMP_MAP_ALWAYS_FROM
:
2280 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_FROM
);
2282 case OMP_MAP_ALWAYS_TOFROM
:
2283 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TOFROM
);
2285 case OMP_MAP_RELEASE
:
2286 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_RELEASE
);
2288 case OMP_MAP_DELETE
:
2289 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DELETE
);
2291 case OMP_MAP_FORCE_ALLOC
:
2292 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_ALLOC
);
2294 case OMP_MAP_FORCE_TO
:
2295 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TO
);
2297 case OMP_MAP_FORCE_FROM
:
2298 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_FROM
);
2300 case OMP_MAP_FORCE_TOFROM
:
2301 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TOFROM
);
2303 case OMP_MAP_FORCE_PRESENT
:
2304 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_PRESENT
);
2306 case OMP_MAP_FORCE_DEVICEPTR
:
2307 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEVICEPTR
);
2312 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2314 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
2316 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
2318 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
2323 case OMP_LIST_CACHE
:
2324 for (; n
!= NULL
; n
= n
->next
)
2326 if (!n
->sym
->attr
.referenced
)
2332 clause_code
= OMP_CLAUSE_TO
;
2335 clause_code
= OMP_CLAUSE_FROM
;
2337 case OMP_LIST_CACHE
:
2338 clause_code
= OMP_CLAUSE__CACHE_
;
2343 tree node
= build_omp_clause (input_location
, clause_code
);
2344 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2346 tree decl
= gfc_get_symbol_decl (n
->sym
);
2347 if (gfc_omp_privatize_by_reference (decl
))
2348 decl
= build_fold_indirect_ref (decl
);
2349 else if (DECL_P (decl
))
2350 TREE_ADDRESSABLE (decl
) = 1;
2351 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2353 tree type
= TREE_TYPE (decl
);
2354 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2355 ptr
= fold_convert (build_pointer_type (char_type_node
),
2357 ptr
= build_fold_indirect_ref (ptr
);
2358 OMP_CLAUSE_DECL (node
) = ptr
;
2359 OMP_CLAUSE_SIZE (node
)
2360 = gfc_full_array_size (block
, decl
,
2361 GFC_TYPE_ARRAY_RANK (type
));
2363 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2364 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2365 OMP_CLAUSE_SIZE (node
)
2366 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2367 OMP_CLAUSE_SIZE (node
), elemsz
);
2370 OMP_CLAUSE_DECL (node
) = decl
;
2375 gfc_init_se (&se
, NULL
);
2376 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2378 gfc_conv_expr_reference (&se
, n
->expr
);
2380 gfc_add_block_to_block (block
, &se
.pre
);
2381 OMP_CLAUSE_SIZE (node
)
2382 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2386 gfc_conv_expr_descriptor (&se
, n
->expr
);
2387 ptr
= gfc_conv_array_data (se
.expr
);
2388 tree type
= TREE_TYPE (se
.expr
);
2389 gfc_add_block_to_block (block
, &se
.pre
);
2390 OMP_CLAUSE_SIZE (node
)
2391 = gfc_full_array_size (block
, se
.expr
,
2392 GFC_TYPE_ARRAY_RANK (type
));
2394 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2395 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2396 OMP_CLAUSE_SIZE (node
)
2397 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2398 OMP_CLAUSE_SIZE (node
), elemsz
);
2400 gfc_add_block_to_block (block
, &se
.post
);
2401 ptr
= fold_convert (build_pointer_type (char_type_node
),
2403 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2405 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2413 if (clauses
->if_expr
)
2417 gfc_init_se (&se
, NULL
);
2418 gfc_conv_expr (&se
, clauses
->if_expr
);
2419 gfc_add_block_to_block (block
, &se
.pre
);
2420 if_var
= gfc_evaluate_now (se
.expr
, block
);
2421 gfc_add_block_to_block (block
, &se
.post
);
2423 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
2424 OMP_CLAUSE_IF_MODIFIER (c
) = ERROR_MARK
;
2425 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2426 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2428 for (ifc
= 0; ifc
< OMP_IF_LAST
; ifc
++)
2429 if (clauses
->if_exprs
[ifc
])
2433 gfc_init_se (&se
, NULL
);
2434 gfc_conv_expr (&se
, clauses
->if_exprs
[ifc
]);
2435 gfc_add_block_to_block (block
, &se
.pre
);
2436 if_var
= gfc_evaluate_now (se
.expr
, block
);
2437 gfc_add_block_to_block (block
, &se
.post
);
2439 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
2442 case OMP_IF_PARALLEL
:
2443 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_PARALLEL
;
2446 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASK
;
2448 case OMP_IF_TASKLOOP
:
2449 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASKLOOP
;
2452 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET
;
2454 case OMP_IF_TARGET_DATA
:
2455 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_DATA
;
2457 case OMP_IF_TARGET_UPDATE
:
2458 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_UPDATE
;
2460 case OMP_IF_TARGET_ENTER_DATA
:
2461 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_ENTER_DATA
;
2463 case OMP_IF_TARGET_EXIT_DATA
:
2464 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_EXIT_DATA
;
2469 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2470 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2473 if (clauses
->final_expr
)
2477 gfc_init_se (&se
, NULL
);
2478 gfc_conv_expr (&se
, clauses
->final_expr
);
2479 gfc_add_block_to_block (block
, &se
.pre
);
2480 final_var
= gfc_evaluate_now (se
.expr
, block
);
2481 gfc_add_block_to_block (block
, &se
.post
);
2483 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FINAL
);
2484 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
2485 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2488 if (clauses
->num_threads
)
2492 gfc_init_se (&se
, NULL
);
2493 gfc_conv_expr (&se
, clauses
->num_threads
);
2494 gfc_add_block_to_block (block
, &se
.pre
);
2495 num_threads
= gfc_evaluate_now (se
.expr
, block
);
2496 gfc_add_block_to_block (block
, &se
.post
);
2498 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_THREADS
);
2499 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
2500 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2503 chunk_size
= NULL_TREE
;
2504 if (clauses
->chunk_size
)
2506 gfc_init_se (&se
, NULL
);
2507 gfc_conv_expr (&se
, clauses
->chunk_size
);
2508 gfc_add_block_to_block (block
, &se
.pre
);
2509 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2510 gfc_add_block_to_block (block
, &se
.post
);
2513 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
2515 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SCHEDULE
);
2516 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2517 switch (clauses
->sched_kind
)
2519 case OMP_SCHED_STATIC
:
2520 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
2522 case OMP_SCHED_DYNAMIC
:
2523 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
2525 case OMP_SCHED_GUIDED
:
2526 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
2528 case OMP_SCHED_RUNTIME
:
2529 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
2531 case OMP_SCHED_AUTO
:
2532 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
2537 if (clauses
->sched_monotonic
)
2538 OMP_CLAUSE_SCHEDULE_KIND (c
)
2539 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
2540 | OMP_CLAUSE_SCHEDULE_MONOTONIC
);
2541 else if (clauses
->sched_nonmonotonic
)
2542 OMP_CLAUSE_SCHEDULE_KIND (c
)
2543 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
2544 | OMP_CLAUSE_SCHEDULE_NONMONOTONIC
);
2545 if (clauses
->sched_simd
)
2546 OMP_CLAUSE_SCHEDULE_SIMD (c
) = 1;
2547 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2550 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
2552 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULT
);
2553 switch (clauses
->default_sharing
)
2555 case OMP_DEFAULT_NONE
:
2556 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
2558 case OMP_DEFAULT_SHARED
:
2559 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
2561 case OMP_DEFAULT_PRIVATE
:
2562 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
2564 case OMP_DEFAULT_FIRSTPRIVATE
:
2565 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
2570 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2573 if (clauses
->nowait
)
2575 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOWAIT
);
2576 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2579 if (clauses
->ordered
)
2581 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ORDERED
);
2582 OMP_CLAUSE_ORDERED_EXPR (c
)
2583 = clauses
->orderedc
? build_int_cst (integer_type_node
,
2584 clauses
->orderedc
) : NULL_TREE
;
2585 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2588 if (clauses
->untied
)
2590 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_UNTIED
);
2591 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2594 if (clauses
->mergeable
)
2596 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_MERGEABLE
);
2597 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2600 if (clauses
->collapse
)
2602 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_COLLAPSE
);
2603 OMP_CLAUSE_COLLAPSE_EXPR (c
)
2604 = build_int_cst (integer_type_node
, clauses
->collapse
);
2605 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2608 if (clauses
->inbranch
)
2610 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INBRANCH
);
2611 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2614 if (clauses
->notinbranch
)
2616 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOTINBRANCH
);
2617 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2620 switch (clauses
->cancel
)
2622 case OMP_CANCEL_UNKNOWN
:
2624 case OMP_CANCEL_PARALLEL
:
2625 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PARALLEL
);
2626 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2628 case OMP_CANCEL_SECTIONS
:
2629 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SECTIONS
);
2630 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2633 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FOR
);
2634 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2636 case OMP_CANCEL_TASKGROUP
:
2637 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TASKGROUP
);
2638 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2642 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
2644 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PROC_BIND
);
2645 switch (clauses
->proc_bind
)
2647 case OMP_PROC_BIND_MASTER
:
2648 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
2650 case OMP_PROC_BIND_SPREAD
:
2651 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
2653 case OMP_PROC_BIND_CLOSE
:
2654 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
2659 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2662 if (clauses
->safelen_expr
)
2666 gfc_init_se (&se
, NULL
);
2667 gfc_conv_expr (&se
, clauses
->safelen_expr
);
2668 gfc_add_block_to_block (block
, &se
.pre
);
2669 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
2670 gfc_add_block_to_block (block
, &se
.post
);
2672 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SAFELEN
);
2673 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
2674 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2677 if (clauses
->simdlen_expr
)
2681 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2682 OMP_CLAUSE_SIMDLEN_EXPR (c
)
2683 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
2684 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2690 gfc_init_se (&se
, NULL
);
2691 gfc_conv_expr (&se
, clauses
->simdlen_expr
);
2692 gfc_add_block_to_block (block
, &se
.pre
);
2693 simdlen_var
= gfc_evaluate_now (se
.expr
, block
);
2694 gfc_add_block_to_block (block
, &se
.post
);
2696 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2697 OMP_CLAUSE_SIMDLEN_EXPR (c
) = simdlen_var
;
2698 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2702 if (clauses
->num_teams
)
2706 gfc_init_se (&se
, NULL
);
2707 gfc_conv_expr (&se
, clauses
->num_teams
);
2708 gfc_add_block_to_block (block
, &se
.pre
);
2709 num_teams
= gfc_evaluate_now (se
.expr
, block
);
2710 gfc_add_block_to_block (block
, &se
.post
);
2712 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TEAMS
);
2713 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
2714 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2717 if (clauses
->device
)
2721 gfc_init_se (&se
, NULL
);
2722 gfc_conv_expr (&se
, clauses
->device
);
2723 gfc_add_block_to_block (block
, &se
.pre
);
2724 device
= gfc_evaluate_now (se
.expr
, block
);
2725 gfc_add_block_to_block (block
, &se
.post
);
2727 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEVICE
);
2728 OMP_CLAUSE_DEVICE_ID (c
) = device
;
2729 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2732 if (clauses
->thread_limit
)
2736 gfc_init_se (&se
, NULL
);
2737 gfc_conv_expr (&se
, clauses
->thread_limit
);
2738 gfc_add_block_to_block (block
, &se
.pre
);
2739 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
2740 gfc_add_block_to_block (block
, &se
.post
);
2742 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREAD_LIMIT
);
2743 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
2744 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2747 chunk_size
= NULL_TREE
;
2748 if (clauses
->dist_chunk_size
)
2750 gfc_init_se (&se
, NULL
);
2751 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
2752 gfc_add_block_to_block (block
, &se
.pre
);
2753 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2754 gfc_add_block_to_block (block
, &se
.post
);
2757 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
2759 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DIST_SCHEDULE
);
2760 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2761 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2764 if (clauses
->grainsize
)
2768 gfc_init_se (&se
, NULL
);
2769 gfc_conv_expr (&se
, clauses
->grainsize
);
2770 gfc_add_block_to_block (block
, &se
.pre
);
2771 grainsize
= gfc_evaluate_now (se
.expr
, block
);
2772 gfc_add_block_to_block (block
, &se
.post
);
2774 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GRAINSIZE
);
2775 OMP_CLAUSE_GRAINSIZE_EXPR (c
) = grainsize
;
2776 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2779 if (clauses
->num_tasks
)
2783 gfc_init_se (&se
, NULL
);
2784 gfc_conv_expr (&se
, clauses
->num_tasks
);
2785 gfc_add_block_to_block (block
, &se
.pre
);
2786 num_tasks
= gfc_evaluate_now (se
.expr
, block
);
2787 gfc_add_block_to_block (block
, &se
.post
);
2789 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TASKS
);
2790 OMP_CLAUSE_NUM_TASKS_EXPR (c
) = num_tasks
;
2791 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2794 if (clauses
->priority
)
2798 gfc_init_se (&se
, NULL
);
2799 gfc_conv_expr (&se
, clauses
->priority
);
2800 gfc_add_block_to_block (block
, &se
.pre
);
2801 priority
= gfc_evaluate_now (se
.expr
, block
);
2802 gfc_add_block_to_block (block
, &se
.post
);
2804 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PRIORITY
);
2805 OMP_CLAUSE_PRIORITY_EXPR (c
) = priority
;
2806 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2813 gfc_init_se (&se
, NULL
);
2814 gfc_conv_expr (&se
, clauses
->hint
);
2815 gfc_add_block_to_block (block
, &se
.pre
);
2816 hint
= gfc_evaluate_now (se
.expr
, block
);
2817 gfc_add_block_to_block (block
, &se
.post
);
2819 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_HINT
);
2820 OMP_CLAUSE_HINT_EXPR (c
) = hint
;
2821 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2826 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMD
);
2827 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2829 if (clauses
->threads
)
2831 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREADS
);
2832 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2834 if (clauses
->nogroup
)
2836 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOGROUP
);
2837 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2839 if (clauses
->defaultmap
)
2841 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULTMAP
);
2842 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2844 if (clauses
->depend_source
)
2846 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEPEND
);
2847 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_SOURCE
;
2848 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2853 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ASYNC
);
2854 if (clauses
->async_expr
)
2855 OMP_CLAUSE_ASYNC_EXPR (c
)
2856 = gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
2858 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
2859 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2863 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SEQ
);
2864 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2866 if (clauses
->par_auto
)
2868 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_AUTO
);
2869 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2871 if (clauses
->independent
)
2873 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INDEPENDENT
);
2874 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2876 if (clauses
->wait_list
)
2880 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2882 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WAIT
);
2883 OMP_CLAUSE_DECL (c
) = gfc_convert_expr_to_tree (block
, el
->expr
);
2884 OMP_CLAUSE_CHAIN (c
) = omp_clauses
;
2888 if (clauses
->num_gangs_expr
)
2891 = gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
2892 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_GANGS
);
2893 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
2894 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2896 if (clauses
->num_workers_expr
)
2898 tree num_workers_var
2899 = gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
2900 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_WORKERS
);
2901 OMP_CLAUSE_NUM_WORKERS_EXPR (c
) = num_workers_var
;
2902 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2904 if (clauses
->vector_length_expr
)
2906 tree vector_length_var
2907 = gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
2908 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR_LENGTH
);
2909 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
) = vector_length_var
;
2910 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2912 if (clauses
->tile_list
)
2914 vec
<tree
, va_gc
> *tvec
;
2917 vec_alloc (tvec
, 4);
2919 for (el
= clauses
->tile_list
; el
; el
= el
->next
)
2920 vec_safe_push (tvec
, gfc_convert_expr_to_tree (block
, el
->expr
));
2922 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TILE
);
2923 OMP_CLAUSE_TILE_LIST (c
) = build_tree_list_vec (tvec
);
2924 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2927 if (clauses
->vector
)
2929 if (clauses
->vector_expr
)
2932 = gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
2933 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2934 OMP_CLAUSE_VECTOR_EXPR (c
) = vector_var
;
2935 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2939 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2940 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2943 if (clauses
->worker
)
2945 if (clauses
->worker_expr
)
2948 = gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
2949 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2950 OMP_CLAUSE_WORKER_EXPR (c
) = worker_var
;
2951 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2955 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2956 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2962 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GANG
);
2963 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2964 if (clauses
->gang_num_expr
)
2966 arg
= gfc_convert_expr_to_tree (block
, clauses
->gang_num_expr
);
2967 OMP_CLAUSE_GANG_EXPR (c
) = arg
;
2969 if (clauses
->gang_static
)
2971 arg
= clauses
->gang_static_expr
2972 ? gfc_convert_expr_to_tree (block
, clauses
->gang_static_expr
)
2973 : integer_minus_one_node
;
2974 OMP_CLAUSE_GANG_STATIC_EXPR (c
) = arg
;
2978 return nreverse (omp_clauses
);
2981 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
2984 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
2989 stmt
= gfc_trans_code (code
);
2990 if (TREE_CODE (stmt
) != BIND_EXPR
)
2992 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
2994 tree block
= poplevel (1, 0);
2995 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
3005 /* Trans OpenACC directives. */
3006 /* parallel, kernels, data and host_data. */
3008 gfc_trans_oacc_construct (gfc_code
*code
)
3011 tree stmt
, oacc_clauses
;
3012 enum tree_code construct_code
;
3016 case EXEC_OACC_PARALLEL
:
3017 construct_code
= OACC_PARALLEL
;
3019 case EXEC_OACC_KERNELS
:
3020 construct_code
= OACC_KERNELS
;
3022 case EXEC_OACC_DATA
:
3023 construct_code
= OACC_DATA
;
3025 case EXEC_OACC_HOST_DATA
:
3026 construct_code
= OACC_HOST_DATA
;
3032 gfc_start_block (&block
);
3033 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3035 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3036 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
3038 gfc_add_expr_to_block (&block
, stmt
);
3039 return gfc_finish_block (&block
);
3042 /* update, enter_data, exit_data, cache. */
3044 gfc_trans_oacc_executable_directive (gfc_code
*code
)
3047 tree stmt
, oacc_clauses
;
3048 enum tree_code construct_code
;
3052 case EXEC_OACC_UPDATE
:
3053 construct_code
= OACC_UPDATE
;
3055 case EXEC_OACC_ENTER_DATA
:
3056 construct_code
= OACC_ENTER_DATA
;
3058 case EXEC_OACC_EXIT_DATA
:
3059 construct_code
= OACC_EXIT_DATA
;
3061 case EXEC_OACC_CACHE
:
3062 construct_code
= OACC_CACHE
;
3068 gfc_start_block (&block
);
3069 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3071 stmt
= build1_loc (input_location
, construct_code
, void_type_node
,
3073 gfc_add_expr_to_block (&block
, stmt
);
3074 return gfc_finish_block (&block
);
3078 gfc_trans_oacc_wait_directive (gfc_code
*code
)
3082 vec
<tree
, va_gc
> *args
;
3085 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
3086 location_t loc
= input_location
;
3088 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
3091 vec_alloc (args
, nparms
+ 2);
3092 stmt
= builtin_decl_explicit (BUILT_IN_GOACC_WAIT
);
3094 gfc_start_block (&block
);
3096 if (clauses
->async_expr
)
3097 t
= gfc_convert_expr_to_tree (&block
, clauses
->async_expr
);
3099 t
= build_int_cst (integer_type_node
, -2);
3101 args
->quick_push (t
);
3102 args
->quick_push (build_int_cst (integer_type_node
, nparms
));
3104 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
3105 args
->quick_push (gfc_convert_expr_to_tree (&block
, el
->expr
));
3107 stmt
= build_call_expr_loc_vec (loc
, stmt
, args
);
3108 gfc_add_expr_to_block (&block
, stmt
);
3112 return gfc_finish_block (&block
);
3115 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
3116 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
3119 gfc_trans_omp_atomic (gfc_code
*code
)
3121 gfc_code
*atomic_code
= code
;
3125 gfc_expr
*expr2
, *e
;
3128 tree lhsaddr
, type
, rhs
, x
;
3129 enum tree_code op
= ERROR_MARK
;
3130 enum tree_code aop
= OMP_ATOMIC
;
3131 bool var_on_left
= false;
3132 bool seq_cst
= (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SEQ_CST
) != 0;
3134 code
= code
->block
->next
;
3135 gcc_assert (code
->op
== EXEC_ASSIGN
);
3136 var
= code
->expr1
->symtree
->n
.sym
;
3138 gfc_init_se (&lse
, NULL
);
3139 gfc_init_se (&rse
, NULL
);
3140 gfc_init_se (&vse
, NULL
);
3141 gfc_start_block (&block
);
3143 expr2
= code
->expr2
;
3144 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3145 != GFC_OMP_ATOMIC_WRITE
)
3146 && (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
) == 0
3147 && expr2
->expr_type
== EXPR_FUNCTION
3148 && expr2
->value
.function
.isym
3149 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3150 expr2
= expr2
->value
.function
.actual
->expr
;
3152 switch (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3154 case GFC_OMP_ATOMIC_READ
:
3155 gfc_conv_expr (&vse
, code
->expr1
);
3156 gfc_add_block_to_block (&block
, &vse
.pre
);
3158 gfc_conv_expr (&lse
, expr2
);
3159 gfc_add_block_to_block (&block
, &lse
.pre
);
3160 type
= TREE_TYPE (lse
.expr
);
3161 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
3163 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
3164 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3165 x
= convert (TREE_TYPE (vse
.expr
), x
);
3166 gfc_add_modify (&block
, vse
.expr
, x
);
3168 gfc_add_block_to_block (&block
, &lse
.pre
);
3169 gfc_add_block_to_block (&block
, &rse
.pre
);
3171 return gfc_finish_block (&block
);
3172 case GFC_OMP_ATOMIC_CAPTURE
:
3173 aop
= OMP_ATOMIC_CAPTURE_NEW
;
3174 if (expr2
->expr_type
== EXPR_VARIABLE
)
3176 aop
= OMP_ATOMIC_CAPTURE_OLD
;
3177 gfc_conv_expr (&vse
, code
->expr1
);
3178 gfc_add_block_to_block (&block
, &vse
.pre
);
3180 gfc_conv_expr (&lse
, expr2
);
3181 gfc_add_block_to_block (&block
, &lse
.pre
);
3182 gfc_init_se (&lse
, NULL
);
3184 var
= code
->expr1
->symtree
->n
.sym
;
3185 expr2
= code
->expr2
;
3186 if (expr2
->expr_type
== EXPR_FUNCTION
3187 && expr2
->value
.function
.isym
3188 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3189 expr2
= expr2
->value
.function
.actual
->expr
;
3196 gfc_conv_expr (&lse
, code
->expr1
);
3197 gfc_add_block_to_block (&block
, &lse
.pre
);
3198 type
= TREE_TYPE (lse
.expr
);
3199 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
3201 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3202 == GFC_OMP_ATOMIC_WRITE
)
3203 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
3205 gfc_conv_expr (&rse
, expr2
);
3206 gfc_add_block_to_block (&block
, &rse
.pre
);
3208 else if (expr2
->expr_type
== EXPR_OP
)
3211 switch (expr2
->value
.op
.op
)
3213 case INTRINSIC_PLUS
:
3216 case INTRINSIC_TIMES
:
3219 case INTRINSIC_MINUS
:
3222 case INTRINSIC_DIVIDE
:
3223 if (expr2
->ts
.type
== BT_INTEGER
)
3224 op
= TRUNC_DIV_EXPR
;
3229 op
= TRUTH_ANDIF_EXPR
;
3232 op
= TRUTH_ORIF_EXPR
;
3237 case INTRINSIC_NEQV
:
3243 e
= expr2
->value
.op
.op1
;
3244 if (e
->expr_type
== EXPR_FUNCTION
3245 && e
->value
.function
.isym
3246 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3247 e
= e
->value
.function
.actual
->expr
;
3248 if (e
->expr_type
== EXPR_VARIABLE
3249 && e
->symtree
!= NULL
3250 && e
->symtree
->n
.sym
== var
)
3252 expr2
= expr2
->value
.op
.op2
;
3257 e
= expr2
->value
.op
.op2
;
3258 if (e
->expr_type
== EXPR_FUNCTION
3259 && e
->value
.function
.isym
3260 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3261 e
= e
->value
.function
.actual
->expr
;
3262 gcc_assert (e
->expr_type
== EXPR_VARIABLE
3263 && e
->symtree
!= NULL
3264 && e
->symtree
->n
.sym
== var
);
3265 expr2
= expr2
->value
.op
.op1
;
3266 var_on_left
= false;
3268 gfc_conv_expr (&rse
, expr2
);
3269 gfc_add_block_to_block (&block
, &rse
.pre
);
3273 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
3274 switch (expr2
->value
.function
.isym
->id
)
3294 e
= expr2
->value
.function
.actual
->expr
;
3295 gcc_assert (e
->expr_type
== EXPR_VARIABLE
3296 && e
->symtree
!= NULL
3297 && e
->symtree
->n
.sym
== var
);
3299 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
3300 gfc_add_block_to_block (&block
, &rse
.pre
);
3301 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
3303 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
3304 gfc_actual_arglist
*arg
;
3306 gfc_add_modify (&block
, accum
, rse
.expr
);
3307 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
3310 gfc_init_block (&rse
.pre
);
3311 gfc_conv_expr (&rse
, arg
->expr
);
3312 gfc_add_block_to_block (&block
, &rse
.pre
);
3313 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
3315 gfc_add_modify (&block
, accum
, x
);
3321 expr2
= expr2
->value
.function
.actual
->next
->expr
;
3324 lhsaddr
= save_expr (lhsaddr
);
3325 if (TREE_CODE (lhsaddr
) != SAVE_EXPR
3326 && (TREE_CODE (lhsaddr
) != ADDR_EXPR
3327 || !VAR_P (TREE_OPERAND (lhsaddr
, 0))))
3329 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
3330 it even after unsharing function body. */
3331 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
3332 DECL_CONTEXT (var
) = current_function_decl
;
3333 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
,
3334 NULL_TREE
, NULL_TREE
);
3337 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
3339 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3340 == GFC_OMP_ATOMIC_WRITE
)
3341 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
3345 x
= convert (TREE_TYPE (rhs
),
3346 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
3348 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
3350 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
3353 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
3354 && TREE_CODE (type
) != COMPLEX_TYPE
)
3355 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
3356 TREE_TYPE (TREE_TYPE (rhs
)), x
);
3358 gfc_add_block_to_block (&block
, &lse
.pre
);
3359 gfc_add_block_to_block (&block
, &rse
.pre
);
3361 if (aop
== OMP_ATOMIC
)
3363 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
3364 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3365 gfc_add_expr_to_block (&block
, x
);
3369 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
3372 expr2
= code
->expr2
;
3373 if (expr2
->expr_type
== EXPR_FUNCTION
3374 && expr2
->value
.function
.isym
3375 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3376 expr2
= expr2
->value
.function
.actual
->expr
;
3378 gcc_assert (expr2
->expr_type
== EXPR_VARIABLE
);
3379 gfc_conv_expr (&vse
, code
->expr1
);
3380 gfc_add_block_to_block (&block
, &vse
.pre
);
3382 gfc_init_se (&lse
, NULL
);
3383 gfc_conv_expr (&lse
, expr2
);
3384 gfc_add_block_to_block (&block
, &lse
.pre
);
3386 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
3387 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3388 x
= convert (TREE_TYPE (vse
.expr
), x
);
3389 gfc_add_modify (&block
, vse
.expr
, x
);
3392 return gfc_finish_block (&block
);
3396 gfc_trans_omp_barrier (void)
3398 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
3399 return build_call_expr_loc (input_location
, decl
, 0);
3403 gfc_trans_omp_cancel (gfc_code
*code
)
3406 tree ifc
= boolean_true_node
;
3408 switch (code
->ext
.omp_clauses
->cancel
)
3410 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3411 case OMP_CANCEL_DO
: mask
= 2; break;
3412 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3413 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3414 default: gcc_unreachable ();
3416 gfc_start_block (&block
);
3417 if (code
->ext
.omp_clauses
->if_expr
)
3422 gfc_init_se (&se
, NULL
);
3423 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->if_expr
);
3424 gfc_add_block_to_block (&block
, &se
.pre
);
3425 if_var
= gfc_evaluate_now (se
.expr
, &block
);
3426 gfc_add_block_to_block (&block
, &se
.post
);
3427 tree type
= TREE_TYPE (if_var
);
3428 ifc
= fold_build2_loc (input_location
, NE_EXPR
,
3429 boolean_type_node
, if_var
,
3430 build_zero_cst (type
));
3432 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
3433 tree c_bool_type
= TREE_TYPE (TREE_TYPE (decl
));
3434 ifc
= fold_convert (c_bool_type
, ifc
);
3435 gfc_add_expr_to_block (&block
,
3436 build_call_expr_loc (input_location
, decl
, 2,
3437 build_int_cst (integer_type_node
,
3439 return gfc_finish_block (&block
);
3443 gfc_trans_omp_cancellation_point (gfc_code
*code
)
3446 switch (code
->ext
.omp_clauses
->cancel
)
3448 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3449 case OMP_CANCEL_DO
: mask
= 2; break;
3450 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3451 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3452 default: gcc_unreachable ();
3454 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT
);
3455 return build_call_expr_loc (input_location
, decl
, 1,
3456 build_int_cst (integer_type_node
, mask
));
3460 gfc_trans_omp_critical (gfc_code
*code
)
3462 tree name
= NULL_TREE
, stmt
;
3463 if (code
->ext
.omp_clauses
!= NULL
)
3464 name
= get_identifier (code
->ext
.omp_clauses
->critical_name
);
3465 stmt
= gfc_trans_code (code
->block
->next
);
3466 return build3_loc (input_location
, OMP_CRITICAL
, void_type_node
, stmt
,
3470 typedef struct dovar_init_d
{
3477 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
3478 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
3481 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
, orig_decls
;
3482 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
3485 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
3486 int i
, collapse
= clauses
->collapse
;
3487 vec
<dovar_init
> inits
= vNULL
;
3490 vec
<tree
, va_heap
, vl_embed
> *saved_doacross_steps
= doacross_steps
;
3492 doacross_steps
= NULL
;
3493 if (clauses
->orderedc
)
3494 collapse
= clauses
->orderedc
;
3498 code
= code
->block
->next
;
3499 gcc_assert (code
->op
== EXEC_DO
);
3501 init
= make_tree_vec (collapse
);
3502 cond
= make_tree_vec (collapse
);
3503 incr
= make_tree_vec (collapse
);
3504 orig_decls
= clauses
->orderedc
? make_tree_vec (collapse
) : NULL_TREE
;
3508 gfc_start_block (&block
);
3512 /* simd schedule modifier is only useful for composite do simd and other
3513 constructs including that, where gfc_trans_omp_do is only called
3514 on the simd construct and DO's clauses are translated elsewhere. */
3515 do_clauses
->sched_simd
= false;
3517 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
3519 for (i
= 0; i
< collapse
; i
++)
3522 int dovar_found
= 0;
3527 gfc_omp_namelist
*n
= NULL
;
3528 if (op
!= EXEC_OMP_DISTRIBUTE
)
3529 for (n
= clauses
->lists
[(op
== EXEC_OMP_SIMD
&& collapse
== 1)
3530 ? OMP_LIST_LINEAR
: OMP_LIST_LASTPRIVATE
];
3531 n
!= NULL
; n
= n
->next
)
3532 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3536 else if (n
== NULL
&& op
!= EXEC_OMP_SIMD
)
3537 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
3538 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3544 /* Evaluate all the expressions in the iterator. */
3545 gfc_init_se (&se
, NULL
);
3546 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
3547 gfc_add_block_to_block (pblock
, &se
.pre
);
3549 type
= TREE_TYPE (dovar
);
3550 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
3552 gfc_init_se (&se
, NULL
);
3553 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
3554 gfc_add_block_to_block (pblock
, &se
.pre
);
3555 from
= gfc_evaluate_now (se
.expr
, pblock
);
3557 gfc_init_se (&se
, NULL
);
3558 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
3559 gfc_add_block_to_block (pblock
, &se
.pre
);
3560 to
= gfc_evaluate_now (se
.expr
, pblock
);
3562 gfc_init_se (&se
, NULL
);
3563 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
3564 gfc_add_block_to_block (pblock
, &se
.pre
);
3565 step
= gfc_evaluate_now (se
.expr
, pblock
);
3568 /* Special case simple loops. */
3571 if (integer_onep (step
))
3573 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
3578 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
3584 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
3585 /* The condition should not be folded. */
3586 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
3587 ? LE_EXPR
: GE_EXPR
,
3588 boolean_type_node
, dovar
, to
);
3589 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3591 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3594 TREE_VEC_ELT (incr
, i
));
3598 /* STEP is not 1 or -1. Use:
3599 for (count = 0; count < (to + step - from) / step; count++)
3601 dovar = from + count * step;
3605 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
3606 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
3607 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
3609 tmp
= gfc_evaluate_now (tmp
, pblock
);
3610 count
= gfc_create_var (type
, "count");
3611 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
3612 build_int_cst (type
, 0));
3613 /* The condition should not be folded. */
3614 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
3617 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3619 build_int_cst (type
, 1));
3620 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3621 MODIFY_EXPR
, type
, count
,
3622 TREE_VEC_ELT (incr
, i
));
3624 /* Initialize DOVAR. */
3625 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
3626 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
3627 dovar_init e
= {dovar
, tmp
};
3628 inits
.safe_push (e
);
3629 if (clauses
->orderedc
)
3631 if (doacross_steps
== NULL
)
3632 vec_safe_grow_cleared (doacross_steps
, clauses
->orderedc
);
3633 (*doacross_steps
)[i
] = step
;
3637 TREE_VEC_ELT (orig_decls
, i
) = dovar_decl
;
3639 if (dovar_found
== 2
3640 && op
== EXEC_OMP_SIMD
3644 for (tmp
= omp_clauses
; tmp
; tmp
= OMP_CLAUSE_CHAIN (tmp
))
3645 if (OMP_CLAUSE_CODE (tmp
) == OMP_CLAUSE_LINEAR
3646 && OMP_CLAUSE_DECL (tmp
) == dovar
)
3648 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3654 if (op
== EXEC_OMP_SIMD
)
3658 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3659 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
3660 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3663 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3668 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3669 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
3670 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3672 if (dovar_found
== 2)
3679 /* If dovar is lastprivate, but different counter is used,
3680 dovar += step needs to be added to
3681 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
3682 will have the value on entry of the last loop, rather
3683 than value after iterator increment. */
3684 if (clauses
->orderedc
)
3686 if (clauses
->collapse
<= 1 || i
>= clauses
->collapse
)
3689 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3690 type
, count
, build_one_cst (type
));
3691 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
,
3693 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
3698 tmp
= gfc_evaluate_now (step
, pblock
);
3699 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
3702 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
3704 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3705 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3706 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3708 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
3711 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
3712 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3714 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
3718 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
3720 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3721 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3722 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3724 tree l
= build_omp_clause (input_location
,
3725 OMP_CLAUSE_LASTPRIVATE
);
3726 OMP_CLAUSE_DECL (l
) = dovar_decl
;
3727 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
3728 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
3730 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
3734 gcc_assert (simple
|| c
!= NULL
);
3738 if (op
!= EXEC_OMP_SIMD
)
3739 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3740 else if (collapse
== 1)
3742 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3743 OMP_CLAUSE_LINEAR_STEP (tmp
) = build_int_cst (type
, 1);
3744 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3745 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
3748 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3749 OMP_CLAUSE_DECL (tmp
) = count
;
3750 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3753 if (i
+ 1 < collapse
)
3754 code
= code
->block
->next
;
3757 if (pblock
!= &block
)
3760 gfc_start_block (&block
);
3763 gfc_start_block (&body
);
3765 FOR_EACH_VEC_ELT (inits
, ix
, di
)
3766 gfc_add_modify (&body
, di
->var
, di
->init
);
3769 /* Cycle statement is implemented with a goto. Exit statement must not be
3770 present for this loop. */
3771 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3773 /* Put these labels where they can be found later. */
3775 code
->cycle_label
= cycle_label
;
3776 code
->exit_label
= NULL_TREE
;
3778 /* Main loop body. */
3779 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
3780 gfc_add_expr_to_block (&body
, tmp
);
3782 /* Label for cycle statements (if needed). */
3783 if (TREE_USED (cycle_label
))
3785 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3786 gfc_add_expr_to_block (&body
, tmp
);
3789 /* End of loop body. */
3792 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
3793 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
3794 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
3795 case EXEC_OMP_TASKLOOP
: stmt
= make_node (OMP_TASKLOOP
); break;
3796 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
3797 default: gcc_unreachable ();
3800 TREE_TYPE (stmt
) = void_type_node
;
3801 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
3802 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
3803 OMP_FOR_INIT (stmt
) = init
;
3804 OMP_FOR_COND (stmt
) = cond
;
3805 OMP_FOR_INCR (stmt
) = incr
;
3807 OMP_FOR_ORIG_DECLS (stmt
) = orig_decls
;
3808 gfc_add_expr_to_block (&block
, stmt
);
3810 vec_free (doacross_steps
);
3811 doacross_steps
= saved_doacross_steps
;
3813 return gfc_finish_block (&block
);
3816 /* parallel loop and kernels loop. */
3818 gfc_trans_oacc_combined_directive (gfc_code
*code
)
3820 stmtblock_t block
, *pblock
= NULL
;
3821 gfc_omp_clauses construct_clauses
, loop_clauses
;
3822 tree stmt
, oacc_clauses
= NULL_TREE
;
3823 enum tree_code construct_code
;
3827 case EXEC_OACC_PARALLEL_LOOP
:
3828 construct_code
= OACC_PARALLEL
;
3830 case EXEC_OACC_KERNELS_LOOP
:
3831 construct_code
= OACC_KERNELS
;
3837 gfc_start_block (&block
);
3839 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
3840 if (code
->ext
.omp_clauses
!= NULL
)
3842 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
3843 sizeof (construct_clauses
));
3844 loop_clauses
.collapse
= construct_clauses
.collapse
;
3845 loop_clauses
.gang
= construct_clauses
.gang
;
3846 loop_clauses
.gang_static
= construct_clauses
.gang_static
;
3847 loop_clauses
.gang_num_expr
= construct_clauses
.gang_num_expr
;
3848 loop_clauses
.gang_static_expr
= construct_clauses
.gang_static_expr
;
3849 loop_clauses
.vector
= construct_clauses
.vector
;
3850 loop_clauses
.vector_expr
= construct_clauses
.vector_expr
;
3851 loop_clauses
.worker
= construct_clauses
.worker
;
3852 loop_clauses
.worker_expr
= construct_clauses
.worker_expr
;
3853 loop_clauses
.seq
= construct_clauses
.seq
;
3854 loop_clauses
.par_auto
= construct_clauses
.par_auto
;
3855 loop_clauses
.independent
= construct_clauses
.independent
;
3856 loop_clauses
.tile_list
= construct_clauses
.tile_list
;
3857 loop_clauses
.lists
[OMP_LIST_PRIVATE
]
3858 = construct_clauses
.lists
[OMP_LIST_PRIVATE
];
3859 loop_clauses
.lists
[OMP_LIST_REDUCTION
]
3860 = construct_clauses
.lists
[OMP_LIST_REDUCTION
];
3861 construct_clauses
.gang
= false;
3862 construct_clauses
.gang_static
= false;
3863 construct_clauses
.gang_num_expr
= NULL
;
3864 construct_clauses
.gang_static_expr
= NULL
;
3865 construct_clauses
.vector
= false;
3866 construct_clauses
.vector_expr
= NULL
;
3867 construct_clauses
.worker
= false;
3868 construct_clauses
.worker_expr
= NULL
;
3869 construct_clauses
.seq
= false;
3870 construct_clauses
.par_auto
= false;
3871 construct_clauses
.independent
= false;
3872 construct_clauses
.independent
= false;
3873 construct_clauses
.tile_list
= NULL
;
3874 construct_clauses
.lists
[OMP_LIST_PRIVATE
] = NULL
;
3875 if (construct_code
== OACC_KERNELS
)
3876 construct_clauses
.lists
[OMP_LIST_REDUCTION
] = NULL
;
3877 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
3880 if (!loop_clauses
.seq
)
3884 stmt
= gfc_trans_omp_do (code
, EXEC_OACC_LOOP
, pblock
, &loop_clauses
, NULL
);
3885 if (TREE_CODE (stmt
) != BIND_EXPR
)
3886 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3889 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
3891 gfc_add_expr_to_block (&block
, stmt
);
3892 return gfc_finish_block (&block
);
3896 gfc_trans_omp_flush (void)
3898 tree decl
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
3899 return build_call_expr_loc (input_location
, decl
, 0);
3903 gfc_trans_omp_master (gfc_code
*code
)
3905 tree stmt
= gfc_trans_code (code
->block
->next
);
3906 if (IS_EMPTY_STMT (stmt
))
3908 return build1_v (OMP_MASTER
, stmt
);
3912 gfc_trans_omp_ordered (gfc_code
*code
)
3914 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, code
->ext
.omp_clauses
,
3916 return build2_loc (input_location
, OMP_ORDERED
, void_type_node
,
3917 code
->block
? gfc_trans_code (code
->block
->next
)
3918 : NULL_TREE
, omp_clauses
);
3922 gfc_trans_omp_parallel (gfc_code
*code
)
3925 tree stmt
, omp_clauses
;
3927 gfc_start_block (&block
);
3928 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3931 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3932 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3933 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3935 gfc_add_expr_to_block (&block
, stmt
);
3936 return gfc_finish_block (&block
);
/* Indices into the per-construct clause array used when splitting the
   clauses of combined OpenMP constructs, and the matching bitmask values
   used to describe which constituent constructs are present.  */
enum
{
  GFC_OMP_SPLIT_SIMD,
  GFC_OMP_SPLIT_DO,
  GFC_OMP_SPLIT_PARALLEL,
  GFC_OMP_SPLIT_DISTRIBUTE,
  GFC_OMP_SPLIT_TEAMS,
  GFC_OMP_SPLIT_TARGET,
  GFC_OMP_SPLIT_TASKLOOP,
  GFC_OMP_SPLIT_NUM
};

enum
{
  GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
  GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
  GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
  GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
  GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
  GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET),
  GFC_OMP_MASK_TASKLOOP = (1 << GFC_OMP_SPLIT_TASKLOOP)
};
3963 gfc_split_omp_clauses (gfc_code
*code
,
3964 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
3966 int mask
= 0, innermost
= 0;
3967 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
3970 case EXEC_OMP_DISTRIBUTE
:
3971 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3973 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
3974 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3975 innermost
= GFC_OMP_SPLIT_DO
;
3977 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
3978 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
3979 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3980 innermost
= GFC_OMP_SPLIT_SIMD
;
3982 case EXEC_OMP_DISTRIBUTE_SIMD
:
3983 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3984 innermost
= GFC_OMP_SPLIT_SIMD
;
3987 innermost
= GFC_OMP_SPLIT_DO
;
3989 case EXEC_OMP_DO_SIMD
:
3990 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3991 innermost
= GFC_OMP_SPLIT_SIMD
;
3993 case EXEC_OMP_PARALLEL
:
3994 innermost
= GFC_OMP_SPLIT_PARALLEL
;
3996 case EXEC_OMP_PARALLEL_DO
:
3997 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3998 innermost
= GFC_OMP_SPLIT_DO
;
4000 case EXEC_OMP_PARALLEL_DO_SIMD
:
4001 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4002 innermost
= GFC_OMP_SPLIT_SIMD
;
4005 innermost
= GFC_OMP_SPLIT_SIMD
;
4007 case EXEC_OMP_TARGET
:
4008 innermost
= GFC_OMP_SPLIT_TARGET
;
4010 case EXEC_OMP_TARGET_PARALLEL
:
4011 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
;
4012 innermost
= GFC_OMP_SPLIT_PARALLEL
;
4014 case EXEC_OMP_TARGET_PARALLEL_DO
:
4015 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4016 innermost
= GFC_OMP_SPLIT_DO
;
4018 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
4019 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
4020 | GFC_OMP_MASK_SIMD
;
4021 innermost
= GFC_OMP_SPLIT_SIMD
;
4023 case EXEC_OMP_TARGET_SIMD
:
4024 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_SIMD
;
4025 innermost
= GFC_OMP_SPLIT_SIMD
;
4027 case EXEC_OMP_TARGET_TEAMS
:
4028 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
4029 innermost
= GFC_OMP_SPLIT_TEAMS
;
4031 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4032 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
4033 | GFC_OMP_MASK_DISTRIBUTE
;
4034 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
4036 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4037 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4038 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4039 innermost
= GFC_OMP_SPLIT_DO
;
4041 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4042 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4043 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4044 innermost
= GFC_OMP_SPLIT_SIMD
;
4046 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4047 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
4048 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
4049 innermost
= GFC_OMP_SPLIT_SIMD
;
4051 case EXEC_OMP_TASKLOOP
:
4052 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
4054 case EXEC_OMP_TASKLOOP_SIMD
:
4055 mask
= GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
4056 innermost
= GFC_OMP_SPLIT_SIMD
;
4058 case EXEC_OMP_TEAMS
:
4059 innermost
= GFC_OMP_SPLIT_TEAMS
;
4061 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4062 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
4063 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
4065 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4066 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4067 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4068 innermost
= GFC_OMP_SPLIT_DO
;
4070 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4071 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4072 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4073 innermost
= GFC_OMP_SPLIT_SIMD
;
4075 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4076 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
4077 innermost
= GFC_OMP_SPLIT_SIMD
;
4084 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
4087 if (code
->ext
.omp_clauses
!= NULL
)
4089 if (mask
& GFC_OMP_MASK_TARGET
)
4091 /* First the clauses that are unique to some constructs. */
4092 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
4093 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
4094 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_IS_DEVICE_PTR
]
4095 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IS_DEVICE_PTR
];
4096 clausesa
[GFC_OMP_SPLIT_TARGET
].device
4097 = code
->ext
.omp_clauses
->device
;
4098 clausesa
[GFC_OMP_SPLIT_TARGET
].defaultmap
4099 = code
->ext
.omp_clauses
->defaultmap
;
4100 clausesa
[GFC_OMP_SPLIT_TARGET
].if_exprs
[OMP_IF_TARGET
]
4101 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TARGET
];
4102 /* And this is copied to all. */
4103 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
4104 = code
->ext
.omp_clauses
->if_expr
;
4106 if (mask
& GFC_OMP_MASK_TEAMS
)
4108 /* First the clauses that are unique to some constructs. */
4109 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
4110 = code
->ext
.omp_clauses
->num_teams
;
4111 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
4112 = code
->ext
.omp_clauses
->thread_limit
;
4113 /* Shared and default clauses are allowed on parallel, teams
4115 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
4116 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4117 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
4118 = code
->ext
.omp_clauses
->default_sharing
;
4120 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4122 /* First the clauses that are unique to some constructs. */
4123 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
4124 = code
->ext
.omp_clauses
->dist_sched_kind
;
4125 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
4126 = code
->ext
.omp_clauses
->dist_chunk_size
;
4127 /* Duplicate collapse. */
4128 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
4129 = code
->ext
.omp_clauses
->collapse
;
4131 if (mask
& GFC_OMP_MASK_PARALLEL
)
4133 /* First the clauses that are unique to some constructs. */
4134 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
4135 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
4136 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
4137 = code
->ext
.omp_clauses
->num_threads
;
4138 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
4139 = code
->ext
.omp_clauses
->proc_bind
;
4140 /* Shared and default clauses are allowed on parallel, teams
4142 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
4143 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4144 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
4145 = code
->ext
.omp_clauses
->default_sharing
;
4146 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_exprs
[OMP_IF_PARALLEL
]
4147 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_PARALLEL
];
4148 /* And this is copied to all. */
4149 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
4150 = code
->ext
.omp_clauses
->if_expr
;
4152 if (mask
& GFC_OMP_MASK_DO
)
4154 /* First the clauses that are unique to some constructs. */
4155 clausesa
[GFC_OMP_SPLIT_DO
].ordered
4156 = code
->ext
.omp_clauses
->ordered
;
4157 clausesa
[GFC_OMP_SPLIT_DO
].orderedc
4158 = code
->ext
.omp_clauses
->orderedc
;
4159 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
4160 = code
->ext
.omp_clauses
->sched_kind
;
4161 if (innermost
== GFC_OMP_SPLIT_SIMD
)
4162 clausesa
[GFC_OMP_SPLIT_DO
].sched_simd
4163 = code
->ext
.omp_clauses
->sched_simd
;
4164 clausesa
[GFC_OMP_SPLIT_DO
].sched_monotonic
4165 = code
->ext
.omp_clauses
->sched_monotonic
;
4166 clausesa
[GFC_OMP_SPLIT_DO
].sched_nonmonotonic
4167 = code
->ext
.omp_clauses
->sched_nonmonotonic
;
4168 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
4169 = code
->ext
.omp_clauses
->chunk_size
;
4170 clausesa
[GFC_OMP_SPLIT_DO
].nowait
4171 = code
->ext
.omp_clauses
->nowait
;
4172 /* Duplicate collapse. */
4173 clausesa
[GFC_OMP_SPLIT_DO
].collapse
4174 = code
->ext
.omp_clauses
->collapse
;
4176 if (mask
& GFC_OMP_MASK_SIMD
)
4178 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
4179 = code
->ext
.omp_clauses
->safelen_expr
;
4180 clausesa
[GFC_OMP_SPLIT_SIMD
].simdlen_expr
4181 = code
->ext
.omp_clauses
->simdlen_expr
;
4182 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
4183 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
4184 /* Duplicate collapse. */
4185 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
4186 = code
->ext
.omp_clauses
->collapse
;
4188 if (mask
& GFC_OMP_MASK_TASKLOOP
)
4190 /* First the clauses that are unique to some constructs. */
4191 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].nogroup
4192 = code
->ext
.omp_clauses
->nogroup
;
4193 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].grainsize
4194 = code
->ext
.omp_clauses
->grainsize
;
4195 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].num_tasks
4196 = code
->ext
.omp_clauses
->num_tasks
;
4197 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].priority
4198 = code
->ext
.omp_clauses
->priority
;
4199 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].final_expr
4200 = code
->ext
.omp_clauses
->final_expr
;
4201 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].untied
4202 = code
->ext
.omp_clauses
->untied
;
4203 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].mergeable
4204 = code
->ext
.omp_clauses
->mergeable
;
4205 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_exprs
[OMP_IF_TASKLOOP
]
4206 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TASKLOOP
];
4207 /* And this is copied to all. */
4208 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_expr
4209 = code
->ext
.omp_clauses
->if_expr
;
4210 /* Shared and default clauses are allowed on parallel, teams
4212 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_SHARED
]
4213 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4214 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].default_sharing
4215 = code
->ext
.omp_clauses
->default_sharing
;
4216 /* Duplicate collapse. */
4217 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].collapse
4218 = code
->ext
.omp_clauses
->collapse
;
4220 /* Private clause is supported on all constructs,
4221 it is enough to put it on the innermost one. For
4222 !$ omp parallel do put it on parallel though,
4223 as that's what we did for OpenMP 3.1. */
4224 clausesa
[innermost
== GFC_OMP_SPLIT_DO
4225 ? (int) GFC_OMP_SPLIT_PARALLEL
4226 : innermost
].lists
[OMP_LIST_PRIVATE
]
4227 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
4228 /* Firstprivate clause is supported on all constructs but
4229 simd. Put it on the outermost of those and duplicate
4230 on parallel and teams. */
4231 if (mask
& GFC_OMP_MASK_TARGET
)
4232 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_FIRSTPRIVATE
]
4233 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4234 if (mask
& GFC_OMP_MASK_TEAMS
)
4235 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
4236 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4237 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4238 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
4239 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4240 if (mask
& GFC_OMP_MASK_PARALLEL
)
4241 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
4242 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4243 else if (mask
& GFC_OMP_MASK_DO
)
4244 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
4245 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4246 /* Lastprivate is allowed on distribute, do and simd.
4247 In parallel do{, simd} we actually want to put it on
4248 parallel rather than do. */
4249 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4250 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_LASTPRIVATE
]
4251 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4252 if (mask
& GFC_OMP_MASK_PARALLEL
)
4253 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
4254 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4255 else if (mask
& GFC_OMP_MASK_DO
)
4256 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
4257 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4258 if (mask
& GFC_OMP_MASK_SIMD
)
4259 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
4260 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4261 /* Reduction is allowed on simd, do, parallel and teams.
4262 Duplicate it on all of them, but omit on do if
4263 parallel is present. */
4264 if (mask
& GFC_OMP_MASK_TEAMS
)
4265 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_REDUCTION
]
4266 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4267 if (mask
& GFC_OMP_MASK_PARALLEL
)
4268 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_REDUCTION
]
4269 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4270 else if (mask
& GFC_OMP_MASK_DO
)
4271 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_REDUCTION
]
4272 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4273 if (mask
& GFC_OMP_MASK_SIMD
)
4274 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_REDUCTION
]
4275 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4276 /* Linear clause is supported on do and simd,
4277 put it on the innermost one. */
4278 clausesa
[innermost
].lists
[OMP_LIST_LINEAR
]
4279 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
4281 if ((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
4282 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
4283 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
4287 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
4288 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
4291 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4292 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
4295 gfc_start_block (&block
);
4297 gfc_init_block (&block
);
4299 if (clausesa
== NULL
)
4301 clausesa
= clausesa_buf
;
4302 gfc_split_omp_clauses (code
, clausesa
);
4306 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
4307 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
4308 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
4311 if (TREE_CODE (body
) != BIND_EXPR
)
4312 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
4316 else if (TREE_CODE (body
) != BIND_EXPR
)
4317 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
4320 stmt
= make_node (OMP_FOR
);
4321 TREE_TYPE (stmt
) = void_type_node
;
4322 OMP_FOR_BODY (stmt
) = body
;
4323 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
4327 gfc_add_expr_to_block (&block
, stmt
);
4328 return gfc_finish_block (&block
);
4332 gfc_trans_omp_parallel_do (gfc_code
*code
, stmtblock_t
*pblock
,
4333 gfc_omp_clauses
*clausesa
)
4335 stmtblock_t block
, *new_pblock
= pblock
;
4336 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4337 tree stmt
, omp_clauses
= NULL_TREE
;
4340 gfc_start_block (&block
);
4342 gfc_init_block (&block
);
4344 if (clausesa
== NULL
)
4346 clausesa
= clausesa_buf
;
4347 gfc_split_omp_clauses (code
, clausesa
);
4350 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
4354 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
4355 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
4356 new_pblock
= &block
;
4360 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DO
, new_pblock
,
4361 &clausesa
[GFC_OMP_SPLIT_DO
], omp_clauses
);
4364 if (TREE_CODE (stmt
) != BIND_EXPR
)
4365 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4369 else if (TREE_CODE (stmt
) != BIND_EXPR
)
4370 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
4371 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4373 OMP_PARALLEL_COMBINED (stmt
) = 1;
4374 gfc_add_expr_to_block (&block
, stmt
);
4375 return gfc_finish_block (&block
);
4379 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
4380 gfc_omp_clauses
*clausesa
)
4383 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4384 tree stmt
, omp_clauses
= NULL_TREE
;
4387 gfc_start_block (&block
);
4389 gfc_init_block (&block
);
4391 if (clausesa
== NULL
)
4393 clausesa
= clausesa_buf
;
4394 gfc_split_omp_clauses (code
, clausesa
);
4398 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
4402 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
4405 if (TREE_CODE (stmt
) != BIND_EXPR
)
4406 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4410 else if (TREE_CODE (stmt
) != BIND_EXPR
)
4411 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
4414 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4416 OMP_PARALLEL_COMBINED (stmt
) = 1;
4418 gfc_add_expr_to_block (&block
, stmt
);
4419 return gfc_finish_block (&block
);
4423 gfc_trans_omp_parallel_sections (gfc_code
*code
)
4426 gfc_omp_clauses section_clauses
;
4427 tree stmt
, omp_clauses
;
4429 memset (§ion_clauses
, 0, sizeof (section_clauses
));
4430 section_clauses
.nowait
= true;
4432 gfc_start_block (&block
);
4433 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4436 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
4437 if (TREE_CODE (stmt
) != BIND_EXPR
)
4438 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4441 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4443 OMP_PARALLEL_COMBINED (stmt
) = 1;
4444 gfc_add_expr_to_block (&block
, stmt
);
4445 return gfc_finish_block (&block
);
4449 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
4452 gfc_omp_clauses workshare_clauses
;
4453 tree stmt
, omp_clauses
;
4455 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
4456 workshare_clauses
.nowait
= true;
4458 gfc_start_block (&block
);
4459 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4462 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
4463 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4464 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4466 OMP_PARALLEL_COMBINED (stmt
) = 1;
4467 gfc_add_expr_to_block (&block
, stmt
);
4468 return gfc_finish_block (&block
);
4472 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4474 stmtblock_t block
, body
;
4475 tree omp_clauses
, stmt
;
4476 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
4478 gfc_start_block (&block
);
4480 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
4482 gfc_init_block (&body
);
4483 for (code
= code
->block
; code
; code
= code
->block
)
4485 /* Last section is special because of lastprivate, so even if it
4486 is empty, chain it in. */
4487 stmt
= gfc_trans_omp_code (code
->next
,
4488 has_lastprivate
&& code
->block
== NULL
);
4489 if (! IS_EMPTY_STMT (stmt
))
4491 stmt
= build1_v (OMP_SECTION
, stmt
);
4492 gfc_add_expr_to_block (&body
, stmt
);
4495 stmt
= gfc_finish_block (&body
);
4497 stmt
= build2_loc (input_location
, OMP_SECTIONS
, void_type_node
, stmt
,
4499 gfc_add_expr_to_block (&block
, stmt
);
4501 return gfc_finish_block (&block
);
4505 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4507 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, clauses
, code
->loc
);
4508 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4509 stmt
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, stmt
,
4515 gfc_trans_omp_task (gfc_code
*code
)
4518 tree stmt
, omp_clauses
;
4520 gfc_start_block (&block
);
4521 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4524 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4525 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4526 stmt
= build2_loc (input_location
, OMP_TASK
, void_type_node
, stmt
,
4528 gfc_add_expr_to_block (&block
, stmt
);
4529 return gfc_finish_block (&block
);
4533 gfc_trans_omp_taskgroup (gfc_code
*code
)
4535 tree stmt
= gfc_trans_code (code
->block
->next
);
4536 return build1_loc (input_location
, OMP_TASKGROUP
, void_type_node
, stmt
);
4540 gfc_trans_omp_taskwait (void)
4542 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
4543 return build_call_expr_loc (input_location
, decl
, 0);
4547 gfc_trans_omp_taskyield (void)
4549 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
4550 return build_call_expr_loc (input_location
, decl
, 0);
4554 gfc_trans_omp_distribute (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
4557 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4558 tree stmt
, omp_clauses
= NULL_TREE
;
4560 gfc_start_block (&block
);
4561 if (clausesa
== NULL
)
4563 clausesa
= clausesa_buf
;
4564 gfc_split_omp_clauses (code
, clausesa
);
4568 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4572 case EXEC_OMP_DISTRIBUTE
:
4573 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4574 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4575 /* This is handled in gfc_trans_omp_do. */
4578 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4579 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4580 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4581 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
4582 if (TREE_CODE (stmt
) != BIND_EXPR
)
4583 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4587 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4588 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4589 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4590 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
4591 if (TREE_CODE (stmt
) != BIND_EXPR
)
4592 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4596 case EXEC_OMP_DISTRIBUTE_SIMD
:
4597 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4598 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4599 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4600 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4601 if (TREE_CODE (stmt
) != BIND_EXPR
)
4602 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4611 tree distribute
= make_node (OMP_DISTRIBUTE
);
4612 TREE_TYPE (distribute
) = void_type_node
;
4613 OMP_FOR_BODY (distribute
) = stmt
;
4614 OMP_FOR_CLAUSES (distribute
) = omp_clauses
;
4617 gfc_add_expr_to_block (&block
, stmt
);
4618 return gfc_finish_block (&block
);
4622 gfc_trans_omp_teams (gfc_code
*code
, gfc_omp_clauses
*clausesa
,
4626 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4628 bool combined
= true;
4630 gfc_start_block (&block
);
4631 if (clausesa
== NULL
)
4633 clausesa
= clausesa_buf
;
4634 gfc_split_omp_clauses (code
, clausesa
);
4638 = chainon (omp_clauses
,
4639 gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TEAMS
],
4643 case EXEC_OMP_TARGET_TEAMS
:
4644 case EXEC_OMP_TEAMS
:
4645 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4648 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4649 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4650 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DISTRIBUTE
, NULL
,
4651 &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4655 stmt
= gfc_trans_omp_distribute (code
, clausesa
);
4660 stmt
= build2_loc (input_location
, OMP_TEAMS
, void_type_node
, stmt
,
4663 OMP_TEAMS_COMBINED (stmt
) = 1;
4665 gfc_add_expr_to_block (&block
, stmt
);
4666 return gfc_finish_block (&block
);
4670 gfc_trans_omp_target (gfc_code
*code
)
4673 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
4674 tree stmt
, omp_clauses
= NULL_TREE
;
4676 gfc_start_block (&block
);
4677 gfc_split_omp_clauses (code
, clausesa
);
4680 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TARGET
],
4684 case EXEC_OMP_TARGET
:
4686 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4687 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4689 case EXEC_OMP_TARGET_PARALLEL
:
4693 gfc_start_block (&iblock
);
4695 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
4697 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4698 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4700 gfc_add_expr_to_block (&iblock
, stmt
);
4701 stmt
= gfc_finish_block (&iblock
);
4702 if (TREE_CODE (stmt
) != BIND_EXPR
)
4703 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4708 case EXEC_OMP_TARGET_PARALLEL_DO
:
4709 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
4710 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
4711 if (TREE_CODE (stmt
) != BIND_EXPR
)
4712 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4716 case EXEC_OMP_TARGET_SIMD
:
4717 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4718 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4719 if (TREE_CODE (stmt
) != BIND_EXPR
)
4720 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4726 && (clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
4727 || clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
))
4729 gfc_omp_clauses clausesb
;
4731 /* For combined !$omp target teams, the num_teams and
4732 thread_limit clauses are evaluated before entering the
4733 target construct. */
4734 memset (&clausesb
, '\0', sizeof (clausesb
));
4735 clausesb
.num_teams
= clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
;
4736 clausesb
.thread_limit
= clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
;
4737 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
= NULL
;
4738 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
= NULL
;
4740 = gfc_trans_omp_clauses (&block
, &clausesb
, code
->loc
);
4742 stmt
= gfc_trans_omp_teams (code
, clausesa
, teams_clauses
);
4747 stmt
= gfc_trans_omp_teams (code
, clausesa
, NULL_TREE
);
4749 if (TREE_CODE (stmt
) != BIND_EXPR
)
4750 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4757 stmt
= build2_loc (input_location
, OMP_TARGET
, void_type_node
, stmt
,
4759 if (code
->op
!= EXEC_OMP_TARGET
)
4760 OMP_TARGET_COMBINED (stmt
) = 1;
4762 gfc_add_expr_to_block (&block
, stmt
);
4763 return gfc_finish_block (&block
);
4767 gfc_trans_omp_taskloop (gfc_code
*code
)
4770 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
4771 tree stmt
, omp_clauses
= NULL_TREE
;
4773 gfc_start_block (&block
);
4774 gfc_split_omp_clauses (code
, clausesa
);
4777 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TASKLOOP
],
4781 case EXEC_OMP_TASKLOOP
:
4782 /* This is handled in gfc_trans_omp_do. */
4785 case EXEC_OMP_TASKLOOP_SIMD
:
4786 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4787 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4788 if (TREE_CODE (stmt
) != BIND_EXPR
)
4789 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4798 tree taskloop
= make_node (OMP_TASKLOOP
);
4799 TREE_TYPE (taskloop
) = void_type_node
;
4800 OMP_FOR_BODY (taskloop
) = stmt
;
4801 OMP_FOR_CLAUSES (taskloop
) = omp_clauses
;
4804 gfc_add_expr_to_block (&block
, stmt
);
4805 return gfc_finish_block (&block
);
4809 gfc_trans_omp_target_data (gfc_code
*code
)
4812 tree stmt
, omp_clauses
;
4814 gfc_start_block (&block
);
4815 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4817 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4818 stmt
= build2_loc (input_location
, OMP_TARGET_DATA
, void_type_node
, stmt
,
4820 gfc_add_expr_to_block (&block
, stmt
);
4821 return gfc_finish_block (&block
);
4825 gfc_trans_omp_target_enter_data (gfc_code
*code
)
4828 tree stmt
, omp_clauses
;
4830 gfc_start_block (&block
);
4831 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4833 stmt
= build1_loc (input_location
, OMP_TARGET_ENTER_DATA
, void_type_node
,
4835 gfc_add_expr_to_block (&block
, stmt
);
4836 return gfc_finish_block (&block
);
4840 gfc_trans_omp_target_exit_data (gfc_code
*code
)
4843 tree stmt
, omp_clauses
;
4845 gfc_start_block (&block
);
4846 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4848 stmt
= build1_loc (input_location
, OMP_TARGET_EXIT_DATA
, void_type_node
,
4850 gfc_add_expr_to_block (&block
, stmt
);
4851 return gfc_finish_block (&block
);
4855 gfc_trans_omp_target_update (gfc_code
*code
)
4858 tree stmt
, omp_clauses
;
4860 gfc_start_block (&block
);
4861 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4863 stmt
= build1_loc (input_location
, OMP_TARGET_UPDATE
, void_type_node
,
4865 gfc_add_expr_to_block (&block
, stmt
);
4866 return gfc_finish_block (&block
);
4870 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4872 tree res
, tmp
, stmt
;
4873 stmtblock_t block
, *pblock
= NULL
;
4874 stmtblock_t singleblock
;
4875 int saved_ompws_flags
;
4876 bool singleblock_in_progress
= false;
4877 /* True if previous gfc_code in workshare construct is not workshared. */
4878 bool prev_singleunit
;
4880 code
= code
->block
->next
;
4884 gfc_start_block (&block
);
4887 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
4888 prev_singleunit
= false;
4890 /* Translate statements one by one to trees until we reach
4891 the end of the workshare construct. Adjacent gfc_codes that
4892 are a single unit of work are clustered and encapsulated in a
4893 single OMP_SINGLE construct. */
4894 for (; code
; code
= code
->next
)
4896 if (code
->here
!= 0)
4898 res
= gfc_trans_label_here (code
);
4899 gfc_add_expr_to_block (pblock
, res
);
4902 /* No dependence analysis, use for clauses with wait.
4903 If this is the last gfc_code, use default omp_clauses. */
4904 if (code
->next
== NULL
&& clauses
->nowait
)
4905 ompws_flags
|= OMPWS_NOWAIT
;
4907 /* By default, every gfc_code is a single unit of work. */
4908 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
4909 ompws_flags
&= ~(OMPWS_SCALARIZER_WS
| OMPWS_SCALARIZER_BODY
);
4918 res
= gfc_trans_assign (code
);
4921 case EXEC_POINTER_ASSIGN
:
4922 res
= gfc_trans_pointer_assign (code
);
4925 case EXEC_INIT_ASSIGN
:
4926 res
= gfc_trans_init_assign (code
);
4930 res
= gfc_trans_forall (code
);
4934 res
= gfc_trans_where (code
);
4937 case EXEC_OMP_ATOMIC
:
4938 res
= gfc_trans_omp_directive (code
);
4941 case EXEC_OMP_PARALLEL
:
4942 case EXEC_OMP_PARALLEL_DO
:
4943 case EXEC_OMP_PARALLEL_SECTIONS
:
4944 case EXEC_OMP_PARALLEL_WORKSHARE
:
4945 case EXEC_OMP_CRITICAL
:
4946 saved_ompws_flags
= ompws_flags
;
4948 res
= gfc_trans_omp_directive (code
);
4949 ompws_flags
= saved_ompws_flags
;
4953 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
4956 gfc_set_backend_locus (&code
->loc
);
4958 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
4960 if (prev_singleunit
)
4962 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
4963 /* Add current gfc_code to single block. */
4964 gfc_add_expr_to_block (&singleblock
, res
);
4967 /* Finish single block and add it to pblock. */
4968 tmp
= gfc_finish_block (&singleblock
);
4969 tmp
= build2_loc (input_location
, OMP_SINGLE
,
4970 void_type_node
, tmp
, NULL_TREE
);
4971 gfc_add_expr_to_block (pblock
, tmp
);
4972 /* Add current gfc_code to pblock. */
4973 gfc_add_expr_to_block (pblock
, res
);
4974 singleblock_in_progress
= false;
4979 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
4981 /* Start single block. */
4982 gfc_init_block (&singleblock
);
4983 gfc_add_expr_to_block (&singleblock
, res
);
4984 singleblock_in_progress
= true;
4987 /* Add the new statement to the block. */
4988 gfc_add_expr_to_block (pblock
, res
);
4990 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
4994 /* Finish remaining SINGLE block, if we were in the middle of one. */
4995 if (singleblock_in_progress
)
4997 /* Finish single block and add it to pblock. */
4998 tmp
= gfc_finish_block (&singleblock
);
4999 tmp
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, tmp
,
5001 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
5003 gfc_add_expr_to_block (pblock
, tmp
);
5006 stmt
= gfc_finish_block (pblock
);
5007 if (TREE_CODE (stmt
) != BIND_EXPR
)
5009 if (!IS_EMPTY_STMT (stmt
))
5011 tree bindblock
= poplevel (1, 0);
5012 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
5020 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
5021 stmt
= gfc_trans_omp_barrier ();
5028 gfc_trans_oacc_declare (gfc_code
*code
)
5031 tree stmt
, oacc_clauses
;
5032 enum tree_code construct_code
;
5034 construct_code
= OACC_DATA
;
5036 gfc_start_block (&block
);
5038 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.oacc_declare
->clauses
,
5040 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
5041 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
5043 gfc_add_expr_to_block (&block
, stmt
);
5045 return gfc_finish_block (&block
);
5049 gfc_trans_oacc_directive (gfc_code
*code
)
5053 case EXEC_OACC_PARALLEL_LOOP
:
5054 case EXEC_OACC_KERNELS_LOOP
:
5055 return gfc_trans_oacc_combined_directive (code
);
5056 case EXEC_OACC_PARALLEL
:
5057 case EXEC_OACC_KERNELS
:
5058 case EXEC_OACC_DATA
:
5059 case EXEC_OACC_HOST_DATA
:
5060 return gfc_trans_oacc_construct (code
);
5061 case EXEC_OACC_LOOP
:
5062 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
5064 case EXEC_OACC_UPDATE
:
5065 case EXEC_OACC_CACHE
:
5066 case EXEC_OACC_ENTER_DATA
:
5067 case EXEC_OACC_EXIT_DATA
:
5068 return gfc_trans_oacc_executable_directive (code
);
5069 case EXEC_OACC_WAIT
:
5070 return gfc_trans_oacc_wait_directive (code
);
5071 case EXEC_OACC_ATOMIC
:
5072 return gfc_trans_omp_atomic (code
);
5073 case EXEC_OACC_DECLARE
:
5074 return gfc_trans_oacc_declare (code
);
5081 gfc_trans_omp_directive (gfc_code
*code
)
5085 case EXEC_OMP_ATOMIC
:
5086 return gfc_trans_omp_atomic (code
);
5087 case EXEC_OMP_BARRIER
:
5088 return gfc_trans_omp_barrier ();
5089 case EXEC_OMP_CANCEL
:
5090 return gfc_trans_omp_cancel (code
);
5091 case EXEC_OMP_CANCELLATION_POINT
:
5092 return gfc_trans_omp_cancellation_point (code
);
5093 case EXEC_OMP_CRITICAL
:
5094 return gfc_trans_omp_critical (code
);
5095 case EXEC_OMP_DISTRIBUTE
:
5098 case EXEC_OMP_TASKLOOP
:
5099 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
5101 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
5102 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
5103 case EXEC_OMP_DISTRIBUTE_SIMD
:
5104 return gfc_trans_omp_distribute (code
, NULL
);
5105 case EXEC_OMP_DO_SIMD
:
5106 return gfc_trans_omp_do_simd (code
, NULL
, NULL
, NULL_TREE
);
5107 case EXEC_OMP_FLUSH
:
5108 return gfc_trans_omp_flush ();
5109 case EXEC_OMP_MASTER
:
5110 return gfc_trans_omp_master (code
);
5111 case EXEC_OMP_ORDERED
:
5112 return gfc_trans_omp_ordered (code
);
5113 case EXEC_OMP_PARALLEL
:
5114 return gfc_trans_omp_parallel (code
);
5115 case EXEC_OMP_PARALLEL_DO
:
5116 return gfc_trans_omp_parallel_do (code
, NULL
, NULL
);
5117 case EXEC_OMP_PARALLEL_DO_SIMD
:
5118 return gfc_trans_omp_parallel_do_simd (code
, NULL
, NULL
);
5119 case EXEC_OMP_PARALLEL_SECTIONS
:
5120 return gfc_trans_omp_parallel_sections (code
);
5121 case EXEC_OMP_PARALLEL_WORKSHARE
:
5122 return gfc_trans_omp_parallel_workshare (code
);
5123 case EXEC_OMP_SECTIONS
:
5124 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
5125 case EXEC_OMP_SINGLE
:
5126 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
5127 case EXEC_OMP_TARGET
:
5128 case EXEC_OMP_TARGET_PARALLEL
:
5129 case EXEC_OMP_TARGET_PARALLEL_DO
:
5130 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
5131 case EXEC_OMP_TARGET_SIMD
:
5132 case EXEC_OMP_TARGET_TEAMS
:
5133 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
5134 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5135 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5136 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
5137 return gfc_trans_omp_target (code
);
5138 case EXEC_OMP_TARGET_DATA
:
5139 return gfc_trans_omp_target_data (code
);
5140 case EXEC_OMP_TARGET_ENTER_DATA
:
5141 return gfc_trans_omp_target_enter_data (code
);
5142 case EXEC_OMP_TARGET_EXIT_DATA
:
5143 return gfc_trans_omp_target_exit_data (code
);
5144 case EXEC_OMP_TARGET_UPDATE
:
5145 return gfc_trans_omp_target_update (code
);
5147 return gfc_trans_omp_task (code
);
5148 case EXEC_OMP_TASKGROUP
:
5149 return gfc_trans_omp_taskgroup (code
);
5150 case EXEC_OMP_TASKLOOP_SIMD
:
5151 return gfc_trans_omp_taskloop (code
);
5152 case EXEC_OMP_TASKWAIT
:
5153 return gfc_trans_omp_taskwait ();
5154 case EXEC_OMP_TASKYIELD
:
5155 return gfc_trans_omp_taskyield ();
5156 case EXEC_OMP_TEAMS
:
5157 case EXEC_OMP_TEAMS_DISTRIBUTE
:
5158 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5159 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5160 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
5161 return gfc_trans_omp_teams (code
, NULL
, NULL_TREE
);
5162 case EXEC_OMP_WORKSHARE
:
5163 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);
5170 gfc_trans_omp_declare_simd (gfc_namespace
*ns
)
5175 gfc_omp_declare_simd
*ods
;
5176 for (ods
= ns
->omp_declare_simd
; ods
; ods
= ods
->next
)
5178 tree c
= gfc_trans_omp_clauses (NULL
, ods
->clauses
, ods
->where
, true);
5179 tree fndecl
= ns
->proc_name
->backend_decl
;
5181 c
= tree_cons (NULL_TREE
, c
, NULL_TREE
);
5182 c
= build_tree_list (get_identifier ("omp declare simd"), c
);
5183 TREE_CHAIN (c
) = DECL_ATTRIBUTES (fndecl
);
5184 DECL_ATTRIBUTES (fndecl
) = c
;