1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2016 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "gimple-expr.h"
30 #include "stringpool.h"
31 #include "fold-const.h"
32 #include "gimplify.h" /* For create_tmp_var_raw. */
33 #include "trans-stmt.h"
34 #include "trans-types.h"
35 #include "trans-array.h"
36 #include "trans-const.h"
39 #include "gomp-constants.h"
43 /* True if OpenMP should privatize what this DECL points to rather
44 than the DECL itself. */
47 gfc_omp_privatize_by_reference (const_tree decl
)
49 tree type
= TREE_TYPE (decl
);
51 if (TREE_CODE (type
) == REFERENCE_TYPE
52 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
55 if (TREE_CODE (type
) == POINTER_TYPE
)
57 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
58 that have POINTER_TYPE type and aren't scalar pointers, scalar
59 allocatables, Cray pointees or C pointers are supposed to be
60 privatized by reference. */
61 if (GFC_DECL_GET_SCALAR_POINTER (decl
)
62 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
63 || GFC_DECL_CRAY_POINTEE (decl
)
64 || GFC_DECL_ASSOCIATE_VAR_P (decl
)
65 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
68 if (!DECL_ARTIFICIAL (decl
)
69 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
72 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
74 if (DECL_LANG_SPECIFIC (decl
)
75 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
82 /* True if OpenMP sharing attribute of DECL is predetermined. */
84 enum omp_clause_default_kind
85 gfc_omp_predetermined_sharing (tree decl
)
87 /* Associate names preserve the association established during ASSOCIATE.
88 As they are implemented either as pointers to the selector or array
89 descriptor and shouldn't really change in the ASSOCIATE region,
90 this decl can be either shared or firstprivate. If it is a pointer,
91 use firstprivate, as it is cheaper that way, otherwise make it shared. */
92 if (GFC_DECL_ASSOCIATE_VAR_P (decl
))
94 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
95 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
97 return OMP_CLAUSE_DEFAULT_SHARED
;
100 if (DECL_ARTIFICIAL (decl
)
101 && ! GFC_DECL_RESULT (decl
)
102 && ! (DECL_LANG_SPECIFIC (decl
)
103 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
104 return OMP_CLAUSE_DEFAULT_SHARED
;
106 /* Cray pointees shouldn't be listed in any clauses and should be
107 gimplified to dereference of the corresponding Cray pointer.
108 Make them all private, so that they are emitted in the debug
110 if (GFC_DECL_CRAY_POINTEE (decl
))
111 return OMP_CLAUSE_DEFAULT_PRIVATE
;
113 /* Assumed-size arrays are predetermined shared. */
114 if (TREE_CODE (decl
) == PARM_DECL
115 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
116 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
117 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
118 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
120 return OMP_CLAUSE_DEFAULT_SHARED
;
122 /* Dummy procedures aren't considered variables by OpenMP, thus are
123 disallowed in OpenMP clauses. They are represented as PARM_DECLs
124 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
125 to avoid complaining about their uses with default(none). */
126 if (TREE_CODE (decl
) == PARM_DECL
127 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
128 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
129 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
131 /* COMMON and EQUIVALENCE decls are shared. They
132 are only referenced through DECL_VALUE_EXPR of the variables
133 contained in them. If those are privatized, they will not be
134 gimplified to the COMMON or EQUIVALENCE decls. */
135 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
136 return OMP_CLAUSE_DEFAULT_SHARED
;
138 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
139 return OMP_CLAUSE_DEFAULT_SHARED
;
141 /* These are either array or derived parameters, or vtables.
142 In the former cases, the OpenMP standard doesn't consider them to be
143 variables at all (they can't be redefined), but they can nevertheless appear
144 in parallel/task regions and for default(none) purposes treat them as shared.
145 For vtables likely the same handling is desirable. */
146 if (VAR_P (decl
) && TREE_READONLY (decl
) && TREE_STATIC (decl
))
147 return OMP_CLAUSE_DEFAULT_SHARED
;
149 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
152 /* Return decl that should be used when reporting DEFAULT(NONE)
156 gfc_omp_report_decl (tree decl
)
158 if (DECL_ARTIFICIAL (decl
)
159 && DECL_LANG_SPECIFIC (decl
)
160 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
161 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
166 /* Return true if TYPE has any allocatable components. */
169 gfc_has_alloc_comps (tree type
, tree decl
)
173 if (POINTER_TYPE_P (type
))
175 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
176 type
= TREE_TYPE (type
);
177 else if (GFC_DECL_GET_SCALAR_POINTER (decl
))
181 if (GFC_DESCRIPTOR_TYPE_P (type
) || GFC_ARRAY_TYPE_P (type
))
182 type
= gfc_get_element_type (type
);
184 if (TREE_CODE (type
) != RECORD_TYPE
)
187 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
189 ftype
= TREE_TYPE (field
);
190 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
192 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
193 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
195 if (gfc_has_alloc_comps (ftype
, field
))
201 /* Return true if DECL in private clause needs
202 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
204 gfc_omp_private_outer_ref (tree decl
)
206 tree type
= TREE_TYPE (decl
);
208 if (gfc_omp_privatize_by_reference (decl
))
209 type
= TREE_TYPE (type
);
211 if (GFC_DESCRIPTOR_TYPE_P (type
)
212 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
215 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
218 if (gfc_has_alloc_comps (type
, decl
))
224 /* Callback for gfc_omp_unshare_expr. */
227 gfc_omp_unshare_expr_r (tree
*tp
, int *walk_subtrees
, void *)
230 enum tree_code code
= TREE_CODE (t
);
232 /* Stop at types, decls, constants like copy_tree_r. */
233 if (TREE_CODE_CLASS (code
) == tcc_type
234 || TREE_CODE_CLASS (code
) == tcc_declaration
235 || TREE_CODE_CLASS (code
) == tcc_constant
238 else if (handled_component_p (t
)
239 || TREE_CODE (t
) == MEM_REF
)
241 *tp
= unshare_expr (t
);
248 /* Unshare in expr anything that the FE which normally doesn't
249 care much about tree sharing (because during gimplification
250 everything is unshared) could cause problems with tree sharing
251 at omp-low.c time. */
254 gfc_omp_unshare_expr (tree expr
)
256 walk_tree (&expr
, gfc_omp_unshare_expr_r
, NULL
, NULL
);
/* Kinds of traversal performed by gfc_walk_alloc_comps over the
   allocatable components of a derived type.  */
enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,		/* Deallocate components.  */
  WALK_ALLOC_COMPS_DEFAULT_CTOR,	/* Allocate, don't copy payload.  */
  WALK_ALLOC_COMPS_COPY_CTOR		/* Allocate and copy payload.  */
};
267 /* Handle allocatable components in OpenMP clauses. */
270 gfc_walk_alloc_comps (tree decl
, tree dest
, tree var
,
271 enum walk_alloc_comps kind
)
273 stmtblock_t block
, tmpblock
;
274 tree type
= TREE_TYPE (decl
), then_b
, tem
, field
;
275 gfc_init_block (&block
);
277 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
279 if (GFC_DESCRIPTOR_TYPE_P (type
))
281 gfc_init_block (&tmpblock
);
282 tem
= gfc_full_array_size (&tmpblock
, decl
,
283 GFC_TYPE_ARRAY_RANK (type
));
284 then_b
= gfc_finish_block (&tmpblock
);
285 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (then_b
));
286 tem
= gfc_omp_unshare_expr (tem
);
287 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
288 gfc_array_index_type
, tem
,
293 if (!TYPE_DOMAIN (type
)
294 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
295 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
296 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
298 tem
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
299 TYPE_SIZE_UNIT (type
),
300 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
301 tem
= size_binop (MINUS_EXPR
, tem
, size_one_node
);
304 tem
= array_type_nelts (type
);
305 tem
= fold_convert (gfc_array_index_type
, tem
);
308 tree nelems
= gfc_evaluate_now (tem
, &block
);
309 tree index
= gfc_create_var (gfc_array_index_type
, "S");
311 gfc_init_block (&tmpblock
);
312 tem
= gfc_conv_array_data (decl
);
313 tree declvar
= build_fold_indirect_ref_loc (input_location
, tem
);
314 tree declvref
= gfc_build_array_ref (declvar
, index
, NULL
);
315 tree destvar
, destvref
= NULL_TREE
;
318 tem
= gfc_conv_array_data (dest
);
319 destvar
= build_fold_indirect_ref_loc (input_location
, tem
);
320 destvref
= gfc_build_array_ref (destvar
, index
, NULL
);
322 gfc_add_expr_to_block (&tmpblock
,
323 gfc_walk_alloc_comps (declvref
, destvref
,
327 gfc_init_loopinfo (&loop
);
329 loop
.from
[0] = gfc_index_zero_node
;
330 loop
.loopvar
[0] = index
;
332 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
333 gfc_add_block_to_block (&block
, &loop
.pre
);
334 return gfc_finish_block (&block
);
336 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var
))
338 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
340 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
341 type
= TREE_TYPE (decl
);
344 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
);
345 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
347 tree ftype
= TREE_TYPE (field
);
348 tree declf
, destf
= NULL_TREE
;
349 bool has_alloc_comps
= gfc_has_alloc_comps (ftype
, field
);
350 if ((!GFC_DESCRIPTOR_TYPE_P (ftype
)
351 || GFC_TYPE_ARRAY_AKIND (ftype
) != GFC_ARRAY_ALLOCATABLE
)
352 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field
)
355 declf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
356 decl
, field
, NULL_TREE
);
358 destf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
359 dest
, field
, NULL_TREE
);
364 case WALK_ALLOC_COMPS_DTOR
:
366 case WALK_ALLOC_COMPS_DEFAULT_CTOR
:
367 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
368 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
370 gfc_add_modify (&block
, unshare_expr (destf
),
371 unshare_expr (declf
));
372 tem
= gfc_duplicate_allocatable_nocopy
373 (destf
, declf
, ftype
,
374 GFC_TYPE_ARRAY_RANK (ftype
));
376 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
377 tem
= gfc_duplicate_allocatable_nocopy (destf
, declf
, ftype
, 0);
379 case WALK_ALLOC_COMPS_COPY_CTOR
:
380 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
381 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
382 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
,
383 GFC_TYPE_ARRAY_RANK (ftype
),
385 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
386 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
, 0,
391 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
394 gfc_init_block (&tmpblock
);
395 gfc_add_expr_to_block (&tmpblock
,
396 gfc_walk_alloc_comps (declf
, destf
,
398 then_b
= gfc_finish_block (&tmpblock
);
399 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
400 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
401 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
402 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
403 tem
= unshare_expr (declf
);
408 tem
= fold_convert (pvoid_type_node
, tem
);
409 tem
= fold_build2_loc (input_location
, NE_EXPR
,
410 boolean_type_node
, tem
,
412 then_b
= build3_loc (input_location
, COND_EXPR
, void_type_node
,
414 build_empty_stmt (input_location
));
416 gfc_add_expr_to_block (&block
, then_b
);
418 if (kind
== WALK_ALLOC_COMPS_DTOR
)
420 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
421 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
423 tem
= gfc_trans_dealloc_allocated (unshare_expr (declf
),
425 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
427 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
429 tem
= gfc_call_free (unshare_expr (declf
));
430 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
435 return gfc_finish_block (&block
);
438 /* Return code to initialize DECL with its default constructor, or
439 NULL if there's nothing to do. */
442 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
444 tree type
= TREE_TYPE (decl
), size
, ptr
, cond
, then_b
, else_b
;
445 stmtblock_t block
, cond_block
;
447 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_PRIVATE
448 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LASTPRIVATE
449 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
450 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_REDUCTION
);
452 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
453 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
454 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
456 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
459 gfc_start_block (&block
);
460 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
461 OMP_CLAUSE_DECL (clause
),
462 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
463 gfc_add_expr_to_block (&block
, tem
);
464 return gfc_finish_block (&block
);
469 gcc_assert (outer
!= NULL_TREE
);
471 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
472 "not currently allocated" allocation status if outer
473 array is "not currently allocated", otherwise should be allocated. */
474 gfc_start_block (&block
);
476 gfc_init_block (&cond_block
);
478 if (GFC_DESCRIPTOR_TYPE_P (type
))
480 gfc_add_modify (&cond_block
, decl
, outer
);
481 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
482 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
483 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
485 gfc_conv_descriptor_lbound_get (decl
, rank
));
486 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
487 size
, gfc_index_one_node
);
488 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
489 size
= fold_build2_loc (input_location
, MULT_EXPR
,
490 gfc_array_index_type
, size
,
491 gfc_conv_descriptor_stride_get (decl
, rank
));
492 tree esize
= fold_convert (gfc_array_index_type
,
493 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
494 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
496 size
= unshare_expr (size
);
497 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
501 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
502 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
503 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
504 if (GFC_DESCRIPTOR_TYPE_P (type
))
505 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
), ptr
);
507 gfc_add_modify (&cond_block
, unshare_expr (decl
),
508 fold_convert (TREE_TYPE (decl
), ptr
));
509 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
511 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
512 OMP_CLAUSE_DECL (clause
),
513 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
514 gfc_add_expr_to_block (&cond_block
, tem
);
516 then_b
= gfc_finish_block (&cond_block
);
518 /* Reduction clause requires allocated ALLOCATABLE. */
519 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_REDUCTION
)
521 gfc_init_block (&cond_block
);
522 if (GFC_DESCRIPTOR_TYPE_P (type
))
523 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
),
526 gfc_add_modify (&cond_block
, unshare_expr (decl
),
527 build_zero_cst (TREE_TYPE (decl
)));
528 else_b
= gfc_finish_block (&cond_block
);
530 tree tem
= fold_convert (pvoid_type_node
,
531 GFC_DESCRIPTOR_TYPE_P (type
)
532 ? gfc_conv_descriptor_data_get (outer
) : outer
);
533 tem
= unshare_expr (tem
);
534 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
535 tem
, null_pointer_node
);
536 gfc_add_expr_to_block (&block
,
537 build3_loc (input_location
, COND_EXPR
,
538 void_type_node
, cond
, then_b
,
542 gfc_add_expr_to_block (&block
, then_b
);
544 return gfc_finish_block (&block
);
547 /* Build and return code for a copy constructor from SRC to DEST. */
550 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
552 tree type
= TREE_TYPE (dest
), ptr
, size
, call
;
553 tree cond
, then_b
, else_b
;
554 stmtblock_t block
, cond_block
;
556 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
557 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
559 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
560 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
561 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
563 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
565 gfc_start_block (&block
);
566 gfc_add_modify (&block
, dest
, src
);
567 tree tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
568 WALK_ALLOC_COMPS_COPY_CTOR
);
569 gfc_add_expr_to_block (&block
, tem
);
570 return gfc_finish_block (&block
);
573 return build2_v (MODIFY_EXPR
, dest
, src
);
576 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
577 and copied from SRC. */
578 gfc_start_block (&block
);
580 gfc_init_block (&cond_block
);
582 gfc_add_modify (&cond_block
, dest
, src
);
583 if (GFC_DESCRIPTOR_TYPE_P (type
))
585 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
586 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
587 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
589 gfc_conv_descriptor_lbound_get (dest
, rank
));
590 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
591 size
, gfc_index_one_node
);
592 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
593 size
= fold_build2_loc (input_location
, MULT_EXPR
,
594 gfc_array_index_type
, size
,
595 gfc_conv_descriptor_stride_get (dest
, rank
));
596 tree esize
= fold_convert (gfc_array_index_type
,
597 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
598 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
600 size
= unshare_expr (size
);
601 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
605 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
606 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
607 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
608 if (GFC_DESCRIPTOR_TYPE_P (type
))
609 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
), ptr
);
611 gfc_add_modify (&cond_block
, unshare_expr (dest
),
612 fold_convert (TREE_TYPE (dest
), ptr
));
614 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
615 ? gfc_conv_descriptor_data_get (src
) : src
;
616 srcptr
= unshare_expr (srcptr
);
617 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
618 call
= build_call_expr_loc (input_location
,
619 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
621 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
622 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
624 tree tem
= gfc_walk_alloc_comps (src
, dest
,
625 OMP_CLAUSE_DECL (clause
),
626 WALK_ALLOC_COMPS_COPY_CTOR
);
627 gfc_add_expr_to_block (&cond_block
, tem
);
629 then_b
= gfc_finish_block (&cond_block
);
631 gfc_init_block (&cond_block
);
632 if (GFC_DESCRIPTOR_TYPE_P (type
))
633 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
),
636 gfc_add_modify (&cond_block
, unshare_expr (dest
),
637 build_zero_cst (TREE_TYPE (dest
)));
638 else_b
= gfc_finish_block (&cond_block
);
640 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
641 unshare_expr (srcptr
), null_pointer_node
);
642 gfc_add_expr_to_block (&block
,
643 build3_loc (input_location
, COND_EXPR
,
644 void_type_node
, cond
, then_b
, else_b
));
646 return gfc_finish_block (&block
);
649 /* Similarly, except use an intrinsic or pointer assignment operator
653 gfc_omp_clause_assign_op (tree clause
, tree dest
, tree src
)
655 tree type
= TREE_TYPE (dest
), ptr
, size
, call
, nonalloc
;
656 tree cond
, then_b
, else_b
;
657 stmtblock_t block
, cond_block
, cond_block2
, inner_block
;
659 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
660 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
661 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
663 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
665 gfc_start_block (&block
);
666 /* First dealloc any allocatable components in DEST. */
667 tree tem
= gfc_walk_alloc_comps (dest
, NULL_TREE
,
668 OMP_CLAUSE_DECL (clause
),
669 WALK_ALLOC_COMPS_DTOR
);
670 gfc_add_expr_to_block (&block
, tem
);
671 /* Then copy over toplevel data. */
672 gfc_add_modify (&block
, dest
, src
);
673 /* Finally allocate any allocatable components and copy. */
674 tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
675 WALK_ALLOC_COMPS_COPY_CTOR
);
676 gfc_add_expr_to_block (&block
, tem
);
677 return gfc_finish_block (&block
);
680 return build2_v (MODIFY_EXPR
, dest
, src
);
683 gfc_start_block (&block
);
685 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
687 then_b
= gfc_walk_alloc_comps (dest
, NULL_TREE
, OMP_CLAUSE_DECL (clause
),
688 WALK_ALLOC_COMPS_DTOR
);
689 tree tem
= fold_convert (pvoid_type_node
,
690 GFC_DESCRIPTOR_TYPE_P (type
)
691 ? gfc_conv_descriptor_data_get (dest
) : dest
);
692 tem
= unshare_expr (tem
);
693 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
694 tem
, null_pointer_node
);
695 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
696 then_b
, build_empty_stmt (input_location
));
697 gfc_add_expr_to_block (&block
, tem
);
700 gfc_init_block (&cond_block
);
702 if (GFC_DESCRIPTOR_TYPE_P (type
))
704 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
705 size
= gfc_conv_descriptor_ubound_get (src
, rank
);
706 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
708 gfc_conv_descriptor_lbound_get (src
, rank
));
709 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
710 size
, gfc_index_one_node
);
711 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
712 size
= fold_build2_loc (input_location
, MULT_EXPR
,
713 gfc_array_index_type
, size
,
714 gfc_conv_descriptor_stride_get (src
, rank
));
715 tree esize
= fold_convert (gfc_array_index_type
,
716 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
717 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
719 size
= unshare_expr (size
);
720 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
724 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
725 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
727 tree destptr
= GFC_DESCRIPTOR_TYPE_P (type
)
728 ? gfc_conv_descriptor_data_get (dest
) : dest
;
729 destptr
= unshare_expr (destptr
);
730 destptr
= fold_convert (pvoid_type_node
, destptr
);
731 gfc_add_modify (&cond_block
, ptr
, destptr
);
733 nonalloc
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
,
734 destptr
, null_pointer_node
);
736 if (GFC_DESCRIPTOR_TYPE_P (type
))
739 for (i
= 0; i
< GFC_TYPE_ARRAY_RANK (type
); i
++)
741 tree rank
= gfc_rank_cst
[i
];
742 tree tem
= gfc_conv_descriptor_ubound_get (src
, rank
);
743 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
744 gfc_array_index_type
, tem
,
745 gfc_conv_descriptor_lbound_get (src
, rank
));
746 tem
= fold_build2_loc (input_location
, PLUS_EXPR
,
747 gfc_array_index_type
, tem
,
748 gfc_conv_descriptor_lbound_get (dest
, rank
));
749 tem
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
750 tem
, gfc_conv_descriptor_ubound_get (dest
,
752 cond
= fold_build2_loc (input_location
, TRUTH_ORIF_EXPR
,
753 boolean_type_node
, cond
, tem
);
757 gfc_init_block (&cond_block2
);
759 if (GFC_DESCRIPTOR_TYPE_P (type
))
761 gfc_init_block (&inner_block
);
762 gfc_allocate_using_malloc (&inner_block
, ptr
, size
, NULL_TREE
);
763 then_b
= gfc_finish_block (&inner_block
);
765 gfc_init_block (&inner_block
);
766 gfc_add_modify (&inner_block
, ptr
,
767 gfc_call_realloc (&inner_block
, ptr
, size
));
768 else_b
= gfc_finish_block (&inner_block
);
770 gfc_add_expr_to_block (&cond_block2
,
771 build3_loc (input_location
, COND_EXPR
,
773 unshare_expr (nonalloc
),
775 gfc_add_modify (&cond_block2
, dest
, src
);
776 gfc_conv_descriptor_data_set (&cond_block2
, unshare_expr (dest
), ptr
);
780 gfc_allocate_using_malloc (&cond_block2
, ptr
, size
, NULL_TREE
);
781 gfc_add_modify (&cond_block2
, unshare_expr (dest
),
782 fold_convert (type
, ptr
));
784 then_b
= gfc_finish_block (&cond_block2
);
785 else_b
= build_empty_stmt (input_location
);
787 gfc_add_expr_to_block (&cond_block
,
788 build3_loc (input_location
, COND_EXPR
,
789 void_type_node
, unshare_expr (cond
),
792 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
793 ? gfc_conv_descriptor_data_get (src
) : src
;
794 srcptr
= unshare_expr (srcptr
);
795 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
796 call
= build_call_expr_loc (input_location
,
797 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
799 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
800 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
802 tree tem
= gfc_walk_alloc_comps (src
, dest
,
803 OMP_CLAUSE_DECL (clause
),
804 WALK_ALLOC_COMPS_COPY_CTOR
);
805 gfc_add_expr_to_block (&cond_block
, tem
);
807 then_b
= gfc_finish_block (&cond_block
);
809 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_COPYIN
)
811 gfc_init_block (&cond_block
);
812 if (GFC_DESCRIPTOR_TYPE_P (type
))
813 gfc_add_expr_to_block (&cond_block
,
814 gfc_trans_dealloc_allocated (unshare_expr (dest
),
818 destptr
= gfc_evaluate_now (destptr
, &cond_block
);
819 gfc_add_expr_to_block (&cond_block
, gfc_call_free (destptr
));
820 gfc_add_modify (&cond_block
, unshare_expr (dest
),
821 build_zero_cst (TREE_TYPE (dest
)));
823 else_b
= gfc_finish_block (&cond_block
);
825 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
826 unshare_expr (srcptr
), null_pointer_node
);
827 gfc_add_expr_to_block (&block
,
828 build3_loc (input_location
, COND_EXPR
,
829 void_type_node
, cond
,
833 gfc_add_expr_to_block (&block
, then_b
);
835 return gfc_finish_block (&block
);
839 gfc_omp_linear_clause_add_loop (stmtblock_t
*block
, tree dest
, tree src
,
840 tree add
, tree nelems
)
842 stmtblock_t tmpblock
;
843 tree desta
, srca
, index
= gfc_create_var (gfc_array_index_type
, "S");
844 nelems
= gfc_evaluate_now (nelems
, block
);
846 gfc_init_block (&tmpblock
);
847 if (TREE_CODE (TREE_TYPE (dest
)) == ARRAY_TYPE
)
849 desta
= gfc_build_array_ref (dest
, index
, NULL
);
850 srca
= gfc_build_array_ref (src
, index
, NULL
);
854 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest
)));
855 tree idx
= fold_build2 (MULT_EXPR
, sizetype
,
856 fold_convert (sizetype
, index
),
857 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest
))));
858 desta
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
859 TREE_TYPE (dest
), dest
,
861 srca
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
862 TREE_TYPE (src
), src
,
865 gfc_add_modify (&tmpblock
, desta
,
866 fold_build2 (PLUS_EXPR
, TREE_TYPE (desta
),
870 gfc_init_loopinfo (&loop
);
872 loop
.from
[0] = gfc_index_zero_node
;
873 loop
.loopvar
[0] = index
;
875 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
876 gfc_add_block_to_block (block
, &loop
.pre
);
879 /* Build and return code for a constructor of DEST that initializes
880 it to SRC plus ADD (ADD is scalar integer). */
883 gfc_omp_clause_linear_ctor (tree clause
, tree dest
, tree src
, tree add
)
885 tree type
= TREE_TYPE (dest
), ptr
, size
, nelems
= NULL_TREE
;
888 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
890 gfc_start_block (&block
);
891 add
= gfc_evaluate_now (add
, &block
);
893 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
894 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
895 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
897 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
898 if (!TYPE_DOMAIN (type
)
899 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
900 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
901 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
903 nelems
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
904 TYPE_SIZE_UNIT (type
),
905 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
906 nelems
= size_binop (MINUS_EXPR
, nelems
, size_one_node
);
909 nelems
= array_type_nelts (type
);
910 nelems
= fold_convert (gfc_array_index_type
, nelems
);
912 gfc_omp_linear_clause_add_loop (&block
, dest
, src
, add
, nelems
);
913 return gfc_finish_block (&block
);
916 /* Allocatable arrays in LINEAR clauses need to be allocated
917 and copied from SRC. */
918 gfc_add_modify (&block
, dest
, src
);
919 if (GFC_DESCRIPTOR_TYPE_P (type
))
921 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
922 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
923 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
925 gfc_conv_descriptor_lbound_get (dest
, rank
));
926 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
927 size
, gfc_index_one_node
);
928 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
929 size
= fold_build2_loc (input_location
, MULT_EXPR
,
930 gfc_array_index_type
, size
,
931 gfc_conv_descriptor_stride_get (dest
, rank
));
932 tree esize
= fold_convert (gfc_array_index_type
,
933 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
934 nelems
= gfc_evaluate_now (unshare_expr (size
), &block
);
935 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
936 nelems
, unshare_expr (esize
));
937 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
939 nelems
= fold_build2_loc (input_location
, MINUS_EXPR
,
940 gfc_array_index_type
, nelems
,
944 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
945 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
946 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
947 if (GFC_DESCRIPTOR_TYPE_P (type
))
949 gfc_conv_descriptor_data_set (&block
, unshare_expr (dest
), ptr
);
950 tree etype
= gfc_get_element_type (type
);
951 ptr
= fold_convert (build_pointer_type (etype
), ptr
);
952 tree srcptr
= gfc_conv_descriptor_data_get (unshare_expr (src
));
953 srcptr
= fold_convert (build_pointer_type (etype
), srcptr
);
954 gfc_omp_linear_clause_add_loop (&block
, ptr
, srcptr
, add
, nelems
);
958 gfc_add_modify (&block
, unshare_expr (dest
),
959 fold_convert (TREE_TYPE (dest
), ptr
));
960 ptr
= fold_convert (TREE_TYPE (dest
), ptr
);
961 tree dstm
= build_fold_indirect_ref (ptr
);
962 tree srcm
= build_fold_indirect_ref (unshare_expr (src
));
963 gfc_add_modify (&block
, dstm
,
964 fold_build2 (PLUS_EXPR
, TREE_TYPE (add
), srcm
, add
));
966 return gfc_finish_block (&block
);
969 /* Build and return code destructing DECL. Return NULL if nothing
973 gfc_omp_clause_dtor (tree clause
, tree decl
)
975 tree type
= TREE_TYPE (decl
), tem
;
977 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
978 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
979 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
981 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
982 return gfc_walk_alloc_comps (decl
, NULL_TREE
,
983 OMP_CLAUSE_DECL (clause
),
984 WALK_ALLOC_COMPS_DTOR
);
988 if (GFC_DESCRIPTOR_TYPE_P (type
))
989 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
990 to be deallocated if they were allocated. */
991 tem
= gfc_trans_dealloc_allocated (decl
, false, NULL
);
993 tem
= gfc_call_free (decl
);
994 tem
= gfc_omp_unshare_expr (tem
);
996 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1001 gfc_init_block (&block
);
1002 gfc_add_expr_to_block (&block
,
1003 gfc_walk_alloc_comps (decl
, NULL_TREE
,
1004 OMP_CLAUSE_DECL (clause
),
1005 WALK_ALLOC_COMPS_DTOR
));
1006 gfc_add_expr_to_block (&block
, tem
);
1007 then_b
= gfc_finish_block (&block
);
1009 tem
= fold_convert (pvoid_type_node
,
1010 GFC_DESCRIPTOR_TYPE_P (type
)
1011 ? gfc_conv_descriptor_data_get (decl
) : decl
);
1012 tem
= unshare_expr (tem
);
1013 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1014 tem
, null_pointer_node
);
1015 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1016 then_b
, build_empty_stmt (input_location
));
1023 gfc_omp_finish_clause (tree c
, gimple_seq
*pre_p
)
1025 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
1028 tree decl
= OMP_CLAUSE_DECL (c
);
1029 tree c2
= NULL_TREE
, c3
= NULL_TREE
, c4
= NULL_TREE
;
1030 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
1032 if (!gfc_omp_privatize_by_reference (decl
)
1033 && !GFC_DECL_GET_SCALAR_POINTER (decl
)
1034 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1035 && !GFC_DECL_CRAY_POINTEE (decl
)
1036 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
1038 tree orig_decl
= decl
;
1039 c4
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1040 OMP_CLAUSE_SET_MAP_KIND (c4
, GOMP_MAP_POINTER
);
1041 OMP_CLAUSE_DECL (c4
) = decl
;
1042 OMP_CLAUSE_SIZE (c4
) = size_int (0);
1043 decl
= build_fold_indirect_ref (decl
);
1044 OMP_CLAUSE_DECL (c
) = decl
;
1045 OMP_CLAUSE_SIZE (c
) = NULL_TREE
;
1046 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1047 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1048 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1050 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1051 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1052 OMP_CLAUSE_DECL (c3
) = unshare_expr (decl
);
1053 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1054 decl
= build_fold_indirect_ref (decl
);
1055 OMP_CLAUSE_DECL (c
) = decl
;
1058 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1061 gfc_start_block (&block
);
1062 tree type
= TREE_TYPE (decl
);
1063 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1064 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
1065 ptr
= build_fold_indirect_ref (ptr
);
1066 OMP_CLAUSE_DECL (c
) = ptr
;
1067 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1068 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_TO_PSET
);
1069 OMP_CLAUSE_DECL (c2
) = decl
;
1070 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (type
);
1071 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1072 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1073 OMP_CLAUSE_DECL (c3
) = gfc_conv_descriptor_data_get (decl
);
1074 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1075 tree size
= create_tmp_var (gfc_array_index_type
);
1076 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
1077 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
1078 if (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
1079 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
)
1081 stmtblock_t cond_block
;
1082 tree tem
, then_b
, else_b
, zero
, cond
;
1084 gfc_init_block (&cond_block
);
1085 tem
= gfc_full_array_size (&cond_block
, decl
,
1086 GFC_TYPE_ARRAY_RANK (type
));
1087 gfc_add_modify (&cond_block
, size
, tem
);
1088 gfc_add_modify (&cond_block
, size
,
1089 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1091 then_b
= gfc_finish_block (&cond_block
);
1092 gfc_init_block (&cond_block
);
1093 zero
= build_int_cst (gfc_array_index_type
, 0);
1094 gfc_add_modify (&cond_block
, size
, zero
);
1095 else_b
= gfc_finish_block (&cond_block
);
1096 tem
= gfc_conv_descriptor_data_get (decl
);
1097 tem
= fold_convert (pvoid_type_node
, tem
);
1098 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1099 boolean_type_node
, tem
, null_pointer_node
);
1100 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1101 void_type_node
, cond
,
1106 gfc_add_modify (&block
, size
,
1107 gfc_full_array_size (&block
, decl
,
1108 GFC_TYPE_ARRAY_RANK (type
)));
1109 gfc_add_modify (&block
, size
,
1110 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1113 OMP_CLAUSE_SIZE (c
) = size
;
1114 tree stmt
= gfc_finish_block (&block
);
1115 gimplify_and_add (stmt
, pre_p
);
1118 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
1120 = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
1121 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1124 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (last
);
1125 OMP_CLAUSE_CHAIN (last
) = c2
;
1130 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (last
);
1131 OMP_CLAUSE_CHAIN (last
) = c3
;
1136 OMP_CLAUSE_CHAIN (c4
) = OMP_CLAUSE_CHAIN (last
);
1137 OMP_CLAUSE_CHAIN (last
) = c4
;
1143 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1144 disregarded in OpenMP construct, because it is going to be
1145 remapped during OpenMP lowering. SHARED is true if DECL
1146 is going to be shared, false if it is going to be privatized. */
1149 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
1151 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1152 && DECL_HAS_VALUE_EXPR_P (decl
))
1154 tree value
= DECL_VALUE_EXPR (decl
);
1156 if (TREE_CODE (value
) == COMPONENT_REF
1157 && VAR_P (TREE_OPERAND (value
, 0))
1158 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1160 /* If variable in COMMON or EQUIVALENCE is privatized, return
1161 true, as just that variable is supposed to be privatized,
1162 not the whole COMMON or whole EQUIVALENCE.
1163 For shared variables in COMMON or EQUIVALENCE, let them be
1164 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1165 from the same COMMON or EQUIVALENCE just one sharing of the
1166 whole COMMON or EQUIVALENCE is enough. */
1171 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
1177 /* Return true if DECL that is shared iff SHARED is true should
1178 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1182 gfc_omp_private_debug_clause (tree decl
, bool shared
)
1184 if (GFC_DECL_CRAY_POINTEE (decl
))
1187 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1188 && DECL_HAS_VALUE_EXPR_P (decl
))
1190 tree value
= DECL_VALUE_EXPR (decl
);
1192 if (TREE_CODE (value
) == COMPONENT_REF
1193 && VAR_P (TREE_OPERAND (value
, 0))
1194 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1201 /* Register language specific type size variables as potentially OpenMP
1202 firstprivate variables. */
1205 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
1207 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
1211 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
1212 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
1214 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
1215 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
1216 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
1218 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
1219 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
1225 gfc_trans_add_clause (tree node
, tree tail
)
1227 OMP_CLAUSE_CHAIN (node
) = tail
;
1232 gfc_trans_omp_variable (gfc_symbol
*sym
, bool declare_simd
)
1237 gfc_symbol
*proc_sym
;
1238 gfc_formal_arglist
*f
;
1240 gcc_assert (sym
->attr
.dummy
);
1241 proc_sym
= sym
->ns
->proc_name
;
1242 if (proc_sym
->attr
.entry_master
)
1244 if (gfc_return_by_reference (proc_sym
))
1247 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1250 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1256 return build_int_cst (integer_type_node
, cnt
);
1259 tree t
= gfc_get_symbol_decl (sym
);
1263 bool alternate_entry
;
1266 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1267 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1268 && sym
->result
== sym
;
1269 entry_master
= sym
->attr
.result
1270 && sym
->ns
->proc_name
->attr
.entry_master
1271 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1272 parent_decl
= current_function_decl
1273 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1275 if ((t
== parent_decl
&& return_value
)
1276 || (sym
->ns
&& sym
->ns
->proc_name
1277 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1278 && (alternate_entry
|| entry_master
)))
1283 /* Special case for assigning the return value of a function.
1284 Self recursive functions must have an explicit return value. */
1285 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1286 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1288 /* Similarly for alternate entry points. */
1289 else if (alternate_entry
1290 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1293 gfc_entry_list
*el
= NULL
;
1295 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1298 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1303 else if (entry_master
1304 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1306 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1312 gfc_trans_omp_variable_list (enum omp_clause_code code
,
1313 gfc_omp_namelist
*namelist
, tree list
,
1316 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1317 if (namelist
->sym
->attr
.referenced
|| declare_simd
)
1319 tree t
= gfc_trans_omp_variable (namelist
->sym
, declare_simd
);
1320 if (t
!= error_mark_node
)
1322 tree node
= build_omp_clause (input_location
, code
);
1323 OMP_CLAUSE_DECL (node
) = t
;
1324 list
= gfc_trans_add_clause (node
, list
);
1330 struct omp_udr_find_orig_data
1332 gfc_omp_udr
*omp_udr
;
1337 omp_udr_find_orig (gfc_expr
**e
, int *walk_subtrees ATTRIBUTE_UNUSED
,
1340 struct omp_udr_find_orig_data
*cd
= (struct omp_udr_find_orig_data
*) data
;
1341 if ((*e
)->expr_type
== EXPR_VARIABLE
1342 && (*e
)->symtree
->n
.sym
== cd
->omp_udr
->omp_orig
)
1343 cd
->omp_orig_seen
= true;
1349 gfc_trans_omp_array_reduction_or_udr (tree c
, gfc_omp_namelist
*n
, locus where
)
1351 gfc_symbol
*sym
= n
->sym
;
1352 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
1353 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
1354 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
1355 gfc_symbol omp_var_copy
[4];
1356 gfc_expr
*e1
, *e2
, *e3
, *e4
;
1358 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
1359 locus old_loc
= gfc_current_locus
;
1362 gfc_omp_udr
*udr
= n
->udr
? n
->udr
->udr
: NULL
;
1364 decl
= OMP_CLAUSE_DECL (c
);
1365 gfc_current_locus
= where
;
1366 type
= TREE_TYPE (decl
);
1367 outer_decl
= create_tmp_var_raw (type
);
1368 if (TREE_CODE (decl
) == PARM_DECL
1369 && TREE_CODE (type
) == REFERENCE_TYPE
1370 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
1371 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
1373 decl
= build_fold_indirect_ref (decl
);
1374 type
= TREE_TYPE (type
);
1377 /* Create a fake symbol for init value. */
1378 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
1379 init_val_sym
.ns
= sym
->ns
;
1380 init_val_sym
.name
= sym
->name
;
1381 init_val_sym
.ts
= sym
->ts
;
1382 init_val_sym
.attr
.referenced
= 1;
1383 init_val_sym
.declared_at
= where
;
1384 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
1385 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1386 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
1387 else if (udr
->initializer_ns
)
1388 backend_decl
= NULL
;
1390 switch (sym
->ts
.type
)
1396 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
1399 backend_decl
= NULL_TREE
;
1402 init_val_sym
.backend_decl
= backend_decl
;
1404 /* Create a fake symbol for the outer array reference. */
1407 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
1408 outer_sym
.attr
.dummy
= 0;
1409 outer_sym
.attr
.result
= 0;
1410 outer_sym
.attr
.flavor
= FL_VARIABLE
;
1411 outer_sym
.backend_decl
= outer_decl
;
1412 if (decl
!= OMP_CLAUSE_DECL (c
))
1413 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
1415 /* Create fake symtrees for it. */
1416 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
1417 symtree1
->n
.sym
= sym
;
1418 gcc_assert (symtree1
== root1
);
1420 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
1421 symtree2
->n
.sym
= &init_val_sym
;
1422 gcc_assert (symtree2
== root2
);
1424 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
1425 symtree3
->n
.sym
= &outer_sym
;
1426 gcc_assert (symtree3
== root3
);
1428 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
1431 omp_var_copy
[0] = *udr
->omp_out
;
1432 omp_var_copy
[1] = *udr
->omp_in
;
1433 *udr
->omp_out
= outer_sym
;
1434 *udr
->omp_in
= *sym
;
1435 if (udr
->initializer_ns
)
1437 omp_var_copy
[2] = *udr
->omp_priv
;
1438 omp_var_copy
[3] = *udr
->omp_orig
;
1439 *udr
->omp_priv
= *sym
;
1440 *udr
->omp_orig
= outer_sym
;
1444 /* Create expressions. */
1445 e1
= gfc_get_expr ();
1446 e1
->expr_type
= EXPR_VARIABLE
;
1448 e1
->symtree
= symtree1
;
1450 if (sym
->attr
.dimension
)
1452 e1
->ref
= ref
= gfc_get_ref ();
1453 ref
->type
= REF_ARRAY
;
1454 ref
->u
.ar
.where
= where
;
1455 ref
->u
.ar
.as
= sym
->as
;
1456 ref
->u
.ar
.type
= AR_FULL
;
1457 ref
->u
.ar
.dimen
= 0;
1459 t
= gfc_resolve_expr (e1
);
1463 if (backend_decl
!= NULL_TREE
)
1465 e2
= gfc_get_expr ();
1466 e2
->expr_type
= EXPR_VARIABLE
;
1468 e2
->symtree
= symtree2
;
1470 t
= gfc_resolve_expr (e2
);
1473 else if (udr
->initializer_ns
== NULL
)
1475 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
1476 e2
= gfc_default_initializer (&sym
->ts
);
1478 t
= gfc_resolve_expr (e2
);
1481 else if (n
->udr
->initializer
->op
== EXEC_ASSIGN
)
1483 e2
= gfc_copy_expr (n
->udr
->initializer
->expr2
);
1484 t
= gfc_resolve_expr (e2
);
1487 if (udr
&& udr
->initializer_ns
)
1489 struct omp_udr_find_orig_data cd
;
1491 cd
.omp_orig_seen
= false;
1492 gfc_code_walker (&n
->udr
->initializer
,
1493 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
1494 if (cd
.omp_orig_seen
)
1495 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
1498 e3
= gfc_copy_expr (e1
);
1499 e3
->symtree
= symtree3
;
1500 t
= gfc_resolve_expr (e3
);
1505 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
1509 e4
= gfc_add (e3
, e1
);
1512 e4
= gfc_multiply (e3
, e1
);
1514 case TRUTH_ANDIF_EXPR
:
1515 e4
= gfc_and (e3
, e1
);
1517 case TRUTH_ORIF_EXPR
:
1518 e4
= gfc_or (e3
, e1
);
1521 e4
= gfc_eqv (e3
, e1
);
1524 e4
= gfc_neqv (e3
, e1
);
1542 if (n
->udr
->combiner
->op
== EXEC_ASSIGN
)
1545 e3
= gfc_copy_expr (n
->udr
->combiner
->expr1
);
1546 e4
= gfc_copy_expr (n
->udr
->combiner
->expr2
);
1547 t
= gfc_resolve_expr (e3
);
1549 t
= gfc_resolve_expr (e4
);
1558 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
1559 intrinsic_sym
.ns
= sym
->ns
;
1560 intrinsic_sym
.name
= iname
;
1561 intrinsic_sym
.ts
= sym
->ts
;
1562 intrinsic_sym
.attr
.referenced
= 1;
1563 intrinsic_sym
.attr
.intrinsic
= 1;
1564 intrinsic_sym
.attr
.function
= 1;
1565 intrinsic_sym
.result
= &intrinsic_sym
;
1566 intrinsic_sym
.declared_at
= where
;
1568 symtree4
= gfc_new_symtree (&root4
, iname
);
1569 symtree4
->n
.sym
= &intrinsic_sym
;
1570 gcc_assert (symtree4
== root4
);
1572 e4
= gfc_get_expr ();
1573 e4
->expr_type
= EXPR_FUNCTION
;
1575 e4
->symtree
= symtree4
;
1576 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
1577 e4
->value
.function
.actual
->expr
= e3
;
1578 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
1579 e4
->value
.function
.actual
->next
->expr
= e1
;
1581 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1583 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1584 e1
= gfc_copy_expr (e1
);
1585 e3
= gfc_copy_expr (e3
);
1586 t
= gfc_resolve_expr (e4
);
1590 /* Create the init statement list. */
1593 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
1595 stmt
= gfc_trans_call (n
->udr
->initializer
, false,
1596 NULL_TREE
, NULL_TREE
, false);
1597 if (TREE_CODE (stmt
) != BIND_EXPR
)
1598 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1601 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
1603 /* Create the merge statement list. */
1606 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
1608 stmt
= gfc_trans_call (n
->udr
->combiner
, false,
1609 NULL_TREE
, NULL_TREE
, false);
1610 if (TREE_CODE (stmt
) != BIND_EXPR
)
1611 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1614 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
1616 /* And stick the placeholder VAR_DECL into the clause as well. */
1617 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
1619 gfc_current_locus
= old_loc
;
1632 gfc_free_array_spec (outer_sym
.as
);
1636 *udr
->omp_out
= omp_var_copy
[0];
1637 *udr
->omp_in
= omp_var_copy
[1];
1638 if (udr
->initializer_ns
)
1640 *udr
->omp_priv
= omp_var_copy
[2];
1641 *udr
->omp_orig
= omp_var_copy
[3];
1647 gfc_trans_omp_reduction_list (gfc_omp_namelist
*namelist
, tree list
,
1648 locus where
, bool mark_addressable
)
1650 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1651 if (namelist
->sym
->attr
.referenced
)
1653 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
1654 if (t
!= error_mark_node
)
1656 tree node
= build_omp_clause (where
.lb
->location
,
1657 OMP_CLAUSE_REDUCTION
);
1658 OMP_CLAUSE_DECL (node
) = t
;
1659 if (mark_addressable
)
1660 TREE_ADDRESSABLE (t
) = 1;
1661 switch (namelist
->u
.reduction_op
)
1663 case OMP_REDUCTION_PLUS
:
1664 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
1666 case OMP_REDUCTION_MINUS
:
1667 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
1669 case OMP_REDUCTION_TIMES
:
1670 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
1672 case OMP_REDUCTION_AND
:
1673 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
1675 case OMP_REDUCTION_OR
:
1676 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
1678 case OMP_REDUCTION_EQV
:
1679 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
1681 case OMP_REDUCTION_NEQV
:
1682 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
1684 case OMP_REDUCTION_MAX
:
1685 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
1687 case OMP_REDUCTION_MIN
:
1688 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
1690 case OMP_REDUCTION_IAND
:
1691 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
1693 case OMP_REDUCTION_IOR
:
1694 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
1696 case OMP_REDUCTION_IEOR
:
1697 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
1699 case OMP_REDUCTION_USER
:
1700 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
1705 if (namelist
->sym
->attr
.dimension
1706 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
1707 || namelist
->sym
->attr
.allocatable
)
1708 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
1709 list
= gfc_trans_add_clause (node
, list
);
1716 gfc_convert_expr_to_tree (stmtblock_t
*block
, gfc_expr
*expr
)
1721 gfc_init_se (&se
, NULL
);
1722 gfc_conv_expr (&se
, expr
);
1723 gfc_add_block_to_block (block
, &se
.pre
);
1724 result
= gfc_evaluate_now (se
.expr
, block
);
1725 gfc_add_block_to_block (block
, &se
.post
);
1731 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
1732 locus where
, bool declare_simd
= false)
1734 tree omp_clauses
= NULL_TREE
, chunk_size
, c
;
1736 enum omp_clause_code clause_code
;
1739 if (clauses
== NULL
)
1742 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
1744 gfc_omp_namelist
*n
= clauses
->lists
[list
];
1750 case OMP_LIST_REDUCTION
:
1751 /* An OpenACC async clause indicates the need to set reduction
1752 arguments addressable, to allow asynchronous copy-out. */
1753 omp_clauses
= gfc_trans_omp_reduction_list (n
, omp_clauses
, where
,
1756 case OMP_LIST_PRIVATE
:
1757 clause_code
= OMP_CLAUSE_PRIVATE
;
1759 case OMP_LIST_SHARED
:
1760 clause_code
= OMP_CLAUSE_SHARED
;
1762 case OMP_LIST_FIRSTPRIVATE
:
1763 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
1765 case OMP_LIST_LASTPRIVATE
:
1766 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
1768 case OMP_LIST_COPYIN
:
1769 clause_code
= OMP_CLAUSE_COPYIN
;
1771 case OMP_LIST_COPYPRIVATE
:
1772 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
1774 case OMP_LIST_UNIFORM
:
1775 clause_code
= OMP_CLAUSE_UNIFORM
;
1777 case OMP_LIST_USE_DEVICE
:
1778 clause_code
= OMP_CLAUSE_USE_DEVICE_PTR
;
1783 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
1786 case OMP_LIST_ALIGNED
:
1787 for (; n
!= NULL
; n
= n
->next
)
1788 if (n
->sym
->attr
.referenced
|| declare_simd
)
1790 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1791 if (t
!= error_mark_node
)
1793 tree node
= build_omp_clause (input_location
,
1794 OMP_CLAUSE_ALIGNED
);
1795 OMP_CLAUSE_DECL (node
) = t
;
1801 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
1804 gfc_init_se (&se
, NULL
);
1805 gfc_conv_expr (&se
, n
->expr
);
1806 gfc_add_block_to_block (block
, &se
.pre
);
1807 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
1808 gfc_add_block_to_block (block
, &se
.post
);
1810 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
1812 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1816 case OMP_LIST_LINEAR
:
1818 gfc_expr
*last_step_expr
= NULL
;
1819 tree last_step
= NULL_TREE
;
1821 for (; n
!= NULL
; n
= n
->next
)
1825 last_step_expr
= n
->expr
;
1826 last_step
= NULL_TREE
;
1828 if (n
->sym
->attr
.referenced
|| declare_simd
)
1830 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1831 if (t
!= error_mark_node
)
1833 tree node
= build_omp_clause (input_location
,
1835 OMP_CLAUSE_DECL (node
) = t
;
1836 if (last_step_expr
&& last_step
== NULL_TREE
)
1840 = gfc_conv_constant_to_tree (last_step_expr
);
1843 gfc_init_se (&se
, NULL
);
1844 gfc_conv_expr (&se
, last_step_expr
);
1845 gfc_add_block_to_block (block
, &se
.pre
);
1846 last_step
= gfc_evaluate_now (se
.expr
, block
);
1847 gfc_add_block_to_block (block
, &se
.post
);
1850 OMP_CLAUSE_LINEAR_STEP (node
)
1851 = fold_convert (gfc_typenode_for_spec (&n
->sym
->ts
),
1853 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
1854 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
1855 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1861 case OMP_LIST_DEPEND
:
1862 for (; n
!= NULL
; n
= n
->next
)
1864 if (!n
->sym
->attr
.referenced
)
1867 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_DEPEND
);
1868 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
1870 tree decl
= gfc_get_symbol_decl (n
->sym
);
1871 if (gfc_omp_privatize_by_reference (decl
))
1872 decl
= build_fold_indirect_ref (decl
);
1873 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1875 decl
= gfc_conv_descriptor_data_get (decl
);
1876 decl
= fold_convert (build_pointer_type (char_type_node
),
1878 decl
= build_fold_indirect_ref (decl
);
1880 else if (DECL_P (decl
))
1881 TREE_ADDRESSABLE (decl
) = 1;
1882 OMP_CLAUSE_DECL (node
) = decl
;
1887 gfc_init_se (&se
, NULL
);
1888 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
1890 gfc_conv_expr_reference (&se
, n
->expr
);
1895 gfc_conv_expr_descriptor (&se
, n
->expr
);
1896 ptr
= gfc_conv_array_data (se
.expr
);
1898 gfc_add_block_to_block (block
, &se
.pre
);
1899 gfc_add_block_to_block (block
, &se
.post
);
1900 ptr
= fold_convert (build_pointer_type (char_type_node
),
1902 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
1904 switch (n
->u
.depend_op
)
1907 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
1909 case OMP_DEPEND_OUT
:
1910 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
1912 case OMP_DEPEND_INOUT
:
1913 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
1918 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1922 for (; n
!= NULL
; n
= n
->next
)
1924 if (!n
->sym
->attr
.referenced
)
1927 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1928 tree node2
= NULL_TREE
;
1929 tree node3
= NULL_TREE
;
1930 tree node4
= NULL_TREE
;
1931 tree decl
= gfc_get_symbol_decl (n
->sym
);
1933 TREE_ADDRESSABLE (decl
) = 1;
1934 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
1936 if (POINTER_TYPE_P (TREE_TYPE (decl
))
1937 && (gfc_omp_privatize_by_reference (decl
)
1938 || GFC_DECL_GET_SCALAR_POINTER (decl
)
1939 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1940 || GFC_DECL_CRAY_POINTEE (decl
)
1941 || GFC_DESCRIPTOR_TYPE_P
1942 (TREE_TYPE (TREE_TYPE (decl
)))))
1944 tree orig_decl
= decl
;
1945 node4
= build_omp_clause (input_location
,
1947 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
1948 OMP_CLAUSE_DECL (node4
) = decl
;
1949 OMP_CLAUSE_SIZE (node4
) = size_int (0);
1950 decl
= build_fold_indirect_ref (decl
);
1951 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1952 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1953 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1955 node3
= build_omp_clause (input_location
,
1957 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
1958 OMP_CLAUSE_DECL (node3
) = decl
;
1959 OMP_CLAUSE_SIZE (node3
) = size_int (0);
1960 decl
= build_fold_indirect_ref (decl
);
1963 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1965 tree type
= TREE_TYPE (decl
);
1966 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1967 ptr
= fold_convert (build_pointer_type (char_type_node
),
1969 ptr
= build_fold_indirect_ref (ptr
);
1970 OMP_CLAUSE_DECL (node
) = ptr
;
1971 node2
= build_omp_clause (input_location
,
1973 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
1974 OMP_CLAUSE_DECL (node2
) = decl
;
1975 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
1976 node3
= build_omp_clause (input_location
,
1978 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
1979 OMP_CLAUSE_DECL (node3
)
1980 = gfc_conv_descriptor_data_get (decl
);
1981 OMP_CLAUSE_SIZE (node3
) = size_int (0);
1983 /* We have to check for n->sym->attr.dimension because
1984 of scalar coarrays. */
1985 if (n
->sym
->attr
.pointer
&& n
->sym
->attr
.dimension
)
1987 stmtblock_t cond_block
;
1989 = gfc_create_var (gfc_array_index_type
, NULL
);
1990 tree tem
, then_b
, else_b
, zero
, cond
;
1992 gfc_init_block (&cond_block
);
1994 = gfc_full_array_size (&cond_block
, decl
,
1995 GFC_TYPE_ARRAY_RANK (type
));
1996 gfc_add_modify (&cond_block
, size
, tem
);
1997 then_b
= gfc_finish_block (&cond_block
);
1998 gfc_init_block (&cond_block
);
1999 zero
= build_int_cst (gfc_array_index_type
, 0);
2000 gfc_add_modify (&cond_block
, size
, zero
);
2001 else_b
= gfc_finish_block (&cond_block
);
2002 tem
= gfc_conv_descriptor_data_get (decl
);
2003 tem
= fold_convert (pvoid_type_node
, tem
);
2004 cond
= fold_build2_loc (input_location
, NE_EXPR
,
2006 tem
, null_pointer_node
);
2007 gfc_add_expr_to_block (block
,
2008 build3_loc (input_location
,
2013 OMP_CLAUSE_SIZE (node
) = size
;
2015 else if (n
->sym
->attr
.dimension
)
2016 OMP_CLAUSE_SIZE (node
)
2017 = gfc_full_array_size (block
, decl
,
2018 GFC_TYPE_ARRAY_RANK (type
));
2019 if (n
->sym
->attr
.dimension
)
2022 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2023 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2024 OMP_CLAUSE_SIZE (node
)
2025 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2026 OMP_CLAUSE_SIZE (node
), elemsz
);
2030 OMP_CLAUSE_DECL (node
) = decl
;
2035 gfc_init_se (&se
, NULL
);
2036 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2038 gfc_conv_expr_reference (&se
, n
->expr
);
2039 gfc_add_block_to_block (block
, &se
.pre
);
2041 OMP_CLAUSE_SIZE (node
)
2042 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2046 gfc_conv_expr_descriptor (&se
, n
->expr
);
2047 ptr
= gfc_conv_array_data (se
.expr
);
2048 tree type
= TREE_TYPE (se
.expr
);
2049 gfc_add_block_to_block (block
, &se
.pre
);
2050 OMP_CLAUSE_SIZE (node
)
2051 = gfc_full_array_size (block
, se
.expr
,
2052 GFC_TYPE_ARRAY_RANK (type
));
2054 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2055 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2056 OMP_CLAUSE_SIZE (node
)
2057 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2058 OMP_CLAUSE_SIZE (node
), elemsz
);
2060 gfc_add_block_to_block (block
, &se
.post
);
2061 ptr
= fold_convert (build_pointer_type (char_type_node
),
2063 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2065 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2066 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
2068 node4
= build_omp_clause (input_location
,
2070 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2071 OMP_CLAUSE_DECL (node4
) = decl
;
2072 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2073 decl
= build_fold_indirect_ref (decl
);
2075 ptr
= fold_convert (sizetype
, ptr
);
2076 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2078 tree type
= TREE_TYPE (decl
);
2079 ptr2
= gfc_conv_descriptor_data_get (decl
);
2080 node2
= build_omp_clause (input_location
,
2082 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2083 OMP_CLAUSE_DECL (node2
) = decl
;
2084 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2085 node3
= build_omp_clause (input_location
,
2087 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2088 OMP_CLAUSE_DECL (node3
)
2089 = gfc_conv_descriptor_data_get (decl
);
2093 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2094 ptr2
= build_fold_addr_expr (decl
);
2097 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2100 node3
= build_omp_clause (input_location
,
2102 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2103 OMP_CLAUSE_DECL (node3
) = decl
;
2105 ptr2
= fold_convert (sizetype
, ptr2
);
2106 OMP_CLAUSE_SIZE (node3
)
2107 = fold_build2 (MINUS_EXPR
, sizetype
, ptr
, ptr2
);
2109 switch (n
->u
.map_op
)
2112 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALLOC
);
2115 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TO
);
2118 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FROM
);
2120 case OMP_MAP_TOFROM
:
2121 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TOFROM
);
2123 case OMP_MAP_DELETE
:
2124 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DELETE
);
2126 case OMP_MAP_FORCE_ALLOC
:
2127 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_ALLOC
);
2129 case OMP_MAP_FORCE_TO
:
2130 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TO
);
2132 case OMP_MAP_FORCE_FROM
:
2133 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_FROM
);
2135 case OMP_MAP_FORCE_TOFROM
:
2136 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TOFROM
);
2138 case OMP_MAP_FORCE_PRESENT
:
2139 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_PRESENT
);
2141 case OMP_MAP_FORCE_DEVICEPTR
:
2142 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEVICEPTR
);
2147 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2149 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
2151 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
2153 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
2158 case OMP_LIST_CACHE
:
2159 for (; n
!= NULL
; n
= n
->next
)
2161 if (!n
->sym
->attr
.referenced
)
2167 clause_code
= OMP_CLAUSE_TO
;
2170 clause_code
= OMP_CLAUSE_FROM
;
2172 case OMP_LIST_CACHE
:
2173 clause_code
= OMP_CLAUSE__CACHE_
;
2178 tree node
= build_omp_clause (input_location
, clause_code
);
2179 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2181 tree decl
= gfc_get_symbol_decl (n
->sym
);
2182 if (gfc_omp_privatize_by_reference (decl
))
2183 decl
= build_fold_indirect_ref (decl
);
2184 else if (DECL_P (decl
))
2185 TREE_ADDRESSABLE (decl
) = 1;
2186 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2188 tree type
= TREE_TYPE (decl
);
2189 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2190 ptr
= fold_convert (build_pointer_type (char_type_node
),
2192 ptr
= build_fold_indirect_ref (ptr
);
2193 OMP_CLAUSE_DECL (node
) = ptr
;
2194 OMP_CLAUSE_SIZE (node
)
2195 = gfc_full_array_size (block
, decl
,
2196 GFC_TYPE_ARRAY_RANK (type
));
2198 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2199 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2200 OMP_CLAUSE_SIZE (node
)
2201 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2202 OMP_CLAUSE_SIZE (node
), elemsz
);
2205 OMP_CLAUSE_DECL (node
) = decl
;
2210 gfc_init_se (&se
, NULL
);
2211 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2213 gfc_conv_expr_reference (&se
, n
->expr
);
2215 gfc_add_block_to_block (block
, &se
.pre
);
2216 OMP_CLAUSE_SIZE (node
)
2217 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2221 gfc_conv_expr_descriptor (&se
, n
->expr
);
2222 ptr
= gfc_conv_array_data (se
.expr
);
2223 tree type
= TREE_TYPE (se
.expr
);
2224 gfc_add_block_to_block (block
, &se
.pre
);
2225 OMP_CLAUSE_SIZE (node
)
2226 = gfc_full_array_size (block
, se
.expr
,
2227 GFC_TYPE_ARRAY_RANK (type
));
2229 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2230 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2231 OMP_CLAUSE_SIZE (node
)
2232 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2233 OMP_CLAUSE_SIZE (node
), elemsz
);
2235 gfc_add_block_to_block (block
, &se
.post
);
2236 ptr
= fold_convert (build_pointer_type (char_type_node
),
2238 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2240 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2248 if (clauses
->if_expr
)
2252 gfc_init_se (&se
, NULL
);
2253 gfc_conv_expr (&se
, clauses
->if_expr
);
2254 gfc_add_block_to_block (block
, &se
.pre
);
2255 if_var
= gfc_evaluate_now (se
.expr
, block
);
2256 gfc_add_block_to_block (block
, &se
.post
);
2258 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
2259 OMP_CLAUSE_IF_MODIFIER (c
) = ERROR_MARK
;
2260 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2261 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2264 if (clauses
->final_expr
)
2268 gfc_init_se (&se
, NULL
);
2269 gfc_conv_expr (&se
, clauses
->final_expr
);
2270 gfc_add_block_to_block (block
, &se
.pre
);
2271 final_var
= gfc_evaluate_now (se
.expr
, block
);
2272 gfc_add_block_to_block (block
, &se
.post
);
2274 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FINAL
);
2275 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
2276 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2279 if (clauses
->num_threads
)
2283 gfc_init_se (&se
, NULL
);
2284 gfc_conv_expr (&se
, clauses
->num_threads
);
2285 gfc_add_block_to_block (block
, &se
.pre
);
2286 num_threads
= gfc_evaluate_now (se
.expr
, block
);
2287 gfc_add_block_to_block (block
, &se
.post
);
2289 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_THREADS
);
2290 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
2291 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2294 chunk_size
= NULL_TREE
;
2295 if (clauses
->chunk_size
)
2297 gfc_init_se (&se
, NULL
);
2298 gfc_conv_expr (&se
, clauses
->chunk_size
);
2299 gfc_add_block_to_block (block
, &se
.pre
);
2300 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2301 gfc_add_block_to_block (block
, &se
.post
);
2304 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
2306 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SCHEDULE
);
2307 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2308 switch (clauses
->sched_kind
)
2310 case OMP_SCHED_STATIC
:
2311 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
2313 case OMP_SCHED_DYNAMIC
:
2314 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
2316 case OMP_SCHED_GUIDED
:
2317 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
2319 case OMP_SCHED_RUNTIME
:
2320 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
2322 case OMP_SCHED_AUTO
:
2323 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
2328 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2331 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
2333 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULT
);
2334 switch (clauses
->default_sharing
)
2336 case OMP_DEFAULT_NONE
:
2337 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
2339 case OMP_DEFAULT_SHARED
:
2340 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
2342 case OMP_DEFAULT_PRIVATE
:
2343 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
2345 case OMP_DEFAULT_FIRSTPRIVATE
:
2346 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
2351 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2354 if (clauses
->nowait
)
2356 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOWAIT
);
2357 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2360 if (clauses
->ordered
)
2362 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ORDERED
);
2363 OMP_CLAUSE_ORDERED_EXPR (c
) = NULL_TREE
;
2364 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2367 if (clauses
->untied
)
2369 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_UNTIED
);
2370 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2373 if (clauses
->mergeable
)
2375 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_MERGEABLE
);
2376 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2379 if (clauses
->collapse
)
2381 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_COLLAPSE
);
2382 OMP_CLAUSE_COLLAPSE_EXPR (c
)
2383 = build_int_cst (integer_type_node
, clauses
->collapse
);
2384 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2387 if (clauses
->inbranch
)
2389 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INBRANCH
);
2390 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2393 if (clauses
->notinbranch
)
2395 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOTINBRANCH
);
2396 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2399 switch (clauses
->cancel
)
2401 case OMP_CANCEL_UNKNOWN
:
2403 case OMP_CANCEL_PARALLEL
:
2404 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PARALLEL
);
2405 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2407 case OMP_CANCEL_SECTIONS
:
2408 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SECTIONS
);
2409 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2412 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FOR
);
2413 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2415 case OMP_CANCEL_TASKGROUP
:
2416 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TASKGROUP
);
2417 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2421 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
2423 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PROC_BIND
);
2424 switch (clauses
->proc_bind
)
2426 case OMP_PROC_BIND_MASTER
:
2427 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
2429 case OMP_PROC_BIND_SPREAD
:
2430 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
2432 case OMP_PROC_BIND_CLOSE
:
2433 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
2438 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2441 if (clauses
->safelen_expr
)
2445 gfc_init_se (&se
, NULL
);
2446 gfc_conv_expr (&se
, clauses
->safelen_expr
);
2447 gfc_add_block_to_block (block
, &se
.pre
);
2448 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
2449 gfc_add_block_to_block (block
, &se
.post
);
2451 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SAFELEN
);
2452 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
2453 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2456 if (clauses
->simdlen_expr
)
2458 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2459 OMP_CLAUSE_SIMDLEN_EXPR (c
)
2460 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
2461 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2464 if (clauses
->num_teams
)
2468 gfc_init_se (&se
, NULL
);
2469 gfc_conv_expr (&se
, clauses
->num_teams
);
2470 gfc_add_block_to_block (block
, &se
.pre
);
2471 num_teams
= gfc_evaluate_now (se
.expr
, block
);
2472 gfc_add_block_to_block (block
, &se
.post
);
2474 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TEAMS
);
2475 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
2476 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2479 if (clauses
->device
)
2483 gfc_init_se (&se
, NULL
);
2484 gfc_conv_expr (&se
, clauses
->device
);
2485 gfc_add_block_to_block (block
, &se
.pre
);
2486 device
= gfc_evaluate_now (se
.expr
, block
);
2487 gfc_add_block_to_block (block
, &se
.post
);
2489 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEVICE
);
2490 OMP_CLAUSE_DEVICE_ID (c
) = device
;
2491 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2494 if (clauses
->thread_limit
)
2498 gfc_init_se (&se
, NULL
);
2499 gfc_conv_expr (&se
, clauses
->thread_limit
);
2500 gfc_add_block_to_block (block
, &se
.pre
);
2501 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
2502 gfc_add_block_to_block (block
, &se
.post
);
2504 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREAD_LIMIT
);
2505 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
2506 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2509 chunk_size
= NULL_TREE
;
2510 if (clauses
->dist_chunk_size
)
2512 gfc_init_se (&se
, NULL
);
2513 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
2514 gfc_add_block_to_block (block
, &se
.pre
);
2515 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2516 gfc_add_block_to_block (block
, &se
.post
);
2519 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
2521 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DIST_SCHEDULE
);
2522 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2523 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2528 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ASYNC
);
2529 if (clauses
->async_expr
)
2530 OMP_CLAUSE_ASYNC_EXPR (c
)
2531 = gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
2533 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
2534 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2538 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SEQ
);
2539 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2541 if (clauses
->par_auto
)
2543 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_AUTO
);
2544 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2546 if (clauses
->independent
)
2548 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INDEPENDENT
);
2549 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2551 if (clauses
->wait_list
)
2555 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2557 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WAIT
);
2558 OMP_CLAUSE_DECL (c
) = gfc_convert_expr_to_tree (block
, el
->expr
);
2559 OMP_CLAUSE_CHAIN (c
) = omp_clauses
;
2563 if (clauses
->num_gangs_expr
)
2566 = gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
2567 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_GANGS
);
2568 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
2569 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2571 if (clauses
->num_workers_expr
)
2573 tree num_workers_var
2574 = gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
2575 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_WORKERS
);
2576 OMP_CLAUSE_NUM_WORKERS_EXPR (c
) = num_workers_var
;
2577 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2579 if (clauses
->vector_length_expr
)
2581 tree vector_length_var
2582 = gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
2583 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR_LENGTH
);
2584 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
) = vector_length_var
;
2585 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2587 if (clauses
->tile_list
)
2589 vec
<tree
, va_gc
> *tvec
;
2592 vec_alloc (tvec
, 4);
2594 for (el
= clauses
->tile_list
; el
; el
= el
->next
)
2595 vec_safe_push (tvec
, gfc_convert_expr_to_tree (block
, el
->expr
));
2597 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TILE
);
2598 OMP_CLAUSE_TILE_LIST (c
) = build_tree_list_vec (tvec
);
2599 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2602 if (clauses
->vector
)
2604 if (clauses
->vector_expr
)
2607 = gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
2608 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2609 OMP_CLAUSE_VECTOR_EXPR (c
) = vector_var
;
2610 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2614 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2615 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2618 if (clauses
->worker
)
2620 if (clauses
->worker_expr
)
2623 = gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
2624 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2625 OMP_CLAUSE_WORKER_EXPR (c
) = worker_var
;
2626 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2630 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2631 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2637 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GANG
);
2638 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2639 if (clauses
->gang_num_expr
)
2641 arg
= gfc_convert_expr_to_tree (block
, clauses
->gang_num_expr
);
2642 OMP_CLAUSE_GANG_EXPR (c
) = arg
;
2644 if (clauses
->gang_static
)
2646 arg
= clauses
->gang_static_expr
2647 ? gfc_convert_expr_to_tree (block
, clauses
->gang_static_expr
)
2648 : integer_minus_one_node
;
2649 OMP_CLAUSE_GANG_STATIC_EXPR (c
) = arg
;
2653 return nreverse (omp_clauses
);
2656 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
2659 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
2664 stmt
= gfc_trans_code (code
);
2665 if (TREE_CODE (stmt
) != BIND_EXPR
)
2667 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
2669 tree block
= poplevel (1, 0);
2670 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
2680 /* Trans OpenACC directives. */
2681 /* parallel, kernels, data and host_data. */
2683 gfc_trans_oacc_construct (gfc_code
*code
)
2686 tree stmt
, oacc_clauses
;
2687 enum tree_code construct_code
;
2691 case EXEC_OACC_PARALLEL
:
2692 construct_code
= OACC_PARALLEL
;
2694 case EXEC_OACC_KERNELS
:
2695 construct_code
= OACC_KERNELS
;
2697 case EXEC_OACC_DATA
:
2698 construct_code
= OACC_DATA
;
2700 case EXEC_OACC_HOST_DATA
:
2701 construct_code
= OACC_HOST_DATA
;
2707 gfc_start_block (&block
);
2708 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
2710 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
2711 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
2713 gfc_add_expr_to_block (&block
, stmt
);
2714 return gfc_finish_block (&block
);
2717 /* update, enter_data, exit_data, cache. */
2719 gfc_trans_oacc_executable_directive (gfc_code
*code
)
2722 tree stmt
, oacc_clauses
;
2723 enum tree_code construct_code
;
2727 case EXEC_OACC_UPDATE
:
2728 construct_code
= OACC_UPDATE
;
2730 case EXEC_OACC_ENTER_DATA
:
2731 construct_code
= OACC_ENTER_DATA
;
2733 case EXEC_OACC_EXIT_DATA
:
2734 construct_code
= OACC_EXIT_DATA
;
2736 case EXEC_OACC_CACHE
:
2737 construct_code
= OACC_CACHE
;
2743 gfc_start_block (&block
);
2744 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
2746 stmt
= build1_loc (input_location
, construct_code
, void_type_node
,
2748 gfc_add_expr_to_block (&block
, stmt
);
2749 return gfc_finish_block (&block
);
2753 gfc_trans_oacc_wait_directive (gfc_code
*code
)
2757 vec
<tree
, va_gc
> *args
;
2760 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
2761 location_t loc
= input_location
;
2763 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2766 vec_alloc (args
, nparms
+ 2);
2767 stmt
= builtin_decl_explicit (BUILT_IN_GOACC_WAIT
);
2769 gfc_start_block (&block
);
2771 if (clauses
->async_expr
)
2772 t
= gfc_convert_expr_to_tree (&block
, clauses
->async_expr
);
2774 t
= build_int_cst (integer_type_node
, -2);
2776 args
->quick_push (t
);
2777 args
->quick_push (build_int_cst (integer_type_node
, nparms
));
2779 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2780 args
->quick_push (gfc_convert_expr_to_tree (&block
, el
->expr
));
2782 stmt
= build_call_expr_loc_vec (loc
, stmt
, args
);
2783 gfc_add_expr_to_block (&block
, stmt
);
2787 return gfc_finish_block (&block
);
2790 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
2791 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
2794 gfc_trans_omp_atomic (gfc_code
*code
)
2796 gfc_code
*atomic_code
= code
;
2800 gfc_expr
*expr2
, *e
;
2803 tree lhsaddr
, type
, rhs
, x
;
2804 enum tree_code op
= ERROR_MARK
;
2805 enum tree_code aop
= OMP_ATOMIC
;
2806 bool var_on_left
= false;
2807 bool seq_cst
= (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SEQ_CST
) != 0;
2809 code
= code
->block
->next
;
2810 gcc_assert (code
->op
== EXEC_ASSIGN
);
2811 var
= code
->expr1
->symtree
->n
.sym
;
2813 gfc_init_se (&lse
, NULL
);
2814 gfc_init_se (&rse
, NULL
);
2815 gfc_init_se (&vse
, NULL
);
2816 gfc_start_block (&block
);
2818 expr2
= code
->expr2
;
2819 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2820 != GFC_OMP_ATOMIC_WRITE
)
2821 && (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
) == 0
2822 && expr2
->expr_type
== EXPR_FUNCTION
2823 && expr2
->value
.function
.isym
2824 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2825 expr2
= expr2
->value
.function
.actual
->expr
;
2827 switch (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2829 case GFC_OMP_ATOMIC_READ
:
2830 gfc_conv_expr (&vse
, code
->expr1
);
2831 gfc_add_block_to_block (&block
, &vse
.pre
);
2833 gfc_conv_expr (&lse
, expr2
);
2834 gfc_add_block_to_block (&block
, &lse
.pre
);
2835 type
= TREE_TYPE (lse
.expr
);
2836 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
2838 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
2839 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
2840 x
= convert (TREE_TYPE (vse
.expr
), x
);
2841 gfc_add_modify (&block
, vse
.expr
, x
);
2843 gfc_add_block_to_block (&block
, &lse
.pre
);
2844 gfc_add_block_to_block (&block
, &rse
.pre
);
2846 return gfc_finish_block (&block
);
2847 case GFC_OMP_ATOMIC_CAPTURE
:
2848 aop
= OMP_ATOMIC_CAPTURE_NEW
;
2849 if (expr2
->expr_type
== EXPR_VARIABLE
)
2851 aop
= OMP_ATOMIC_CAPTURE_OLD
;
2852 gfc_conv_expr (&vse
, code
->expr1
);
2853 gfc_add_block_to_block (&block
, &vse
.pre
);
2855 gfc_conv_expr (&lse
, expr2
);
2856 gfc_add_block_to_block (&block
, &lse
.pre
);
2857 gfc_init_se (&lse
, NULL
);
2859 var
= code
->expr1
->symtree
->n
.sym
;
2860 expr2
= code
->expr2
;
2861 if (expr2
->expr_type
== EXPR_FUNCTION
2862 && expr2
->value
.function
.isym
2863 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2864 expr2
= expr2
->value
.function
.actual
->expr
;
2871 gfc_conv_expr (&lse
, code
->expr1
);
2872 gfc_add_block_to_block (&block
, &lse
.pre
);
2873 type
= TREE_TYPE (lse
.expr
);
2874 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
2876 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2877 == GFC_OMP_ATOMIC_WRITE
)
2878 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
2880 gfc_conv_expr (&rse
, expr2
);
2881 gfc_add_block_to_block (&block
, &rse
.pre
);
2883 else if (expr2
->expr_type
== EXPR_OP
)
2886 switch (expr2
->value
.op
.op
)
2888 case INTRINSIC_PLUS
:
2891 case INTRINSIC_TIMES
:
2894 case INTRINSIC_MINUS
:
2897 case INTRINSIC_DIVIDE
:
2898 if (expr2
->ts
.type
== BT_INTEGER
)
2899 op
= TRUNC_DIV_EXPR
;
2904 op
= TRUTH_ANDIF_EXPR
;
2907 op
= TRUTH_ORIF_EXPR
;
2912 case INTRINSIC_NEQV
:
2918 e
= expr2
->value
.op
.op1
;
2919 if (e
->expr_type
== EXPR_FUNCTION
2920 && e
->value
.function
.isym
2921 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2922 e
= e
->value
.function
.actual
->expr
;
2923 if (e
->expr_type
== EXPR_VARIABLE
2924 && e
->symtree
!= NULL
2925 && e
->symtree
->n
.sym
== var
)
2927 expr2
= expr2
->value
.op
.op2
;
2932 e
= expr2
->value
.op
.op2
;
2933 if (e
->expr_type
== EXPR_FUNCTION
2934 && e
->value
.function
.isym
2935 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2936 e
= e
->value
.function
.actual
->expr
;
2937 gcc_assert (e
->expr_type
== EXPR_VARIABLE
2938 && e
->symtree
!= NULL
2939 && e
->symtree
->n
.sym
== var
);
2940 expr2
= expr2
->value
.op
.op1
;
2941 var_on_left
= false;
2943 gfc_conv_expr (&rse
, expr2
);
2944 gfc_add_block_to_block (&block
, &rse
.pre
);
2948 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
2949 switch (expr2
->value
.function
.isym
->id
)
2969 e
= expr2
->value
.function
.actual
->expr
;
2970 gcc_assert (e
->expr_type
== EXPR_VARIABLE
2971 && e
->symtree
!= NULL
2972 && e
->symtree
->n
.sym
== var
);
2974 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
2975 gfc_add_block_to_block (&block
, &rse
.pre
);
2976 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
2978 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
2979 gfc_actual_arglist
*arg
;
2981 gfc_add_modify (&block
, accum
, rse
.expr
);
2982 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
2985 gfc_init_block (&rse
.pre
);
2986 gfc_conv_expr (&rse
, arg
->expr
);
2987 gfc_add_block_to_block (&block
, &rse
.pre
);
2988 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
2990 gfc_add_modify (&block
, accum
, x
);
2996 expr2
= expr2
->value
.function
.actual
->next
->expr
;
2999 lhsaddr
= save_expr (lhsaddr
);
3000 if (TREE_CODE (lhsaddr
) != SAVE_EXPR
3001 && (TREE_CODE (lhsaddr
) != ADDR_EXPR
3002 || !VAR_P (TREE_OPERAND (lhsaddr
, 0))))
3004 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
3005 it even after unsharing function body. */
3006 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
3007 DECL_CONTEXT (var
) = current_function_decl
;
3008 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
,
3009 NULL_TREE
, NULL_TREE
);
3012 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
3014 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3015 == GFC_OMP_ATOMIC_WRITE
)
3016 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
3020 x
= convert (TREE_TYPE (rhs
),
3021 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
3023 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
3025 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
3028 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
3029 && TREE_CODE (type
) != COMPLEX_TYPE
)
3030 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
3031 TREE_TYPE (TREE_TYPE (rhs
)), x
);
3033 gfc_add_block_to_block (&block
, &lse
.pre
);
3034 gfc_add_block_to_block (&block
, &rse
.pre
);
3036 if (aop
== OMP_ATOMIC
)
3038 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
3039 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3040 gfc_add_expr_to_block (&block
, x
);
3044 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
3047 expr2
= code
->expr2
;
3048 if (expr2
->expr_type
== EXPR_FUNCTION
3049 && expr2
->value
.function
.isym
3050 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3051 expr2
= expr2
->value
.function
.actual
->expr
;
3053 gcc_assert (expr2
->expr_type
== EXPR_VARIABLE
);
3054 gfc_conv_expr (&vse
, code
->expr1
);
3055 gfc_add_block_to_block (&block
, &vse
.pre
);
3057 gfc_init_se (&lse
, NULL
);
3058 gfc_conv_expr (&lse
, expr2
);
3059 gfc_add_block_to_block (&block
, &lse
.pre
);
3061 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
3062 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3063 x
= convert (TREE_TYPE (vse
.expr
), x
);
3064 gfc_add_modify (&block
, vse
.expr
, x
);
3067 return gfc_finish_block (&block
);
3071 gfc_trans_omp_barrier (void)
3073 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
3074 return build_call_expr_loc (input_location
, decl
, 0);
3078 gfc_trans_omp_cancel (gfc_code
*code
)
3081 tree ifc
= boolean_true_node
;
3083 switch (code
->ext
.omp_clauses
->cancel
)
3085 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3086 case OMP_CANCEL_DO
: mask
= 2; break;
3087 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3088 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3089 default: gcc_unreachable ();
3091 gfc_start_block (&block
);
3092 if (code
->ext
.omp_clauses
->if_expr
)
3097 gfc_init_se (&se
, NULL
);
3098 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->if_expr
);
3099 gfc_add_block_to_block (&block
, &se
.pre
);
3100 if_var
= gfc_evaluate_now (se
.expr
, &block
);
3101 gfc_add_block_to_block (&block
, &se
.post
);
3102 tree type
= TREE_TYPE (if_var
);
3103 ifc
= fold_build2_loc (input_location
, NE_EXPR
,
3104 boolean_type_node
, if_var
,
3105 build_zero_cst (type
));
3107 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
3108 tree c_bool_type
= TREE_TYPE (TREE_TYPE (decl
));
3109 ifc
= fold_convert (c_bool_type
, ifc
);
3110 gfc_add_expr_to_block (&block
,
3111 build_call_expr_loc (input_location
, decl
, 2,
3112 build_int_cst (integer_type_node
,
3114 return gfc_finish_block (&block
);
3118 gfc_trans_omp_cancellation_point (gfc_code
*code
)
3121 switch (code
->ext
.omp_clauses
->cancel
)
3123 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3124 case OMP_CANCEL_DO
: mask
= 2; break;
3125 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3126 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3127 default: gcc_unreachable ();
3129 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT
);
3130 return build_call_expr_loc (input_location
, decl
, 1,
3131 build_int_cst (integer_type_node
, mask
));
3135 gfc_trans_omp_critical (gfc_code
*code
)
3137 tree name
= NULL_TREE
, stmt
;
3138 if (code
->ext
.omp_name
!= NULL
)
3139 name
= get_identifier (code
->ext
.omp_name
);
3140 stmt
= gfc_trans_code (code
->block
->next
);
3141 return build3_loc (input_location
, OMP_CRITICAL
, void_type_node
, stmt
,
3145 typedef struct dovar_init_d
{
3152 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
3153 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
3156 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
;
3157 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
3160 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
3161 int i
, collapse
= clauses
->collapse
;
3162 vec
<dovar_init
> inits
= vNULL
;
3169 code
= code
->block
->next
;
3170 gcc_assert (code
->op
== EXEC_DO
);
3172 init
= make_tree_vec (collapse
);
3173 cond
= make_tree_vec (collapse
);
3174 incr
= make_tree_vec (collapse
);
3178 gfc_start_block (&block
);
3182 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
3184 for (i
= 0; i
< collapse
; i
++)
3187 int dovar_found
= 0;
3192 gfc_omp_namelist
*n
= NULL
;
3193 if (op
!= EXEC_OMP_DISTRIBUTE
)
3194 for (n
= clauses
->lists
[(op
== EXEC_OMP_SIMD
&& collapse
== 1)
3195 ? OMP_LIST_LINEAR
: OMP_LIST_LASTPRIVATE
];
3196 n
!= NULL
; n
= n
->next
)
3197 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3201 else if (n
== NULL
&& op
!= EXEC_OMP_SIMD
)
3202 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
3203 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3209 /* Evaluate all the expressions in the iterator. */
3210 gfc_init_se (&se
, NULL
);
3211 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
3212 gfc_add_block_to_block (pblock
, &se
.pre
);
3214 type
= TREE_TYPE (dovar
);
3215 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
3217 gfc_init_se (&se
, NULL
);
3218 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
3219 gfc_add_block_to_block (pblock
, &se
.pre
);
3220 from
= gfc_evaluate_now (se
.expr
, pblock
);
3222 gfc_init_se (&se
, NULL
);
3223 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
3224 gfc_add_block_to_block (pblock
, &se
.pre
);
3225 to
= gfc_evaluate_now (se
.expr
, pblock
);
3227 gfc_init_se (&se
, NULL
);
3228 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
3229 gfc_add_block_to_block (pblock
, &se
.pre
);
3230 step
= gfc_evaluate_now (se
.expr
, pblock
);
3233 /* Special case simple loops. */
3236 if (integer_onep (step
))
3238 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
3243 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
3249 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
3250 /* The condition should not be folded. */
3251 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
3252 ? LE_EXPR
: GE_EXPR
,
3253 boolean_type_node
, dovar
, to
);
3254 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3256 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3259 TREE_VEC_ELT (incr
, i
));
3263 /* STEP is not 1 or -1. Use:
3264 for (count = 0; count < (to + step - from) / step; count++)
3266 dovar = from + count * step;
3270 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
3271 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
3272 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
3274 tmp
= gfc_evaluate_now (tmp
, pblock
);
3275 count
= gfc_create_var (type
, "count");
3276 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
3277 build_int_cst (type
, 0));
3278 /* The condition should not be folded. */
3279 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
3282 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3284 build_int_cst (type
, 1));
3285 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3286 MODIFY_EXPR
, type
, count
,
3287 TREE_VEC_ELT (incr
, i
));
3289 /* Initialize DOVAR. */
3290 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
3291 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
3292 dovar_init e
= {dovar
, tmp
};
3293 inits
.safe_push (e
);
3296 if (dovar_found
== 2
3297 && op
== EXEC_OMP_SIMD
3301 for (tmp
= omp_clauses
; tmp
; tmp
= OMP_CLAUSE_CHAIN (tmp
))
3302 if (OMP_CLAUSE_CODE (tmp
) == OMP_CLAUSE_LINEAR
3303 && OMP_CLAUSE_DECL (tmp
) == dovar
)
3305 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3311 if (op
== EXEC_OMP_SIMD
)
3315 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3316 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
3317 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3320 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3325 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3326 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
3327 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3329 if (dovar_found
== 2)
3336 /* If dovar is lastprivate, but different counter is used,
3337 dovar += step needs to be added to
3338 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
3339 will have the value on entry of the last loop, rather
3340 than value after iterator increment. */
3341 tmp
= gfc_evaluate_now (step
, pblock
);
3342 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, dovar
,
3344 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
3346 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3347 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3348 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3350 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
3353 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
3354 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3356 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
3360 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
3362 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3363 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3364 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3366 tree l
= build_omp_clause (input_location
,
3367 OMP_CLAUSE_LASTPRIVATE
);
3368 OMP_CLAUSE_DECL (l
) = dovar_decl
;
3369 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
3370 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
3372 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
3376 gcc_assert (simple
|| c
!= NULL
);
3380 if (op
!= EXEC_OMP_SIMD
)
3381 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3382 else if (collapse
== 1)
3384 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3385 OMP_CLAUSE_LINEAR_STEP (tmp
) = build_int_cst (type
, 1);
3386 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3387 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
3390 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3391 OMP_CLAUSE_DECL (tmp
) = count
;
3392 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3395 if (i
+ 1 < collapse
)
3396 code
= code
->block
->next
;
3399 if (pblock
!= &block
)
3402 gfc_start_block (&block
);
3405 gfc_start_block (&body
);
3407 FOR_EACH_VEC_ELT (inits
, ix
, di
)
3408 gfc_add_modify (&body
, di
->var
, di
->init
);
3411 /* Cycle statement is implemented with a goto. Exit statement must not be
3412 present for this loop. */
3413 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3415 /* Put these labels where they can be found later. */
3417 code
->cycle_label
= cycle_label
;
3418 code
->exit_label
= NULL_TREE
;
3420 /* Main loop body. */
3421 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
3422 gfc_add_expr_to_block (&body
, tmp
);
3424 /* Label for cycle statements (if needed). */
3425 if (TREE_USED (cycle_label
))
3427 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3428 gfc_add_expr_to_block (&body
, tmp
);
3431 /* End of loop body. */
3434 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
3435 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
3436 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
3437 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
3438 default: gcc_unreachable ();
3441 TREE_TYPE (stmt
) = void_type_node
;
3442 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
3443 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
3444 OMP_FOR_INIT (stmt
) = init
;
3445 OMP_FOR_COND (stmt
) = cond
;
3446 OMP_FOR_INCR (stmt
) = incr
;
3447 gfc_add_expr_to_block (&block
, stmt
);
3449 return gfc_finish_block (&block
);
3452 /* parallel loop and kernels loop. */
3454 gfc_trans_oacc_combined_directive (gfc_code
*code
)
3456 stmtblock_t block
, *pblock
= NULL
;
3457 gfc_omp_clauses construct_clauses
, loop_clauses
;
3458 tree stmt
, oacc_clauses
= NULL_TREE
;
3459 enum tree_code construct_code
;
3463 case EXEC_OACC_PARALLEL_LOOP
:
3464 construct_code
= OACC_PARALLEL
;
3466 case EXEC_OACC_KERNELS_LOOP
:
3467 construct_code
= OACC_KERNELS
;
3473 gfc_start_block (&block
);
3475 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
3476 if (code
->ext
.omp_clauses
!= NULL
)
3478 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
3479 sizeof (construct_clauses
));
3480 loop_clauses
.collapse
= construct_clauses
.collapse
;
3481 loop_clauses
.gang
= construct_clauses
.gang
;
3482 loop_clauses
.gang_static
= construct_clauses
.gang_static
;
3483 loop_clauses
.gang_num_expr
= construct_clauses
.gang_num_expr
;
3484 loop_clauses
.gang_static_expr
= construct_clauses
.gang_static_expr
;
3485 loop_clauses
.vector
= construct_clauses
.vector
;
3486 loop_clauses
.vector_expr
= construct_clauses
.vector_expr
;
3487 loop_clauses
.worker
= construct_clauses
.worker
;
3488 loop_clauses
.worker_expr
= construct_clauses
.worker_expr
;
3489 loop_clauses
.seq
= construct_clauses
.seq
;
3490 loop_clauses
.par_auto
= construct_clauses
.par_auto
;
3491 loop_clauses
.independent
= construct_clauses
.independent
;
3492 loop_clauses
.tile_list
= construct_clauses
.tile_list
;
3493 loop_clauses
.lists
[OMP_LIST_PRIVATE
]
3494 = construct_clauses
.lists
[OMP_LIST_PRIVATE
];
3495 loop_clauses
.lists
[OMP_LIST_REDUCTION
]
3496 = construct_clauses
.lists
[OMP_LIST_REDUCTION
];
3497 construct_clauses
.gang
= false;
3498 construct_clauses
.gang_static
= false;
3499 construct_clauses
.gang_num_expr
= NULL
;
3500 construct_clauses
.gang_static_expr
= NULL
;
3501 construct_clauses
.vector
= false;
3502 construct_clauses
.vector_expr
= NULL
;
3503 construct_clauses
.worker
= false;
3504 construct_clauses
.worker_expr
= NULL
;
3505 construct_clauses
.seq
= false;
3506 construct_clauses
.par_auto
= false;
3507 construct_clauses
.independent
= false;
3508 construct_clauses
.independent
= false;
3509 construct_clauses
.tile_list
= NULL
;
3510 construct_clauses
.lists
[OMP_LIST_PRIVATE
] = NULL
;
3511 if (construct_code
== OACC_KERNELS
)
3512 construct_clauses
.lists
[OMP_LIST_REDUCTION
] = NULL
;
3513 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
3516 if (!loop_clauses
.seq
)
3520 stmt
= gfc_trans_omp_do (code
, EXEC_OACC_LOOP
, pblock
, &loop_clauses
, NULL
);
3521 if (TREE_CODE (stmt
) != BIND_EXPR
)
3522 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3525 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
3527 gfc_add_expr_to_block (&block
, stmt
);
3528 return gfc_finish_block (&block
);
3532 gfc_trans_omp_flush (void)
3534 tree decl
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
3535 return build_call_expr_loc (input_location
, decl
, 0);
3539 gfc_trans_omp_master (gfc_code
*code
)
3541 tree stmt
= gfc_trans_code (code
->block
->next
);
3542 if (IS_EMPTY_STMT (stmt
))
3544 return build1_v (OMP_MASTER
, stmt
);
3548 gfc_trans_omp_ordered (gfc_code
*code
)
3550 return build2_loc (input_location
, OMP_ORDERED
, void_type_node
,
3551 gfc_trans_code (code
->block
->next
), NULL_TREE
);
3555 gfc_trans_omp_parallel (gfc_code
*code
)
3558 tree stmt
, omp_clauses
;
3560 gfc_start_block (&block
);
3561 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3564 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3565 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3566 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3568 gfc_add_expr_to_block (&block
, stmt
);
3569 return gfc_finish_block (&block
);
/* Indices of the constituent constructs a combined OpenMP directive is
   split into, and the corresponding bit masks.  */

enum
{
  GFC_OMP_SPLIT_SIMD,
  GFC_OMP_SPLIT_DO,
  GFC_OMP_SPLIT_PARALLEL,
  GFC_OMP_SPLIT_DISTRIBUTE,
  GFC_OMP_SPLIT_TEAMS,
  GFC_OMP_SPLIT_TARGET,
  GFC_OMP_SPLIT_NUM
};

enum
{
  GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
  GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
  GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
  GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
  GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
  GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET)
};
3594 gfc_split_omp_clauses (gfc_code
*code
,
3595 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
3597 int mask
= 0, innermost
= 0;
3598 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
3601 case EXEC_OMP_DISTRIBUTE
:
3602 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3604 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
3605 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3606 innermost
= GFC_OMP_SPLIT_DO
;
3608 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
3609 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
3610 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3611 innermost
= GFC_OMP_SPLIT_SIMD
;
3613 case EXEC_OMP_DISTRIBUTE_SIMD
:
3614 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3615 innermost
= GFC_OMP_SPLIT_SIMD
;
3618 innermost
= GFC_OMP_SPLIT_DO
;
3620 case EXEC_OMP_DO_SIMD
:
3621 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3622 innermost
= GFC_OMP_SPLIT_SIMD
;
3624 case EXEC_OMP_PARALLEL
:
3625 innermost
= GFC_OMP_SPLIT_PARALLEL
;
3627 case EXEC_OMP_PARALLEL_DO
:
3628 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3629 innermost
= GFC_OMP_SPLIT_DO
;
3631 case EXEC_OMP_PARALLEL_DO_SIMD
:
3632 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3633 innermost
= GFC_OMP_SPLIT_SIMD
;
3636 innermost
= GFC_OMP_SPLIT_SIMD
;
3638 case EXEC_OMP_TARGET
:
3639 innermost
= GFC_OMP_SPLIT_TARGET
;
3641 case EXEC_OMP_TARGET_TEAMS
:
3642 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
3643 innermost
= GFC_OMP_SPLIT_TEAMS
;
3645 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
3646 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
3647 | GFC_OMP_MASK_DISTRIBUTE
;
3648 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3650 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
3651 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3652 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3653 innermost
= GFC_OMP_SPLIT_DO
;
3655 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
3656 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3657 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3658 innermost
= GFC_OMP_SPLIT_SIMD
;
3660 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
3661 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
3662 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3663 innermost
= GFC_OMP_SPLIT_SIMD
;
3665 case EXEC_OMP_TEAMS
:
3666 innermost
= GFC_OMP_SPLIT_TEAMS
;
3668 case EXEC_OMP_TEAMS_DISTRIBUTE
:
3669 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
3670 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3672 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
3673 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3674 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3675 innermost
= GFC_OMP_SPLIT_DO
;
3677 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
3678 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3679 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3680 innermost
= GFC_OMP_SPLIT_SIMD
;
3682 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
3683 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3684 innermost
= GFC_OMP_SPLIT_SIMD
;
3691 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
3694 if (code
->ext
.omp_clauses
!= NULL
)
3696 if (mask
& GFC_OMP_MASK_TARGET
)
3698 /* First the clauses that are unique to some constructs. */
3699 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
3700 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
3701 clausesa
[GFC_OMP_SPLIT_TARGET
].device
3702 = code
->ext
.omp_clauses
->device
;
3704 if (mask
& GFC_OMP_MASK_TEAMS
)
3706 /* First the clauses that are unique to some constructs. */
3707 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
3708 = code
->ext
.omp_clauses
->num_teams
;
3709 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
3710 = code
->ext
.omp_clauses
->thread_limit
;
3711 /* Shared and default clauses are allowed on parallel and teams. */
3712 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
3713 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
3714 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
3715 = code
->ext
.omp_clauses
->default_sharing
;
3717 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
3719 /* First the clauses that are unique to some constructs. */
3720 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
3721 = code
->ext
.omp_clauses
->dist_sched_kind
;
3722 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
3723 = code
->ext
.omp_clauses
->dist_chunk_size
;
3724 /* Duplicate collapse. */
3725 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
3726 = code
->ext
.omp_clauses
->collapse
;
3728 if (mask
& GFC_OMP_MASK_PARALLEL
)
3730 /* First the clauses that are unique to some constructs. */
3731 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
3732 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
3733 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
3734 = code
->ext
.omp_clauses
->num_threads
;
3735 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
3736 = code
->ext
.omp_clauses
->proc_bind
;
3737 /* Shared and default clauses are allowed on parallel and teams. */
3738 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
3739 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
3740 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
3741 = code
->ext
.omp_clauses
->default_sharing
;
3743 if (mask
& GFC_OMP_MASK_DO
)
3745 /* First the clauses that are unique to some constructs. */
3746 clausesa
[GFC_OMP_SPLIT_DO
].ordered
3747 = code
->ext
.omp_clauses
->ordered
;
3748 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
3749 = code
->ext
.omp_clauses
->sched_kind
;
3750 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
3751 = code
->ext
.omp_clauses
->chunk_size
;
3752 clausesa
[GFC_OMP_SPLIT_DO
].nowait
3753 = code
->ext
.omp_clauses
->nowait
;
3754 /* Duplicate collapse. */
3755 clausesa
[GFC_OMP_SPLIT_DO
].collapse
3756 = code
->ext
.omp_clauses
->collapse
;
3758 if (mask
& GFC_OMP_MASK_SIMD
)
3760 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
3761 = code
->ext
.omp_clauses
->safelen_expr
;
3762 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LINEAR
]
3763 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
3764 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
3765 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
3766 /* Duplicate collapse. */
3767 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
3768 = code
->ext
.omp_clauses
->collapse
;
3770 /* Private clause is supported on all constructs but target,
3771 it is enough to put it on the innermost one. For
3772 !$ omp do put it on parallel though,
3773 as that's what we did for OpenMP 3.1. */
3774 clausesa
[innermost
== GFC_OMP_SPLIT_DO
3775 ? (int) GFC_OMP_SPLIT_PARALLEL
3776 : innermost
].lists
[OMP_LIST_PRIVATE
]
3777 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
3778 /* Firstprivate clause is supported on all constructs but
3779 target and simd. Put it on the outermost of those and
3780 duplicate on parallel. */
3781 if (mask
& GFC_OMP_MASK_TEAMS
)
3782 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
3783 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3784 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
3785 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
3786 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3787 if (mask
& GFC_OMP_MASK_PARALLEL
)
3788 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
3789 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3790 else if (mask
& GFC_OMP_MASK_DO
)
3791 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
3792 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3793 /* Lastprivate is allowed on do and simd. In
3794 parallel do{, simd} we actually want to put it on
3795 parallel rather than do. */
3796 if (mask
& GFC_OMP_MASK_PARALLEL
)
3797 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
3798 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3799 else if (mask
& GFC_OMP_MASK_DO
)
3800 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
3801 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3802 if (mask
& GFC_OMP_MASK_SIMD
)
3803 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
3804 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3805 /* Reduction is allowed on simd, do, parallel and teams.
3806 Duplicate it on all of them, but omit on do if
3807 parallel is present. */
3808 if (mask
& GFC_OMP_MASK_TEAMS
)
3809 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_REDUCTION
]
3810 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3811 if (mask
& GFC_OMP_MASK_PARALLEL
)
3812 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_REDUCTION
]
3813 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3814 else if (mask
& GFC_OMP_MASK_DO
)
3815 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_REDUCTION
]
3816 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3817 if (mask
& GFC_OMP_MASK_SIMD
)
3818 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_REDUCTION
]
3819 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3820 /* FIXME: This is currently being discussed. */
3821 if (mask
& GFC_OMP_MASK_PARALLEL
)
3822 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
3823 = code
->ext
.omp_clauses
->if_expr
;
3825 clausesa
[GFC_OMP_SPLIT_TARGET
].if_expr
3826 = code
->ext
.omp_clauses
->if_expr
;
3828 if ((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
3829 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
3830 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
3834 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
3835 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
3838 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3839 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
3842 gfc_start_block (&block
);
3844 gfc_init_block (&block
);
3846 if (clausesa
== NULL
)
3848 clausesa
= clausesa_buf
;
3849 gfc_split_omp_clauses (code
, clausesa
);
3853 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
3854 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
3855 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
3858 if (TREE_CODE (body
) != BIND_EXPR
)
3859 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
3863 else if (TREE_CODE (body
) != BIND_EXPR
)
3864 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
3867 stmt
= make_node (OMP_FOR
);
3868 TREE_TYPE (stmt
) = void_type_node
;
3869 OMP_FOR_BODY (stmt
) = body
;
3870 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
3874 gfc_add_expr_to_block (&block
, stmt
);
3875 return gfc_finish_block (&block
);
3879 gfc_trans_omp_parallel_do (gfc_code
*code
, stmtblock_t
*pblock
,
3880 gfc_omp_clauses
*clausesa
)
3882 stmtblock_t block
, *new_pblock
= pblock
;
3883 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3884 tree stmt
, omp_clauses
= NULL_TREE
;
3887 gfc_start_block (&block
);
3889 gfc_init_block (&block
);
3891 if (clausesa
== NULL
)
3893 clausesa
= clausesa_buf
;
3894 gfc_split_omp_clauses (code
, clausesa
);
3897 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
3901 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
3902 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
3903 new_pblock
= &block
;
3907 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DO
, new_pblock
,
3908 &clausesa
[GFC_OMP_SPLIT_DO
], omp_clauses
);
3911 if (TREE_CODE (stmt
) != BIND_EXPR
)
3912 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3916 else if (TREE_CODE (stmt
) != BIND_EXPR
)
3917 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
3918 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3920 OMP_PARALLEL_COMBINED (stmt
) = 1;
3921 gfc_add_expr_to_block (&block
, stmt
);
3922 return gfc_finish_block (&block
);
3926 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
3927 gfc_omp_clauses
*clausesa
)
3930 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3931 tree stmt
, omp_clauses
= NULL_TREE
;
3934 gfc_start_block (&block
);
3936 gfc_init_block (&block
);
3938 if (clausesa
== NULL
)
3940 clausesa
= clausesa_buf
;
3941 gfc_split_omp_clauses (code
, clausesa
);
3945 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
3949 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
3952 if (TREE_CODE (stmt
) != BIND_EXPR
)
3953 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3957 else if (TREE_CODE (stmt
) != BIND_EXPR
)
3958 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
3961 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3963 OMP_PARALLEL_COMBINED (stmt
) = 1;
3965 gfc_add_expr_to_block (&block
, stmt
);
3966 return gfc_finish_block (&block
);
3970 gfc_trans_omp_parallel_sections (gfc_code
*code
)
3973 gfc_omp_clauses section_clauses
;
3974 tree stmt
, omp_clauses
;
3976 memset (§ion_clauses
, 0, sizeof (section_clauses
));
3977 section_clauses
.nowait
= true;
3979 gfc_start_block (&block
);
3980 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3983 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
3984 if (TREE_CODE (stmt
) != BIND_EXPR
)
3985 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3988 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3990 OMP_PARALLEL_COMBINED (stmt
) = 1;
3991 gfc_add_expr_to_block (&block
, stmt
);
3992 return gfc_finish_block (&block
);
3996 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
3999 gfc_omp_clauses workshare_clauses
;
4000 tree stmt
, omp_clauses
;
4002 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
4003 workshare_clauses
.nowait
= true;
4005 gfc_start_block (&block
);
4006 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4009 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
4010 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4011 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4013 OMP_PARALLEL_COMBINED (stmt
) = 1;
4014 gfc_add_expr_to_block (&block
, stmt
);
4015 return gfc_finish_block (&block
);
4019 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4021 stmtblock_t block
, body
;
4022 tree omp_clauses
, stmt
;
4023 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
4025 gfc_start_block (&block
);
4027 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
4029 gfc_init_block (&body
);
4030 for (code
= code
->block
; code
; code
= code
->block
)
4032 /* Last section is special because of lastprivate, so even if it
4033 is empty, chain it in. */
4034 stmt
= gfc_trans_omp_code (code
->next
,
4035 has_lastprivate
&& code
->block
== NULL
);
4036 if (! IS_EMPTY_STMT (stmt
))
4038 stmt
= build1_v (OMP_SECTION
, stmt
);
4039 gfc_add_expr_to_block (&body
, stmt
);
4042 stmt
= gfc_finish_block (&body
);
4044 stmt
= build2_loc (input_location
, OMP_SECTIONS
, void_type_node
, stmt
,
4046 gfc_add_expr_to_block (&block
, stmt
);
4048 return gfc_finish_block (&block
);
4052 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4054 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, clauses
, code
->loc
);
4055 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4056 stmt
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, stmt
,
4062 gfc_trans_omp_task (gfc_code
*code
)
4065 tree stmt
, omp_clauses
;
4067 gfc_start_block (&block
);
4068 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4071 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4072 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4073 stmt
= build2_loc (input_location
, OMP_TASK
, void_type_node
, stmt
,
4075 gfc_add_expr_to_block (&block
, stmt
);
4076 return gfc_finish_block (&block
);
4080 gfc_trans_omp_taskgroup (gfc_code
*code
)
4082 tree stmt
= gfc_trans_code (code
->block
->next
);
4083 return build1_loc (input_location
, OMP_TASKGROUP
, void_type_node
, stmt
);
4087 gfc_trans_omp_taskwait (void)
4089 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
4090 return build_call_expr_loc (input_location
, decl
, 0);
4094 gfc_trans_omp_taskyield (void)
4096 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
4097 return build_call_expr_loc (input_location
, decl
, 0);
4101 gfc_trans_omp_distribute (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
4104 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4105 tree stmt
, omp_clauses
= NULL_TREE
;
4107 gfc_start_block (&block
);
4108 if (clausesa
== NULL
)
4110 clausesa
= clausesa_buf
;
4111 gfc_split_omp_clauses (code
, clausesa
);
4115 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4119 case EXEC_OMP_DISTRIBUTE
:
4120 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4121 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4122 /* This is handled in gfc_trans_omp_do. */
4125 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4126 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4127 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4128 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
4129 if (TREE_CODE (stmt
) != BIND_EXPR
)
4130 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4134 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4135 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4136 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4137 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
4138 if (TREE_CODE (stmt
) != BIND_EXPR
)
4139 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4143 case EXEC_OMP_DISTRIBUTE_SIMD
:
4144 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4145 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4146 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4147 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4148 if (TREE_CODE (stmt
) != BIND_EXPR
)
4149 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4158 tree distribute
= make_node (OMP_DISTRIBUTE
);
4159 TREE_TYPE (distribute
) = void_type_node
;
4160 OMP_FOR_BODY (distribute
) = stmt
;
4161 OMP_FOR_CLAUSES (distribute
) = omp_clauses
;
4164 gfc_add_expr_to_block (&block
, stmt
);
4165 return gfc_finish_block (&block
);
4169 gfc_trans_omp_teams (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
4172 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4173 tree stmt
, omp_clauses
= NULL_TREE
;
4174 bool combined
= true;
4176 gfc_start_block (&block
);
4177 if (clausesa
== NULL
)
4179 clausesa
= clausesa_buf
;
4180 gfc_split_omp_clauses (code
, clausesa
);
4184 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TEAMS
],
4188 case EXEC_OMP_TARGET_TEAMS
:
4189 case EXEC_OMP_TEAMS
:
4190 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4193 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4194 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4195 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DISTRIBUTE
, NULL
,
4196 &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4200 stmt
= gfc_trans_omp_distribute (code
, clausesa
);
4203 stmt
= build2_loc (input_location
, OMP_TEAMS
, void_type_node
, stmt
,
4206 OMP_TEAMS_COMBINED (stmt
) = 1;
4207 gfc_add_expr_to_block (&block
, stmt
);
4208 return gfc_finish_block (&block
);
4212 gfc_trans_omp_target (gfc_code
*code
)
4215 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
4216 tree stmt
, omp_clauses
= NULL_TREE
;
4218 gfc_start_block (&block
);
4219 gfc_split_omp_clauses (code
, clausesa
);
4222 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TARGET
],
4224 if (code
->op
== EXEC_OMP_TARGET
)
4227 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4228 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4233 stmt
= gfc_trans_omp_teams (code
, clausesa
);
4234 if (TREE_CODE (stmt
) != BIND_EXPR
)
4235 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4240 stmt
= build2_loc (input_location
, OMP_TARGET
, void_type_node
, stmt
,
4242 gfc_add_expr_to_block (&block
, stmt
);
4243 return gfc_finish_block (&block
);
4247 gfc_trans_omp_target_data (gfc_code
*code
)
4250 tree stmt
, omp_clauses
;
4252 gfc_start_block (&block
);
4253 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4255 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4256 stmt
= build2_loc (input_location
, OMP_TARGET_DATA
, void_type_node
, stmt
,
4258 gfc_add_expr_to_block (&block
, stmt
);
4259 return gfc_finish_block (&block
);
4263 gfc_trans_omp_target_update (gfc_code
*code
)
4266 tree stmt
, omp_clauses
;
4268 gfc_start_block (&block
);
4269 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4271 stmt
= build1_loc (input_location
, OMP_TARGET_UPDATE
, void_type_node
,
4273 gfc_add_expr_to_block (&block
, stmt
);
4274 return gfc_finish_block (&block
);
4278 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4280 tree res
, tmp
, stmt
;
4281 stmtblock_t block
, *pblock
= NULL
;
4282 stmtblock_t singleblock
;
4283 int saved_ompws_flags
;
4284 bool singleblock_in_progress
= false;
4285 /* True if previous gfc_code in workshare construct is not workshared. */
4286 bool prev_singleunit
;
4288 code
= code
->block
->next
;
4292 gfc_start_block (&block
);
4295 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
4296 prev_singleunit
= false;
4298 /* Translate statements one by one to trees until we reach
4299 the end of the workshare construct. Adjacent gfc_codes that
4300 are a single unit of work are clustered and encapsulated in a
4301 single OMP_SINGLE construct. */
4302 for (; code
; code
= code
->next
)
4304 if (code
->here
!= 0)
4306 res
= gfc_trans_label_here (code
);
4307 gfc_add_expr_to_block (pblock
, res
);
4310 /* No dependence analysis, use for clauses with wait.
4311 If this is the last gfc_code, use default omp_clauses. */
4312 if (code
->next
== NULL
&& clauses
->nowait
)
4313 ompws_flags
|= OMPWS_NOWAIT
;
4315 /* By default, every gfc_code is a single unit of work. */
4316 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
4317 ompws_flags
&= ~(OMPWS_SCALARIZER_WS
| OMPWS_SCALARIZER_BODY
);
4326 res
= gfc_trans_assign (code
);
4329 case EXEC_POINTER_ASSIGN
:
4330 res
= gfc_trans_pointer_assign (code
);
4333 case EXEC_INIT_ASSIGN
:
4334 res
= gfc_trans_init_assign (code
);
4338 res
= gfc_trans_forall (code
);
4342 res
= gfc_trans_where (code
);
4345 case EXEC_OMP_ATOMIC
:
4346 res
= gfc_trans_omp_directive (code
);
4349 case EXEC_OMP_PARALLEL
:
4350 case EXEC_OMP_PARALLEL_DO
:
4351 case EXEC_OMP_PARALLEL_SECTIONS
:
4352 case EXEC_OMP_PARALLEL_WORKSHARE
:
4353 case EXEC_OMP_CRITICAL
:
4354 saved_ompws_flags
= ompws_flags
;
4356 res
= gfc_trans_omp_directive (code
);
4357 ompws_flags
= saved_ompws_flags
;
4361 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
4364 gfc_set_backend_locus (&code
->loc
);
4366 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
4368 if (prev_singleunit
)
4370 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
4371 /* Add current gfc_code to single block. */
4372 gfc_add_expr_to_block (&singleblock
, res
);
4375 /* Finish single block and add it to pblock. */
4376 tmp
= gfc_finish_block (&singleblock
);
4377 tmp
= build2_loc (input_location
, OMP_SINGLE
,
4378 void_type_node
, tmp
, NULL_TREE
);
4379 gfc_add_expr_to_block (pblock
, tmp
);
4380 /* Add current gfc_code to pblock. */
4381 gfc_add_expr_to_block (pblock
, res
);
4382 singleblock_in_progress
= false;
4387 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
4389 /* Start single block. */
4390 gfc_init_block (&singleblock
);
4391 gfc_add_expr_to_block (&singleblock
, res
);
4392 singleblock_in_progress
= true;
4395 /* Add the new statement to the block. */
4396 gfc_add_expr_to_block (pblock
, res
);
4398 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
4402 /* Finish remaining SINGLE block, if we were in the middle of one. */
4403 if (singleblock_in_progress
)
4405 /* Finish single block and add it to pblock. */
4406 tmp
= gfc_finish_block (&singleblock
);
4407 tmp
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, tmp
,
4409 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
4411 gfc_add_expr_to_block (pblock
, tmp
);
4414 stmt
= gfc_finish_block (pblock
);
4415 if (TREE_CODE (stmt
) != BIND_EXPR
)
4417 if (!IS_EMPTY_STMT (stmt
))
4419 tree bindblock
= poplevel (1, 0);
4420 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
4428 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
4429 stmt
= gfc_trans_omp_barrier ();
4436 gfc_trans_oacc_declare (gfc_code
*code
)
4439 tree stmt
, oacc_clauses
;
4440 enum tree_code construct_code
;
4442 construct_code
= OACC_DATA
;
4444 gfc_start_block (&block
);
4446 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.oacc_declare
->clauses
,
4448 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4449 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
4451 gfc_add_expr_to_block (&block
, stmt
);
4453 return gfc_finish_block (&block
);
4457 gfc_trans_oacc_directive (gfc_code
*code
)
4461 case EXEC_OACC_PARALLEL_LOOP
:
4462 case EXEC_OACC_KERNELS_LOOP
:
4463 return gfc_trans_oacc_combined_directive (code
);
4464 case EXEC_OACC_PARALLEL
:
4465 case EXEC_OACC_KERNELS
:
4466 case EXEC_OACC_DATA
:
4467 case EXEC_OACC_HOST_DATA
:
4468 return gfc_trans_oacc_construct (code
);
4469 case EXEC_OACC_LOOP
:
4470 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
4472 case EXEC_OACC_UPDATE
:
4473 case EXEC_OACC_CACHE
:
4474 case EXEC_OACC_ENTER_DATA
:
4475 case EXEC_OACC_EXIT_DATA
:
4476 return gfc_trans_oacc_executable_directive (code
);
4477 case EXEC_OACC_WAIT
:
4478 return gfc_trans_oacc_wait_directive (code
);
4479 case EXEC_OACC_ATOMIC
:
4480 return gfc_trans_omp_atomic (code
);
4481 case EXEC_OACC_DECLARE
:
4482 return gfc_trans_oacc_declare (code
);
4489 gfc_trans_omp_directive (gfc_code
*code
)
4493 case EXEC_OMP_ATOMIC
:
4494 return gfc_trans_omp_atomic (code
);
4495 case EXEC_OMP_BARRIER
:
4496 return gfc_trans_omp_barrier ();
4497 case EXEC_OMP_CANCEL
:
4498 return gfc_trans_omp_cancel (code
);
4499 case EXEC_OMP_CANCELLATION_POINT
:
4500 return gfc_trans_omp_cancellation_point (code
);
4501 case EXEC_OMP_CRITICAL
:
4502 return gfc_trans_omp_critical (code
);
4503 case EXEC_OMP_DISTRIBUTE
:
4506 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
4508 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4509 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4510 case EXEC_OMP_DISTRIBUTE_SIMD
:
4511 return gfc_trans_omp_distribute (code
, NULL
);
4512 case EXEC_OMP_DO_SIMD
:
4513 return gfc_trans_omp_do_simd (code
, NULL
, NULL
, NULL_TREE
);
4514 case EXEC_OMP_FLUSH
:
4515 return gfc_trans_omp_flush ();
4516 case EXEC_OMP_MASTER
:
4517 return gfc_trans_omp_master (code
);
4518 case EXEC_OMP_ORDERED
:
4519 return gfc_trans_omp_ordered (code
);
4520 case EXEC_OMP_PARALLEL
:
4521 return gfc_trans_omp_parallel (code
);
4522 case EXEC_OMP_PARALLEL_DO
:
4523 return gfc_trans_omp_parallel_do (code
, NULL
, NULL
);
4524 case EXEC_OMP_PARALLEL_DO_SIMD
:
4525 return gfc_trans_omp_parallel_do_simd (code
, NULL
, NULL
);
4526 case EXEC_OMP_PARALLEL_SECTIONS
:
4527 return gfc_trans_omp_parallel_sections (code
);
4528 case EXEC_OMP_PARALLEL_WORKSHARE
:
4529 return gfc_trans_omp_parallel_workshare (code
);
4530 case EXEC_OMP_SECTIONS
:
4531 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
4532 case EXEC_OMP_SINGLE
:
4533 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
4534 case EXEC_OMP_TARGET
:
4535 case EXEC_OMP_TARGET_TEAMS
:
4536 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4537 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4538 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4539 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4540 return gfc_trans_omp_target (code
);
4541 case EXEC_OMP_TARGET_DATA
:
4542 return gfc_trans_omp_target_data (code
);
4543 case EXEC_OMP_TARGET_UPDATE
:
4544 return gfc_trans_omp_target_update (code
);
4546 return gfc_trans_omp_task (code
);
4547 case EXEC_OMP_TASKGROUP
:
4548 return gfc_trans_omp_taskgroup (code
);
4549 case EXEC_OMP_TASKWAIT
:
4550 return gfc_trans_omp_taskwait ();
4551 case EXEC_OMP_TASKYIELD
:
4552 return gfc_trans_omp_taskyield ();
4553 case EXEC_OMP_TEAMS
:
4554 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4555 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4556 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4557 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4558 return gfc_trans_omp_teams (code
, NULL
);
4559 case EXEC_OMP_WORKSHARE
:
4560 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);
4567 gfc_trans_omp_declare_simd (gfc_namespace
*ns
)
4572 gfc_omp_declare_simd
*ods
;
4573 for (ods
= ns
->omp_declare_simd
; ods
; ods
= ods
->next
)
4575 tree c
= gfc_trans_omp_clauses (NULL
, ods
->clauses
, ods
->where
, true);
4576 tree fndecl
= ns
->proc_name
->backend_decl
;
4578 c
= tree_cons (NULL_TREE
, c
, NULL_TREE
);
4579 c
= build_tree_list (get_identifier ("omp declare simd"), c
);
4580 TREE_CHAIN (c
) = DECL_ATTRIBUTES (fndecl
);
4581 DECL_ATTRIBUTES (fndecl
) = c
;