/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005-2016 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
28 #include "gimple-expr.h"
30 #include "stringpool.h"
31 #include "fold-const.h"
32 #include "gimplify.h" /* For create_tmp_var_raw. */
33 #include "trans-stmt.h"
34 #include "trans-types.h"
35 #include "trans-array.h"
36 #include "trans-const.h"
39 #include "gomp-constants.h"
43 /* True if OpenMP should privatize what this DECL points to rather
44 than the DECL itself. */
47 gfc_omp_privatize_by_reference (const_tree decl
)
49 tree type
= TREE_TYPE (decl
);
51 if (TREE_CODE (type
) == REFERENCE_TYPE
52 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
55 if (TREE_CODE (type
) == POINTER_TYPE
)
57 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
58 that have POINTER_TYPE type and aren't scalar pointers, scalar
59 allocatables, Cray pointees or C pointers are supposed to be
60 privatized by reference. */
61 if (GFC_DECL_GET_SCALAR_POINTER (decl
)
62 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
63 || GFC_DECL_CRAY_POINTEE (decl
)
64 || GFC_DECL_ASSOCIATE_VAR_P (decl
)
65 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
68 if (!DECL_ARTIFICIAL (decl
)
69 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
72 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
74 if (DECL_LANG_SPECIFIC (decl
)
75 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
82 /* True if OpenMP sharing attribute of DECL is predetermined. */
84 enum omp_clause_default_kind
85 gfc_omp_predetermined_sharing (tree decl
)
87 /* Associate names preserve the association established during ASSOCIATE.
88 As they are implemented either as pointers to the selector or array
89 descriptor and shouldn't really change in the ASSOCIATE region,
90 this decl can be either shared or firstprivate. If it is a pointer,
91 use firstprivate, as it is cheaper that way, otherwise make it shared. */
92 if (GFC_DECL_ASSOCIATE_VAR_P (decl
))
94 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
95 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
97 return OMP_CLAUSE_DEFAULT_SHARED
;
100 if (DECL_ARTIFICIAL (decl
)
101 && ! GFC_DECL_RESULT (decl
)
102 && ! (DECL_LANG_SPECIFIC (decl
)
103 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
104 return OMP_CLAUSE_DEFAULT_SHARED
;
106 /* Cray pointees shouldn't be listed in any clauses and should be
107 gimplified to dereference of the corresponding Cray pointer.
108 Make them all private, so that they are emitted in the debug
110 if (GFC_DECL_CRAY_POINTEE (decl
))
111 return OMP_CLAUSE_DEFAULT_PRIVATE
;
113 /* Assumed-size arrays are predetermined shared. */
114 if (TREE_CODE (decl
) == PARM_DECL
115 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
116 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
117 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
118 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
120 return OMP_CLAUSE_DEFAULT_SHARED
;
122 /* Dummy procedures aren't considered variables by OpenMP, thus are
123 disallowed in OpenMP clauses. They are represented as PARM_DECLs
124 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
125 to avoid complaining about their uses with default(none). */
126 if (TREE_CODE (decl
) == PARM_DECL
127 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
128 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
129 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
131 /* COMMON and EQUIVALENCE decls are shared. They
132 are only referenced through DECL_VALUE_EXPR of the variables
133 contained in them. If those are privatized, they will not be
134 gimplified to the COMMON or EQUIVALENCE decls. */
135 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
136 return OMP_CLAUSE_DEFAULT_SHARED
;
138 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
139 return OMP_CLAUSE_DEFAULT_SHARED
;
141 /* These are either array or derived parameters, or vtables.
142 In the former cases, the OpenMP standard doesn't consider them to be
143 variables at all (they can't be redefined), but they can nevertheless appear
144 in parallel/task regions and for default(none) purposes treat them as shared.
145 For vtables likely the same handling is desirable. */
146 if (TREE_CODE (decl
) == VAR_DECL
147 && TREE_READONLY (decl
)
148 && TREE_STATIC (decl
))
149 return OMP_CLAUSE_DEFAULT_SHARED
;
151 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
154 /* Return decl that should be used when reporting DEFAULT(NONE)
158 gfc_omp_report_decl (tree decl
)
160 if (DECL_ARTIFICIAL (decl
)
161 && DECL_LANG_SPECIFIC (decl
)
162 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
163 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
168 /* Return true if TYPE has any allocatable components. */
171 gfc_has_alloc_comps (tree type
, tree decl
)
175 if (POINTER_TYPE_P (type
))
177 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
178 type
= TREE_TYPE (type
);
179 else if (GFC_DECL_GET_SCALAR_POINTER (decl
))
183 if (GFC_DESCRIPTOR_TYPE_P (type
) || GFC_ARRAY_TYPE_P (type
))
184 type
= gfc_get_element_type (type
);
186 if (TREE_CODE (type
) != RECORD_TYPE
)
189 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
191 ftype
= TREE_TYPE (field
);
192 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
194 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
195 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
197 if (gfc_has_alloc_comps (ftype
, field
))
203 /* Return true if DECL in private clause needs
204 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
206 gfc_omp_private_outer_ref (tree decl
)
208 tree type
= TREE_TYPE (decl
);
210 if (GFC_DESCRIPTOR_TYPE_P (type
)
211 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
214 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
217 if (gfc_omp_privatize_by_reference (decl
))
218 type
= TREE_TYPE (type
);
220 if (gfc_has_alloc_comps (type
, decl
))
226 /* Callback for gfc_omp_unshare_expr. */
229 gfc_omp_unshare_expr_r (tree
*tp
, int *walk_subtrees
, void *)
232 enum tree_code code
= TREE_CODE (t
);
234 /* Stop at types, decls, constants like copy_tree_r. */
235 if (TREE_CODE_CLASS (code
) == tcc_type
236 || TREE_CODE_CLASS (code
) == tcc_declaration
237 || TREE_CODE_CLASS (code
) == tcc_constant
240 else if (handled_component_p (t
)
241 || TREE_CODE (t
) == MEM_REF
)
243 *tp
= unshare_expr (t
);
250 /* Unshare in expr anything that the FE which normally doesn't
251 care much about tree sharing (because during gimplification
252 everything is unshared) could cause problems with tree sharing
253 at omp-low.c time. */
256 gfc_omp_unshare_expr (tree expr
)
258 walk_tree (&expr
, gfc_omp_unshare_expr_r
, NULL
, NULL
);
/* What gfc_walk_alloc_comps should emit for each allocatable
   component: destruction, default construction, or copy
   construction.  */
enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,
  WALK_ALLOC_COMPS_DEFAULT_CTOR,
  WALK_ALLOC_COMPS_COPY_CTOR
};
269 /* Handle allocatable components in OpenMP clauses. */
272 gfc_walk_alloc_comps (tree decl
, tree dest
, tree var
,
273 enum walk_alloc_comps kind
)
275 stmtblock_t block
, tmpblock
;
276 tree type
= TREE_TYPE (decl
), then_b
, tem
, field
;
277 gfc_init_block (&block
);
279 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
281 if (GFC_DESCRIPTOR_TYPE_P (type
))
283 gfc_init_block (&tmpblock
);
284 tem
= gfc_full_array_size (&tmpblock
, decl
,
285 GFC_TYPE_ARRAY_RANK (type
));
286 then_b
= gfc_finish_block (&tmpblock
);
287 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (then_b
));
288 tem
= gfc_omp_unshare_expr (tem
);
289 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
290 gfc_array_index_type
, tem
,
295 if (!TYPE_DOMAIN (type
)
296 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
297 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
298 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
300 tem
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
301 TYPE_SIZE_UNIT (type
),
302 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
303 tem
= size_binop (MINUS_EXPR
, tem
, size_one_node
);
306 tem
= array_type_nelts (type
);
307 tem
= fold_convert (gfc_array_index_type
, tem
);
310 tree nelems
= gfc_evaluate_now (tem
, &block
);
311 tree index
= gfc_create_var (gfc_array_index_type
, "S");
313 gfc_init_block (&tmpblock
);
314 tem
= gfc_conv_array_data (decl
);
315 tree declvar
= build_fold_indirect_ref_loc (input_location
, tem
);
316 tree declvref
= gfc_build_array_ref (declvar
, index
, NULL
);
317 tree destvar
, destvref
= NULL_TREE
;
320 tem
= gfc_conv_array_data (dest
);
321 destvar
= build_fold_indirect_ref_loc (input_location
, tem
);
322 destvref
= gfc_build_array_ref (destvar
, index
, NULL
);
324 gfc_add_expr_to_block (&tmpblock
,
325 gfc_walk_alloc_comps (declvref
, destvref
,
329 gfc_init_loopinfo (&loop
);
331 loop
.from
[0] = gfc_index_zero_node
;
332 loop
.loopvar
[0] = index
;
334 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
335 gfc_add_block_to_block (&block
, &loop
.pre
);
336 return gfc_finish_block (&block
);
338 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var
))
340 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
342 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
343 type
= TREE_TYPE (decl
);
346 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
);
347 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
349 tree ftype
= TREE_TYPE (field
);
350 tree declf
, destf
= NULL_TREE
;
351 bool has_alloc_comps
= gfc_has_alloc_comps (ftype
, field
);
352 if ((!GFC_DESCRIPTOR_TYPE_P (ftype
)
353 || GFC_TYPE_ARRAY_AKIND (ftype
) != GFC_ARRAY_ALLOCATABLE
)
354 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field
)
357 declf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
358 decl
, field
, NULL_TREE
);
360 destf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
361 dest
, field
, NULL_TREE
);
366 case WALK_ALLOC_COMPS_DTOR
:
368 case WALK_ALLOC_COMPS_DEFAULT_CTOR
:
369 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
370 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
372 gfc_add_modify (&block
, unshare_expr (destf
),
373 unshare_expr (declf
));
374 tem
= gfc_duplicate_allocatable_nocopy
375 (destf
, declf
, ftype
,
376 GFC_TYPE_ARRAY_RANK (ftype
));
378 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
379 tem
= gfc_duplicate_allocatable_nocopy (destf
, declf
, ftype
, 0);
381 case WALK_ALLOC_COMPS_COPY_CTOR
:
382 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
383 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
384 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
,
385 GFC_TYPE_ARRAY_RANK (ftype
),
387 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
388 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
, 0,
393 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
396 gfc_init_block (&tmpblock
);
397 gfc_add_expr_to_block (&tmpblock
,
398 gfc_walk_alloc_comps (declf
, destf
,
400 then_b
= gfc_finish_block (&tmpblock
);
401 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
402 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
403 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
404 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
405 tem
= unshare_expr (declf
);
410 tem
= fold_convert (pvoid_type_node
, tem
);
411 tem
= fold_build2_loc (input_location
, NE_EXPR
,
412 boolean_type_node
, tem
,
414 then_b
= build3_loc (input_location
, COND_EXPR
, void_type_node
,
416 build_empty_stmt (input_location
));
418 gfc_add_expr_to_block (&block
, then_b
);
420 if (kind
== WALK_ALLOC_COMPS_DTOR
)
422 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
423 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
425 tem
= gfc_trans_dealloc_allocated (unshare_expr (declf
),
427 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
429 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
431 tem
= gfc_call_free (unshare_expr (declf
));
432 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
437 return gfc_finish_block (&block
);
440 /* Return code to initialize DECL with its default constructor, or
441 NULL if there's nothing to do. */
444 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
446 tree type
= TREE_TYPE (decl
), size
, ptr
, cond
, then_b
, else_b
;
447 stmtblock_t block
, cond_block
;
449 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_PRIVATE
450 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LASTPRIVATE
451 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
452 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_REDUCTION
);
454 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
455 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
456 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
458 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
461 gfc_start_block (&block
);
462 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
463 OMP_CLAUSE_DECL (clause
),
464 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
465 gfc_add_expr_to_block (&block
, tem
);
466 return gfc_finish_block (&block
);
471 gcc_assert (outer
!= NULL_TREE
);
473 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
474 "not currently allocated" allocation status if outer
475 array is "not currently allocated", otherwise should be allocated. */
476 gfc_start_block (&block
);
478 gfc_init_block (&cond_block
);
480 if (GFC_DESCRIPTOR_TYPE_P (type
))
482 gfc_add_modify (&cond_block
, decl
, outer
);
483 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
484 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
485 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
487 gfc_conv_descriptor_lbound_get (decl
, rank
));
488 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
489 size
, gfc_index_one_node
);
490 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
491 size
= fold_build2_loc (input_location
, MULT_EXPR
,
492 gfc_array_index_type
, size
,
493 gfc_conv_descriptor_stride_get (decl
, rank
));
494 tree esize
= fold_convert (gfc_array_index_type
,
495 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
496 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
498 size
= unshare_expr (size
);
499 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
503 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
504 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
505 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
506 if (GFC_DESCRIPTOR_TYPE_P (type
))
507 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
), ptr
);
509 gfc_add_modify (&cond_block
, unshare_expr (decl
),
510 fold_convert (TREE_TYPE (decl
), ptr
));
511 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
513 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
514 OMP_CLAUSE_DECL (clause
),
515 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
516 gfc_add_expr_to_block (&cond_block
, tem
);
518 then_b
= gfc_finish_block (&cond_block
);
520 /* Reduction clause requires allocated ALLOCATABLE. */
521 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_REDUCTION
)
523 gfc_init_block (&cond_block
);
524 if (GFC_DESCRIPTOR_TYPE_P (type
))
525 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
),
528 gfc_add_modify (&cond_block
, unshare_expr (decl
),
529 build_zero_cst (TREE_TYPE (decl
)));
530 else_b
= gfc_finish_block (&cond_block
);
532 tree tem
= fold_convert (pvoid_type_node
,
533 GFC_DESCRIPTOR_TYPE_P (type
)
534 ? gfc_conv_descriptor_data_get (outer
) : outer
);
535 tem
= unshare_expr (tem
);
536 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
537 tem
, null_pointer_node
);
538 gfc_add_expr_to_block (&block
,
539 build3_loc (input_location
, COND_EXPR
,
540 void_type_node
, cond
, then_b
,
544 gfc_add_expr_to_block (&block
, then_b
);
546 return gfc_finish_block (&block
);
549 /* Build and return code for a copy constructor from SRC to DEST. */
552 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
554 tree type
= TREE_TYPE (dest
), ptr
, size
, call
;
555 tree cond
, then_b
, else_b
;
556 stmtblock_t block
, cond_block
;
558 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
559 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
561 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
562 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
563 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
565 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
567 gfc_start_block (&block
);
568 gfc_add_modify (&block
, dest
, src
);
569 tree tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
570 WALK_ALLOC_COMPS_COPY_CTOR
);
571 gfc_add_expr_to_block (&block
, tem
);
572 return gfc_finish_block (&block
);
575 return build2_v (MODIFY_EXPR
, dest
, src
);
578 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
579 and copied from SRC. */
580 gfc_start_block (&block
);
582 gfc_init_block (&cond_block
);
584 gfc_add_modify (&cond_block
, dest
, src
);
585 if (GFC_DESCRIPTOR_TYPE_P (type
))
587 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
588 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
589 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
591 gfc_conv_descriptor_lbound_get (dest
, rank
));
592 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
593 size
, gfc_index_one_node
);
594 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
595 size
= fold_build2_loc (input_location
, MULT_EXPR
,
596 gfc_array_index_type
, size
,
597 gfc_conv_descriptor_stride_get (dest
, rank
));
598 tree esize
= fold_convert (gfc_array_index_type
,
599 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
600 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
602 size
= unshare_expr (size
);
603 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
607 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
608 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
609 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
610 if (GFC_DESCRIPTOR_TYPE_P (type
))
611 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
), ptr
);
613 gfc_add_modify (&cond_block
, unshare_expr (dest
),
614 fold_convert (TREE_TYPE (dest
), ptr
));
616 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
617 ? gfc_conv_descriptor_data_get (src
) : src
;
618 srcptr
= unshare_expr (srcptr
);
619 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
620 call
= build_call_expr_loc (input_location
,
621 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
623 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
624 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
626 tree tem
= gfc_walk_alloc_comps (src
, dest
,
627 OMP_CLAUSE_DECL (clause
),
628 WALK_ALLOC_COMPS_COPY_CTOR
);
629 gfc_add_expr_to_block (&cond_block
, tem
);
631 then_b
= gfc_finish_block (&cond_block
);
633 gfc_init_block (&cond_block
);
634 if (GFC_DESCRIPTOR_TYPE_P (type
))
635 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
),
638 gfc_add_modify (&cond_block
, unshare_expr (dest
),
639 build_zero_cst (TREE_TYPE (dest
)));
640 else_b
= gfc_finish_block (&cond_block
);
642 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
643 unshare_expr (srcptr
), null_pointer_node
);
644 gfc_add_expr_to_block (&block
,
645 build3_loc (input_location
, COND_EXPR
,
646 void_type_node
, cond
, then_b
, else_b
));
648 return gfc_finish_block (&block
);
651 /* Similarly, except use an intrinsic or pointer assignment operator
655 gfc_omp_clause_assign_op (tree clause
, tree dest
, tree src
)
657 tree type
= TREE_TYPE (dest
), ptr
, size
, call
, nonalloc
;
658 tree cond
, then_b
, else_b
;
659 stmtblock_t block
, cond_block
, cond_block2
, inner_block
;
661 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
662 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
663 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
665 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
667 gfc_start_block (&block
);
668 /* First dealloc any allocatable components in DEST. */
669 tree tem
= gfc_walk_alloc_comps (dest
, NULL_TREE
,
670 OMP_CLAUSE_DECL (clause
),
671 WALK_ALLOC_COMPS_DTOR
);
672 gfc_add_expr_to_block (&block
, tem
);
673 /* Then copy over toplevel data. */
674 gfc_add_modify (&block
, dest
, src
);
675 /* Finally allocate any allocatable components and copy. */
676 tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
677 WALK_ALLOC_COMPS_COPY_CTOR
);
678 gfc_add_expr_to_block (&block
, tem
);
679 return gfc_finish_block (&block
);
682 return build2_v (MODIFY_EXPR
, dest
, src
);
685 gfc_start_block (&block
);
687 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
689 then_b
= gfc_walk_alloc_comps (dest
, NULL_TREE
, OMP_CLAUSE_DECL (clause
),
690 WALK_ALLOC_COMPS_DTOR
);
691 tree tem
= fold_convert (pvoid_type_node
,
692 GFC_DESCRIPTOR_TYPE_P (type
)
693 ? gfc_conv_descriptor_data_get (dest
) : dest
);
694 tem
= unshare_expr (tem
);
695 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
696 tem
, null_pointer_node
);
697 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
698 then_b
, build_empty_stmt (input_location
));
699 gfc_add_expr_to_block (&block
, tem
);
702 gfc_init_block (&cond_block
);
704 if (GFC_DESCRIPTOR_TYPE_P (type
))
706 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
707 size
= gfc_conv_descriptor_ubound_get (src
, rank
);
708 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
710 gfc_conv_descriptor_lbound_get (src
, rank
));
711 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
712 size
, gfc_index_one_node
);
713 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
714 size
= fold_build2_loc (input_location
, MULT_EXPR
,
715 gfc_array_index_type
, size
,
716 gfc_conv_descriptor_stride_get (src
, rank
));
717 tree esize
= fold_convert (gfc_array_index_type
,
718 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
719 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
721 size
= unshare_expr (size
);
722 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
726 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
727 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
729 tree destptr
= GFC_DESCRIPTOR_TYPE_P (type
)
730 ? gfc_conv_descriptor_data_get (dest
) : dest
;
731 destptr
= unshare_expr (destptr
);
732 destptr
= fold_convert (pvoid_type_node
, destptr
);
733 gfc_add_modify (&cond_block
, ptr
, destptr
);
735 nonalloc
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
,
736 destptr
, null_pointer_node
);
738 if (GFC_DESCRIPTOR_TYPE_P (type
))
741 for (i
= 0; i
< GFC_TYPE_ARRAY_RANK (type
); i
++)
743 tree rank
= gfc_rank_cst
[i
];
744 tree tem
= gfc_conv_descriptor_ubound_get (src
, rank
);
745 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
746 gfc_array_index_type
, tem
,
747 gfc_conv_descriptor_lbound_get (src
, rank
));
748 tem
= fold_build2_loc (input_location
, PLUS_EXPR
,
749 gfc_array_index_type
, tem
,
750 gfc_conv_descriptor_lbound_get (dest
, rank
));
751 tem
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
752 tem
, gfc_conv_descriptor_ubound_get (dest
,
754 cond
= fold_build2_loc (input_location
, TRUTH_ORIF_EXPR
,
755 boolean_type_node
, cond
, tem
);
759 gfc_init_block (&cond_block2
);
761 if (GFC_DESCRIPTOR_TYPE_P (type
))
763 gfc_init_block (&inner_block
);
764 gfc_allocate_using_malloc (&inner_block
, ptr
, size
, NULL_TREE
);
765 then_b
= gfc_finish_block (&inner_block
);
767 gfc_init_block (&inner_block
);
768 gfc_add_modify (&inner_block
, ptr
,
769 gfc_call_realloc (&inner_block
, ptr
, size
));
770 else_b
= gfc_finish_block (&inner_block
);
772 gfc_add_expr_to_block (&cond_block2
,
773 build3_loc (input_location
, COND_EXPR
,
775 unshare_expr (nonalloc
),
777 gfc_add_modify (&cond_block2
, dest
, src
);
778 gfc_conv_descriptor_data_set (&cond_block2
, unshare_expr (dest
), ptr
);
782 gfc_allocate_using_malloc (&cond_block2
, ptr
, size
, NULL_TREE
);
783 gfc_add_modify (&cond_block2
, unshare_expr (dest
),
784 fold_convert (type
, ptr
));
786 then_b
= gfc_finish_block (&cond_block2
);
787 else_b
= build_empty_stmt (input_location
);
789 gfc_add_expr_to_block (&cond_block
,
790 build3_loc (input_location
, COND_EXPR
,
791 void_type_node
, unshare_expr (cond
),
794 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
795 ? gfc_conv_descriptor_data_get (src
) : src
;
796 srcptr
= unshare_expr (srcptr
);
797 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
798 call
= build_call_expr_loc (input_location
,
799 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
801 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
802 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
804 tree tem
= gfc_walk_alloc_comps (src
, dest
,
805 OMP_CLAUSE_DECL (clause
),
806 WALK_ALLOC_COMPS_COPY_CTOR
);
807 gfc_add_expr_to_block (&cond_block
, tem
);
809 then_b
= gfc_finish_block (&cond_block
);
811 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_COPYIN
)
813 gfc_init_block (&cond_block
);
814 if (GFC_DESCRIPTOR_TYPE_P (type
))
815 gfc_add_expr_to_block (&cond_block
,
816 gfc_trans_dealloc_allocated (unshare_expr (dest
),
820 destptr
= gfc_evaluate_now (destptr
, &cond_block
);
821 gfc_add_expr_to_block (&cond_block
, gfc_call_free (destptr
));
822 gfc_add_modify (&cond_block
, unshare_expr (dest
),
823 build_zero_cst (TREE_TYPE (dest
)));
825 else_b
= gfc_finish_block (&cond_block
);
827 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
828 unshare_expr (srcptr
), null_pointer_node
);
829 gfc_add_expr_to_block (&block
,
830 build3_loc (input_location
, COND_EXPR
,
831 void_type_node
, cond
,
835 gfc_add_expr_to_block (&block
, then_b
);
837 return gfc_finish_block (&block
);
841 gfc_omp_linear_clause_add_loop (stmtblock_t
*block
, tree dest
, tree src
,
842 tree add
, tree nelems
)
844 stmtblock_t tmpblock
;
845 tree desta
, srca
, index
= gfc_create_var (gfc_array_index_type
, "S");
846 nelems
= gfc_evaluate_now (nelems
, block
);
848 gfc_init_block (&tmpblock
);
849 if (TREE_CODE (TREE_TYPE (dest
)) == ARRAY_TYPE
)
851 desta
= gfc_build_array_ref (dest
, index
, NULL
);
852 srca
= gfc_build_array_ref (src
, index
, NULL
);
856 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest
)));
857 tree idx
= fold_build2 (MULT_EXPR
, sizetype
,
858 fold_convert (sizetype
, index
),
859 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest
))));
860 desta
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
861 TREE_TYPE (dest
), dest
,
863 srca
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
864 TREE_TYPE (src
), src
,
867 gfc_add_modify (&tmpblock
, desta
,
868 fold_build2 (PLUS_EXPR
, TREE_TYPE (desta
),
872 gfc_init_loopinfo (&loop
);
874 loop
.from
[0] = gfc_index_zero_node
;
875 loop
.loopvar
[0] = index
;
877 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
878 gfc_add_block_to_block (block
, &loop
.pre
);
881 /* Build and return code for a constructor of DEST that initializes
882 it to SRC plus ADD (ADD is scalar integer). */
885 gfc_omp_clause_linear_ctor (tree clause
, tree dest
, tree src
, tree add
)
887 tree type
= TREE_TYPE (dest
), ptr
, size
, nelems
= NULL_TREE
;
890 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
892 gfc_start_block (&block
);
893 add
= gfc_evaluate_now (add
, &block
);
895 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
896 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
897 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
899 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
900 if (!TYPE_DOMAIN (type
)
901 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
902 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
903 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
905 nelems
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
906 TYPE_SIZE_UNIT (type
),
907 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
908 nelems
= size_binop (MINUS_EXPR
, nelems
, size_one_node
);
911 nelems
= array_type_nelts (type
);
912 nelems
= fold_convert (gfc_array_index_type
, nelems
);
914 gfc_omp_linear_clause_add_loop (&block
, dest
, src
, add
, nelems
);
915 return gfc_finish_block (&block
);
918 /* Allocatable arrays in LINEAR clauses need to be allocated
919 and copied from SRC. */
920 gfc_add_modify (&block
, dest
, src
);
921 if (GFC_DESCRIPTOR_TYPE_P (type
))
923 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
924 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
925 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
927 gfc_conv_descriptor_lbound_get (dest
, rank
));
928 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
929 size
, gfc_index_one_node
);
930 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
931 size
= fold_build2_loc (input_location
, MULT_EXPR
,
932 gfc_array_index_type
, size
,
933 gfc_conv_descriptor_stride_get (dest
, rank
));
934 tree esize
= fold_convert (gfc_array_index_type
,
935 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
936 nelems
= gfc_evaluate_now (unshare_expr (size
), &block
);
937 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
938 nelems
, unshare_expr (esize
));
939 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
941 nelems
= fold_build2_loc (input_location
, MINUS_EXPR
,
942 gfc_array_index_type
, nelems
,
946 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
947 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
948 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
949 if (GFC_DESCRIPTOR_TYPE_P (type
))
951 gfc_conv_descriptor_data_set (&block
, unshare_expr (dest
), ptr
);
952 tree etype
= gfc_get_element_type (type
);
953 ptr
= fold_convert (build_pointer_type (etype
), ptr
);
954 tree srcptr
= gfc_conv_descriptor_data_get (unshare_expr (src
));
955 srcptr
= fold_convert (build_pointer_type (etype
), srcptr
);
956 gfc_omp_linear_clause_add_loop (&block
, ptr
, srcptr
, add
, nelems
);
960 gfc_add_modify (&block
, unshare_expr (dest
),
961 fold_convert (TREE_TYPE (dest
), ptr
));
962 ptr
= fold_convert (TREE_TYPE (dest
), ptr
);
963 tree dstm
= build_fold_indirect_ref (ptr
);
964 tree srcm
= build_fold_indirect_ref (unshare_expr (src
));
965 gfc_add_modify (&block
, dstm
,
966 fold_build2 (PLUS_EXPR
, TREE_TYPE (add
), srcm
, add
));
968 return gfc_finish_block (&block
);
971 /* Build and return code destructing DECL. Return NULL if nothing
975 gfc_omp_clause_dtor (tree clause
, tree decl
)
977 tree type
= TREE_TYPE (decl
), tem
;
979 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
980 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
981 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
983 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
984 return gfc_walk_alloc_comps (decl
, NULL_TREE
,
985 OMP_CLAUSE_DECL (clause
),
986 WALK_ALLOC_COMPS_DTOR
);
990 if (GFC_DESCRIPTOR_TYPE_P (type
))
991 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
992 to be deallocated if they were allocated. */
993 tem
= gfc_trans_dealloc_allocated (decl
, false, NULL
);
995 tem
= gfc_call_free (decl
);
996 tem
= gfc_omp_unshare_expr (tem
);
998 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1003 gfc_init_block (&block
);
1004 gfc_add_expr_to_block (&block
,
1005 gfc_walk_alloc_comps (decl
, NULL_TREE
,
1006 OMP_CLAUSE_DECL (clause
),
1007 WALK_ALLOC_COMPS_DTOR
));
1008 gfc_add_expr_to_block (&block
, tem
);
1009 then_b
= gfc_finish_block (&block
);
1011 tem
= fold_convert (pvoid_type_node
,
1012 GFC_DESCRIPTOR_TYPE_P (type
)
1013 ? gfc_conv_descriptor_data_get (decl
) : decl
);
1014 tem
= unshare_expr (tem
);
1015 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1016 tem
, null_pointer_node
);
1017 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1018 then_b
, build_empty_stmt (input_location
));
1025 gfc_omp_finish_clause (tree c
, gimple_seq
*pre_p
)
1027 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
1030 tree decl
= OMP_CLAUSE_DECL (c
);
1031 tree c2
= NULL_TREE
, c3
= NULL_TREE
, c4
= NULL_TREE
;
1032 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
1034 if (!gfc_omp_privatize_by_reference (decl
)
1035 && !GFC_DECL_GET_SCALAR_POINTER (decl
)
1036 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1037 && !GFC_DECL_CRAY_POINTEE (decl
)
1038 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
1040 tree orig_decl
= decl
;
1041 c4
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1042 OMP_CLAUSE_SET_MAP_KIND (c4
, GOMP_MAP_POINTER
);
1043 OMP_CLAUSE_DECL (c4
) = decl
;
1044 OMP_CLAUSE_SIZE (c4
) = size_int (0);
1045 decl
= build_fold_indirect_ref (decl
);
1046 OMP_CLAUSE_DECL (c
) = decl
;
1047 OMP_CLAUSE_SIZE (c
) = NULL_TREE
;
1048 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1049 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1050 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1052 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1053 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1054 OMP_CLAUSE_DECL (c3
) = unshare_expr (decl
);
1055 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1056 decl
= build_fold_indirect_ref (decl
);
1057 OMP_CLAUSE_DECL (c
) = decl
;
1060 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1063 gfc_start_block (&block
);
1064 tree type
= TREE_TYPE (decl
);
1065 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1066 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
1067 ptr
= build_fold_indirect_ref (ptr
);
1068 OMP_CLAUSE_DECL (c
) = ptr
;
1069 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1070 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_TO_PSET
);
1071 OMP_CLAUSE_DECL (c2
) = decl
;
1072 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (type
);
1073 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1074 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1075 OMP_CLAUSE_DECL (c3
) = gfc_conv_descriptor_data_get (decl
);
1076 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1077 tree size
= create_tmp_var (gfc_array_index_type
);
1078 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
1079 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
1080 if (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
1081 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
)
1083 stmtblock_t cond_block
;
1084 tree tem
, then_b
, else_b
, zero
, cond
;
1086 gfc_init_block (&cond_block
);
1087 tem
= gfc_full_array_size (&cond_block
, decl
,
1088 GFC_TYPE_ARRAY_RANK (type
));
1089 gfc_add_modify (&cond_block
, size
, tem
);
1090 gfc_add_modify (&cond_block
, size
,
1091 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1093 then_b
= gfc_finish_block (&cond_block
);
1094 gfc_init_block (&cond_block
);
1095 zero
= build_int_cst (gfc_array_index_type
, 0);
1096 gfc_add_modify (&cond_block
, size
, zero
);
1097 else_b
= gfc_finish_block (&cond_block
);
1098 tem
= gfc_conv_descriptor_data_get (decl
);
1099 tem
= fold_convert (pvoid_type_node
, tem
);
1100 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1101 boolean_type_node
, tem
, null_pointer_node
);
1102 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1103 void_type_node
, cond
,
1108 gfc_add_modify (&block
, size
,
1109 gfc_full_array_size (&block
, decl
,
1110 GFC_TYPE_ARRAY_RANK (type
)));
1111 gfc_add_modify (&block
, size
,
1112 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1115 OMP_CLAUSE_SIZE (c
) = size
;
1116 tree stmt
= gfc_finish_block (&block
);
1117 gimplify_and_add (stmt
, pre_p
);
1120 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
1122 = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
1123 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1126 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (last
);
1127 OMP_CLAUSE_CHAIN (last
) = c2
;
1132 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (last
);
1133 OMP_CLAUSE_CHAIN (last
) = c3
;
1138 OMP_CLAUSE_CHAIN (c4
) = OMP_CLAUSE_CHAIN (last
);
1139 OMP_CLAUSE_CHAIN (last
) = c4
;
1145 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1146 disregarded in OpenMP construct, because it is going to be
1147 remapped during OpenMP lowering. SHARED is true if DECL
1148 is going to be shared, false if it is going to be privatized. */
1151 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
1153 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1154 && DECL_HAS_VALUE_EXPR_P (decl
))
1156 tree value
= DECL_VALUE_EXPR (decl
);
1158 if (TREE_CODE (value
) == COMPONENT_REF
1159 && TREE_CODE (TREE_OPERAND (value
, 0)) == VAR_DECL
1160 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1162 /* If variable in COMMON or EQUIVALENCE is privatized, return
1163 true, as just that variable is supposed to be privatized,
1164 not the whole COMMON or whole EQUIVALENCE.
1165 For shared variables in COMMON or EQUIVALENCE, let them be
1166 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1167 from the same COMMON or EQUIVALENCE just one sharing of the
1168 whole COMMON or EQUIVALENCE is enough. */
1173 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
1179 /* Return true if DECL that is shared iff SHARED is true should
1180 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1184 gfc_omp_private_debug_clause (tree decl
, bool shared
)
1186 if (GFC_DECL_CRAY_POINTEE (decl
))
1189 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1190 && DECL_HAS_VALUE_EXPR_P (decl
))
1192 tree value
= DECL_VALUE_EXPR (decl
);
1194 if (TREE_CODE (value
) == COMPONENT_REF
1195 && TREE_CODE (TREE_OPERAND (value
, 0)) == VAR_DECL
1196 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1203 /* Register language specific type size variables as potentially OpenMP
1204 firstprivate variables. */
1207 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
1209 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
1213 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
1214 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
1216 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
1217 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
1218 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
1220 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
1221 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
1227 gfc_trans_add_clause (tree node
, tree tail
)
1229 OMP_CLAUSE_CHAIN (node
) = tail
;
1234 gfc_trans_omp_variable (gfc_symbol
*sym
, bool declare_simd
)
1239 gfc_symbol
*proc_sym
;
1240 gfc_formal_arglist
*f
;
1242 gcc_assert (sym
->attr
.dummy
);
1243 proc_sym
= sym
->ns
->proc_name
;
1244 if (proc_sym
->attr
.entry_master
)
1246 if (gfc_return_by_reference (proc_sym
))
1249 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1252 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1258 return build_int_cst (integer_type_node
, cnt
);
1261 tree t
= gfc_get_symbol_decl (sym
);
1265 bool alternate_entry
;
1268 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1269 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1270 && sym
->result
== sym
;
1271 entry_master
= sym
->attr
.result
1272 && sym
->ns
->proc_name
->attr
.entry_master
1273 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1274 parent_decl
= current_function_decl
1275 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1277 if ((t
== parent_decl
&& return_value
)
1278 || (sym
->ns
&& sym
->ns
->proc_name
1279 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1280 && (alternate_entry
|| entry_master
)))
1285 /* Special case for assigning the return value of a function.
1286 Self recursive functions must have an explicit return value. */
1287 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1288 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1290 /* Similarly for alternate entry points. */
1291 else if (alternate_entry
1292 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1295 gfc_entry_list
*el
= NULL
;
1297 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1300 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1305 else if (entry_master
1306 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1308 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1314 gfc_trans_omp_variable_list (enum omp_clause_code code
,
1315 gfc_omp_namelist
*namelist
, tree list
,
1318 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1319 if (namelist
->sym
->attr
.referenced
|| declare_simd
)
1321 tree t
= gfc_trans_omp_variable (namelist
->sym
, declare_simd
);
1322 if (t
!= error_mark_node
)
1324 tree node
= build_omp_clause (input_location
, code
);
1325 OMP_CLAUSE_DECL (node
) = t
;
1326 list
= gfc_trans_add_clause (node
, list
);
1332 struct omp_udr_find_orig_data
1334 gfc_omp_udr
*omp_udr
;
1339 omp_udr_find_orig (gfc_expr
**e
, int *walk_subtrees ATTRIBUTE_UNUSED
,
1342 struct omp_udr_find_orig_data
*cd
= (struct omp_udr_find_orig_data
*) data
;
1343 if ((*e
)->expr_type
== EXPR_VARIABLE
1344 && (*e
)->symtree
->n
.sym
== cd
->omp_udr
->omp_orig
)
1345 cd
->omp_orig_seen
= true;
1351 gfc_trans_omp_array_reduction_or_udr (tree c
, gfc_omp_namelist
*n
, locus where
)
1353 gfc_symbol
*sym
= n
->sym
;
1354 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
1355 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
1356 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
1357 gfc_symbol omp_var_copy
[4];
1358 gfc_expr
*e1
, *e2
, *e3
, *e4
;
1360 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
1361 locus old_loc
= gfc_current_locus
;
1364 gfc_omp_udr
*udr
= n
->udr
? n
->udr
->udr
: NULL
;
1366 decl
= OMP_CLAUSE_DECL (c
);
1367 gfc_current_locus
= where
;
1368 type
= TREE_TYPE (decl
);
1369 outer_decl
= create_tmp_var_raw (type
);
1370 if (TREE_CODE (decl
) == PARM_DECL
1371 && TREE_CODE (type
) == REFERENCE_TYPE
1372 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
1373 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
1375 decl
= build_fold_indirect_ref (decl
);
1376 type
= TREE_TYPE (type
);
1379 /* Create a fake symbol for init value. */
1380 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
1381 init_val_sym
.ns
= sym
->ns
;
1382 init_val_sym
.name
= sym
->name
;
1383 init_val_sym
.ts
= sym
->ts
;
1384 init_val_sym
.attr
.referenced
= 1;
1385 init_val_sym
.declared_at
= where
;
1386 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
1387 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1388 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
1389 else if (udr
->initializer_ns
)
1390 backend_decl
= NULL
;
1392 switch (sym
->ts
.type
)
1398 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
1401 backend_decl
= NULL_TREE
;
1404 init_val_sym
.backend_decl
= backend_decl
;
1406 /* Create a fake symbol for the outer array reference. */
1409 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
1410 outer_sym
.attr
.dummy
= 0;
1411 outer_sym
.attr
.result
= 0;
1412 outer_sym
.attr
.flavor
= FL_VARIABLE
;
1413 outer_sym
.backend_decl
= outer_decl
;
1414 if (decl
!= OMP_CLAUSE_DECL (c
))
1415 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
1417 /* Create fake symtrees for it. */
1418 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
1419 symtree1
->n
.sym
= sym
;
1420 gcc_assert (symtree1
== root1
);
1422 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
1423 symtree2
->n
.sym
= &init_val_sym
;
1424 gcc_assert (symtree2
== root2
);
1426 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
1427 symtree3
->n
.sym
= &outer_sym
;
1428 gcc_assert (symtree3
== root3
);
1430 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
1433 omp_var_copy
[0] = *udr
->omp_out
;
1434 omp_var_copy
[1] = *udr
->omp_in
;
1435 *udr
->omp_out
= outer_sym
;
1436 *udr
->omp_in
= *sym
;
1437 if (udr
->initializer_ns
)
1439 omp_var_copy
[2] = *udr
->omp_priv
;
1440 omp_var_copy
[3] = *udr
->omp_orig
;
1441 *udr
->omp_priv
= *sym
;
1442 *udr
->omp_orig
= outer_sym
;
1446 /* Create expressions. */
1447 e1
= gfc_get_expr ();
1448 e1
->expr_type
= EXPR_VARIABLE
;
1450 e1
->symtree
= symtree1
;
1452 if (sym
->attr
.dimension
)
1454 e1
->ref
= ref
= gfc_get_ref ();
1455 ref
->type
= REF_ARRAY
;
1456 ref
->u
.ar
.where
= where
;
1457 ref
->u
.ar
.as
= sym
->as
;
1458 ref
->u
.ar
.type
= AR_FULL
;
1459 ref
->u
.ar
.dimen
= 0;
1461 t
= gfc_resolve_expr (e1
);
1465 if (backend_decl
!= NULL_TREE
)
1467 e2
= gfc_get_expr ();
1468 e2
->expr_type
= EXPR_VARIABLE
;
1470 e2
->symtree
= symtree2
;
1472 t
= gfc_resolve_expr (e2
);
1475 else if (udr
->initializer_ns
== NULL
)
1477 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
1478 e2
= gfc_default_initializer (&sym
->ts
);
1480 t
= gfc_resolve_expr (e2
);
1483 else if (n
->udr
->initializer
->op
== EXEC_ASSIGN
)
1485 e2
= gfc_copy_expr (n
->udr
->initializer
->expr2
);
1486 t
= gfc_resolve_expr (e2
);
1489 if (udr
&& udr
->initializer_ns
)
1491 struct omp_udr_find_orig_data cd
;
1493 cd
.omp_orig_seen
= false;
1494 gfc_code_walker (&n
->udr
->initializer
,
1495 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
1496 if (cd
.omp_orig_seen
)
1497 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
1500 e3
= gfc_copy_expr (e1
);
1501 e3
->symtree
= symtree3
;
1502 t
= gfc_resolve_expr (e3
);
1507 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
1511 e4
= gfc_add (e3
, e1
);
1514 e4
= gfc_multiply (e3
, e1
);
1516 case TRUTH_ANDIF_EXPR
:
1517 e4
= gfc_and (e3
, e1
);
1519 case TRUTH_ORIF_EXPR
:
1520 e4
= gfc_or (e3
, e1
);
1523 e4
= gfc_eqv (e3
, e1
);
1526 e4
= gfc_neqv (e3
, e1
);
1544 if (n
->udr
->combiner
->op
== EXEC_ASSIGN
)
1547 e3
= gfc_copy_expr (n
->udr
->combiner
->expr1
);
1548 e4
= gfc_copy_expr (n
->udr
->combiner
->expr2
);
1549 t
= gfc_resolve_expr (e3
);
1551 t
= gfc_resolve_expr (e4
);
1560 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
1561 intrinsic_sym
.ns
= sym
->ns
;
1562 intrinsic_sym
.name
= iname
;
1563 intrinsic_sym
.ts
= sym
->ts
;
1564 intrinsic_sym
.attr
.referenced
= 1;
1565 intrinsic_sym
.attr
.intrinsic
= 1;
1566 intrinsic_sym
.attr
.function
= 1;
1567 intrinsic_sym
.result
= &intrinsic_sym
;
1568 intrinsic_sym
.declared_at
= where
;
1570 symtree4
= gfc_new_symtree (&root4
, iname
);
1571 symtree4
->n
.sym
= &intrinsic_sym
;
1572 gcc_assert (symtree4
== root4
);
1574 e4
= gfc_get_expr ();
1575 e4
->expr_type
= EXPR_FUNCTION
;
1577 e4
->symtree
= symtree4
;
1578 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
1579 e4
->value
.function
.actual
->expr
= e3
;
1580 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
1581 e4
->value
.function
.actual
->next
->expr
= e1
;
1583 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1585 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1586 e1
= gfc_copy_expr (e1
);
1587 e3
= gfc_copy_expr (e3
);
1588 t
= gfc_resolve_expr (e4
);
1592 /* Create the init statement list. */
1595 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
1597 stmt
= gfc_trans_call (n
->udr
->initializer
, false,
1598 NULL_TREE
, NULL_TREE
, false);
1599 if (TREE_CODE (stmt
) != BIND_EXPR
)
1600 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1603 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
1605 /* Create the merge statement list. */
1608 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
1610 stmt
= gfc_trans_call (n
->udr
->combiner
, false,
1611 NULL_TREE
, NULL_TREE
, false);
1612 if (TREE_CODE (stmt
) != BIND_EXPR
)
1613 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1616 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
1618 /* And stick the placeholder VAR_DECL into the clause as well. */
1619 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
1621 gfc_current_locus
= old_loc
;
1634 gfc_free_array_spec (outer_sym
.as
);
1638 *udr
->omp_out
= omp_var_copy
[0];
1639 *udr
->omp_in
= omp_var_copy
[1];
1640 if (udr
->initializer_ns
)
1642 *udr
->omp_priv
= omp_var_copy
[2];
1643 *udr
->omp_orig
= omp_var_copy
[3];
1649 gfc_trans_omp_reduction_list (gfc_omp_namelist
*namelist
, tree list
,
1650 locus where
, bool mark_addressable
)
1652 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1653 if (namelist
->sym
->attr
.referenced
)
1655 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
1656 if (t
!= error_mark_node
)
1658 tree node
= build_omp_clause (where
.lb
->location
,
1659 OMP_CLAUSE_REDUCTION
);
1660 OMP_CLAUSE_DECL (node
) = t
;
1661 if (mark_addressable
)
1662 TREE_ADDRESSABLE (t
) = 1;
1663 switch (namelist
->u
.reduction_op
)
1665 case OMP_REDUCTION_PLUS
:
1666 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
1668 case OMP_REDUCTION_MINUS
:
1669 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
1671 case OMP_REDUCTION_TIMES
:
1672 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
1674 case OMP_REDUCTION_AND
:
1675 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
1677 case OMP_REDUCTION_OR
:
1678 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
1680 case OMP_REDUCTION_EQV
:
1681 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
1683 case OMP_REDUCTION_NEQV
:
1684 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
1686 case OMP_REDUCTION_MAX
:
1687 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
1689 case OMP_REDUCTION_MIN
:
1690 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
1692 case OMP_REDUCTION_IAND
:
1693 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
1695 case OMP_REDUCTION_IOR
:
1696 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
1698 case OMP_REDUCTION_IEOR
:
1699 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
1701 case OMP_REDUCTION_USER
:
1702 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
1707 if (namelist
->sym
->attr
.dimension
1708 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
1709 || namelist
->sym
->attr
.allocatable
)
1710 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
1711 list
= gfc_trans_add_clause (node
, list
);
1718 gfc_convert_expr_to_tree (stmtblock_t
*block
, gfc_expr
*expr
)
1723 gfc_init_se (&se
, NULL
);
1724 gfc_conv_expr (&se
, expr
);
1725 gfc_add_block_to_block (block
, &se
.pre
);
1726 result
= gfc_evaluate_now (se
.expr
, block
);
1727 gfc_add_block_to_block (block
, &se
.post
);
1733 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
1734 locus where
, bool declare_simd
= false)
1736 tree omp_clauses
= NULL_TREE
, chunk_size
, c
;
1738 enum omp_clause_code clause_code
;
1741 if (clauses
== NULL
)
1744 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
1746 gfc_omp_namelist
*n
= clauses
->lists
[list
];
1752 case OMP_LIST_REDUCTION
:
1753 /* An OpenACC async clause indicates the need to set reduction
1754 arguments addressable, to allow asynchronous copy-out. */
1755 omp_clauses
= gfc_trans_omp_reduction_list (n
, omp_clauses
, where
,
1758 case OMP_LIST_PRIVATE
:
1759 clause_code
= OMP_CLAUSE_PRIVATE
;
1761 case OMP_LIST_SHARED
:
1762 clause_code
= OMP_CLAUSE_SHARED
;
1764 case OMP_LIST_FIRSTPRIVATE
:
1765 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
1767 case OMP_LIST_LASTPRIVATE
:
1768 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
1770 case OMP_LIST_COPYIN
:
1771 clause_code
= OMP_CLAUSE_COPYIN
;
1773 case OMP_LIST_COPYPRIVATE
:
1774 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
1776 case OMP_LIST_UNIFORM
:
1777 clause_code
= OMP_CLAUSE_UNIFORM
;
1779 case OMP_LIST_USE_DEVICE
:
1780 clause_code
= OMP_CLAUSE_USE_DEVICE_PTR
;
1785 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
1788 case OMP_LIST_ALIGNED
:
1789 for (; n
!= NULL
; n
= n
->next
)
1790 if (n
->sym
->attr
.referenced
|| declare_simd
)
1792 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1793 if (t
!= error_mark_node
)
1795 tree node
= build_omp_clause (input_location
,
1796 OMP_CLAUSE_ALIGNED
);
1797 OMP_CLAUSE_DECL (node
) = t
;
1803 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
1806 gfc_init_se (&se
, NULL
);
1807 gfc_conv_expr (&se
, n
->expr
);
1808 gfc_add_block_to_block (block
, &se
.pre
);
1809 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
1810 gfc_add_block_to_block (block
, &se
.post
);
1812 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
1814 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1818 case OMP_LIST_LINEAR
:
1820 gfc_expr
*last_step_expr
= NULL
;
1821 tree last_step
= NULL_TREE
;
1823 for (; n
!= NULL
; n
= n
->next
)
1827 last_step_expr
= n
->expr
;
1828 last_step
= NULL_TREE
;
1830 if (n
->sym
->attr
.referenced
|| declare_simd
)
1832 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1833 if (t
!= error_mark_node
)
1835 tree node
= build_omp_clause (input_location
,
1837 OMP_CLAUSE_DECL (node
) = t
;
1838 if (last_step_expr
&& last_step
== NULL_TREE
)
1842 = gfc_conv_constant_to_tree (last_step_expr
);
1845 gfc_init_se (&se
, NULL
);
1846 gfc_conv_expr (&se
, last_step_expr
);
1847 gfc_add_block_to_block (block
, &se
.pre
);
1848 last_step
= gfc_evaluate_now (se
.expr
, block
);
1849 gfc_add_block_to_block (block
, &se
.post
);
1852 OMP_CLAUSE_LINEAR_STEP (node
)
1853 = fold_convert (gfc_typenode_for_spec (&n
->sym
->ts
),
1855 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
1856 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
1857 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1863 case OMP_LIST_DEPEND
:
1864 for (; n
!= NULL
; n
= n
->next
)
1866 if (!n
->sym
->attr
.referenced
)
1869 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_DEPEND
);
1870 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
1872 tree decl
= gfc_get_symbol_decl (n
->sym
);
1873 if (gfc_omp_privatize_by_reference (decl
))
1874 decl
= build_fold_indirect_ref (decl
);
1875 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1877 decl
= gfc_conv_descriptor_data_get (decl
);
1878 decl
= fold_convert (build_pointer_type (char_type_node
),
1880 decl
= build_fold_indirect_ref (decl
);
1882 else if (DECL_P (decl
))
1883 TREE_ADDRESSABLE (decl
) = 1;
1884 OMP_CLAUSE_DECL (node
) = decl
;
1889 gfc_init_se (&se
, NULL
);
1890 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
1892 gfc_conv_expr_reference (&se
, n
->expr
);
1897 gfc_conv_expr_descriptor (&se
, n
->expr
);
1898 ptr
= gfc_conv_array_data (se
.expr
);
1900 gfc_add_block_to_block (block
, &se
.pre
);
1901 gfc_add_block_to_block (block
, &se
.post
);
1902 ptr
= fold_convert (build_pointer_type (char_type_node
),
1904 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
1906 switch (n
->u
.depend_op
)
1909 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
1911 case OMP_DEPEND_OUT
:
1912 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
1914 case OMP_DEPEND_INOUT
:
1915 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
1920 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1924 for (; n
!= NULL
; n
= n
->next
)
1926 if (!n
->sym
->attr
.referenced
)
1929 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1930 tree node2
= NULL_TREE
;
1931 tree node3
= NULL_TREE
;
1932 tree node4
= NULL_TREE
;
1933 tree decl
= gfc_get_symbol_decl (n
->sym
);
1935 TREE_ADDRESSABLE (decl
) = 1;
1936 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
1938 if (POINTER_TYPE_P (TREE_TYPE (decl
))
1939 && (gfc_omp_privatize_by_reference (decl
)
1940 || GFC_DECL_GET_SCALAR_POINTER (decl
)
1941 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1942 || GFC_DECL_CRAY_POINTEE (decl
)
1943 || GFC_DESCRIPTOR_TYPE_P
1944 (TREE_TYPE (TREE_TYPE (decl
)))))
1946 tree orig_decl
= decl
;
1947 node4
= build_omp_clause (input_location
,
1949 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
1950 OMP_CLAUSE_DECL (node4
) = decl
;
1951 OMP_CLAUSE_SIZE (node4
) = size_int (0);
1952 decl
= build_fold_indirect_ref (decl
);
1953 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1954 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1955 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1957 node3
= build_omp_clause (input_location
,
1959 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
1960 OMP_CLAUSE_DECL (node3
) = decl
;
1961 OMP_CLAUSE_SIZE (node3
) = size_int (0);
1962 decl
= build_fold_indirect_ref (decl
);
1965 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1967 tree type
= TREE_TYPE (decl
);
1968 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1969 ptr
= fold_convert (build_pointer_type (char_type_node
),
1971 ptr
= build_fold_indirect_ref (ptr
);
1972 OMP_CLAUSE_DECL (node
) = ptr
;
1973 node2
= build_omp_clause (input_location
,
1975 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
1976 OMP_CLAUSE_DECL (node2
) = decl
;
1977 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
1978 node3
= build_omp_clause (input_location
,
1980 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
1981 OMP_CLAUSE_DECL (node3
)
1982 = gfc_conv_descriptor_data_get (decl
);
1983 OMP_CLAUSE_SIZE (node3
) = size_int (0);
1985 /* We have to check for n->sym->attr.dimension because
1986 of scalar coarrays. */
1987 if (n
->sym
->attr
.pointer
&& n
->sym
->attr
.dimension
)
1989 stmtblock_t cond_block
;
1991 = gfc_create_var (gfc_array_index_type
, NULL
);
1992 tree tem
, then_b
, else_b
, zero
, cond
;
1994 gfc_init_block (&cond_block
);
1996 = gfc_full_array_size (&cond_block
, decl
,
1997 GFC_TYPE_ARRAY_RANK (type
));
1998 gfc_add_modify (&cond_block
, size
, tem
);
1999 then_b
= gfc_finish_block (&cond_block
);
2000 gfc_init_block (&cond_block
);
2001 zero
= build_int_cst (gfc_array_index_type
, 0);
2002 gfc_add_modify (&cond_block
, size
, zero
);
2003 else_b
= gfc_finish_block (&cond_block
);
2004 tem
= gfc_conv_descriptor_data_get (decl
);
2005 tem
= fold_convert (pvoid_type_node
, tem
);
2006 cond
= fold_build2_loc (input_location
, NE_EXPR
,
2008 tem
, null_pointer_node
);
2009 gfc_add_expr_to_block (block
,
2010 build3_loc (input_location
,
2015 OMP_CLAUSE_SIZE (node
) = size
;
2017 else if (n
->sym
->attr
.dimension
)
2018 OMP_CLAUSE_SIZE (node
)
2019 = gfc_full_array_size (block
, decl
,
2020 GFC_TYPE_ARRAY_RANK (type
));
2021 if (n
->sym
->attr
.dimension
)
2024 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2025 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2026 OMP_CLAUSE_SIZE (node
)
2027 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2028 OMP_CLAUSE_SIZE (node
), elemsz
);
2032 OMP_CLAUSE_DECL (node
) = decl
;
2037 gfc_init_se (&se
, NULL
);
2038 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2040 gfc_conv_expr_reference (&se
, n
->expr
);
2041 gfc_add_block_to_block (block
, &se
.pre
);
2043 OMP_CLAUSE_SIZE (node
)
2044 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2048 gfc_conv_expr_descriptor (&se
, n
->expr
);
2049 ptr
= gfc_conv_array_data (se
.expr
);
2050 tree type
= TREE_TYPE (se
.expr
);
2051 gfc_add_block_to_block (block
, &se
.pre
);
2052 OMP_CLAUSE_SIZE (node
)
2053 = gfc_full_array_size (block
, se
.expr
,
2054 GFC_TYPE_ARRAY_RANK (type
));
2056 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2057 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2058 OMP_CLAUSE_SIZE (node
)
2059 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2060 OMP_CLAUSE_SIZE (node
), elemsz
);
2062 gfc_add_block_to_block (block
, &se
.post
);
2063 ptr
= fold_convert (build_pointer_type (char_type_node
),
2065 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2067 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2068 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
2070 node4
= build_omp_clause (input_location
,
2072 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2073 OMP_CLAUSE_DECL (node4
) = decl
;
2074 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2075 decl
= build_fold_indirect_ref (decl
);
2077 ptr
= fold_convert (sizetype
, ptr
);
2078 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2080 tree type
= TREE_TYPE (decl
);
2081 ptr2
= gfc_conv_descriptor_data_get (decl
);
2082 node2
= build_omp_clause (input_location
,
2084 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2085 OMP_CLAUSE_DECL (node2
) = decl
;
2086 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2087 node3
= build_omp_clause (input_location
,
2089 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2090 OMP_CLAUSE_DECL (node3
)
2091 = gfc_conv_descriptor_data_get (decl
);
2095 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2096 ptr2
= build_fold_addr_expr (decl
);
2099 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2102 node3
= build_omp_clause (input_location
,
2104 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2105 OMP_CLAUSE_DECL (node3
) = decl
;
2107 ptr2
= fold_convert (sizetype
, ptr2
);
2108 OMP_CLAUSE_SIZE (node3
)
2109 = fold_build2 (MINUS_EXPR
, sizetype
, ptr
, ptr2
);
2111 switch (n
->u
.map_op
)
2114 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALLOC
);
2117 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TO
);
2120 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FROM
);
2122 case OMP_MAP_TOFROM
:
2123 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TOFROM
);
2125 case OMP_MAP_DELETE
:
2126 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DELETE
);
2128 case OMP_MAP_FORCE_ALLOC
:
2129 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_ALLOC
);
2131 case OMP_MAP_FORCE_TO
:
2132 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TO
);
2134 case OMP_MAP_FORCE_FROM
:
2135 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_FROM
);
2137 case OMP_MAP_FORCE_TOFROM
:
2138 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TOFROM
);
2140 case OMP_MAP_FORCE_PRESENT
:
2141 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_PRESENT
);
2143 case OMP_MAP_FORCE_DEVICEPTR
:
2144 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEVICEPTR
);
2149 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2151 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
2153 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
2155 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
2160 case OMP_LIST_CACHE
:
2161 for (; n
!= NULL
; n
= n
->next
)
2163 if (!n
->sym
->attr
.referenced
)
2169 clause_code
= OMP_CLAUSE_TO
;
2172 clause_code
= OMP_CLAUSE_FROM
;
2174 case OMP_LIST_CACHE
:
2175 clause_code
= OMP_CLAUSE__CACHE_
;
2180 tree node
= build_omp_clause (input_location
, clause_code
);
2181 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2183 tree decl
= gfc_get_symbol_decl (n
->sym
);
2184 if (gfc_omp_privatize_by_reference (decl
))
2185 decl
= build_fold_indirect_ref (decl
);
2186 else if (DECL_P (decl
))
2187 TREE_ADDRESSABLE (decl
) = 1;
2188 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2190 tree type
= TREE_TYPE (decl
);
2191 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2192 ptr
= fold_convert (build_pointer_type (char_type_node
),
2194 ptr
= build_fold_indirect_ref (ptr
);
2195 OMP_CLAUSE_DECL (node
) = ptr
;
2196 OMP_CLAUSE_SIZE (node
)
2197 = gfc_full_array_size (block
, decl
,
2198 GFC_TYPE_ARRAY_RANK (type
));
2200 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2201 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2202 OMP_CLAUSE_SIZE (node
)
2203 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2204 OMP_CLAUSE_SIZE (node
), elemsz
);
2207 OMP_CLAUSE_DECL (node
) = decl
;
2212 gfc_init_se (&se
, NULL
);
2213 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2215 gfc_conv_expr_reference (&se
, n
->expr
);
2217 gfc_add_block_to_block (block
, &se
.pre
);
2218 OMP_CLAUSE_SIZE (node
)
2219 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2223 gfc_conv_expr_descriptor (&se
, n
->expr
);
2224 ptr
= gfc_conv_array_data (se
.expr
);
2225 tree type
= TREE_TYPE (se
.expr
);
2226 gfc_add_block_to_block (block
, &se
.pre
);
2227 OMP_CLAUSE_SIZE (node
)
2228 = gfc_full_array_size (block
, se
.expr
,
2229 GFC_TYPE_ARRAY_RANK (type
));
2231 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2232 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2233 OMP_CLAUSE_SIZE (node
)
2234 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2235 OMP_CLAUSE_SIZE (node
), elemsz
);
2237 gfc_add_block_to_block (block
, &se
.post
);
2238 ptr
= fold_convert (build_pointer_type (char_type_node
),
2240 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2242 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2250 if (clauses
->if_expr
)
2254 gfc_init_se (&se
, NULL
);
2255 gfc_conv_expr (&se
, clauses
->if_expr
);
2256 gfc_add_block_to_block (block
, &se
.pre
);
2257 if_var
= gfc_evaluate_now (se
.expr
, block
);
2258 gfc_add_block_to_block (block
, &se
.post
);
2260 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
2261 OMP_CLAUSE_IF_MODIFIER (c
) = ERROR_MARK
;
2262 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2263 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2266 if (clauses
->final_expr
)
2270 gfc_init_se (&se
, NULL
);
2271 gfc_conv_expr (&se
, clauses
->final_expr
);
2272 gfc_add_block_to_block (block
, &se
.pre
);
2273 final_var
= gfc_evaluate_now (se
.expr
, block
);
2274 gfc_add_block_to_block (block
, &se
.post
);
2276 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FINAL
);
2277 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
2278 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2281 if (clauses
->num_threads
)
2285 gfc_init_se (&se
, NULL
);
2286 gfc_conv_expr (&se
, clauses
->num_threads
);
2287 gfc_add_block_to_block (block
, &se
.pre
);
2288 num_threads
= gfc_evaluate_now (se
.expr
, block
);
2289 gfc_add_block_to_block (block
, &se
.post
);
2291 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_THREADS
);
2292 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
2293 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2296 chunk_size
= NULL_TREE
;
2297 if (clauses
->chunk_size
)
2299 gfc_init_se (&se
, NULL
);
2300 gfc_conv_expr (&se
, clauses
->chunk_size
);
2301 gfc_add_block_to_block (block
, &se
.pre
);
2302 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2303 gfc_add_block_to_block (block
, &se
.post
);
2306 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
2308 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SCHEDULE
);
2309 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2310 switch (clauses
->sched_kind
)
2312 case OMP_SCHED_STATIC
:
2313 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
2315 case OMP_SCHED_DYNAMIC
:
2316 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
2318 case OMP_SCHED_GUIDED
:
2319 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
2321 case OMP_SCHED_RUNTIME
:
2322 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
2324 case OMP_SCHED_AUTO
:
2325 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
2330 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2333 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
2335 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULT
);
2336 switch (clauses
->default_sharing
)
2338 case OMP_DEFAULT_NONE
:
2339 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
2341 case OMP_DEFAULT_SHARED
:
2342 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
2344 case OMP_DEFAULT_PRIVATE
:
2345 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
2347 case OMP_DEFAULT_FIRSTPRIVATE
:
2348 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
2353 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2356 if (clauses
->nowait
)
2358 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOWAIT
);
2359 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2362 if (clauses
->ordered
)
2364 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ORDERED
);
2365 OMP_CLAUSE_ORDERED_EXPR (c
) = NULL_TREE
;
2366 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2369 if (clauses
->untied
)
2371 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_UNTIED
);
2372 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2375 if (clauses
->mergeable
)
2377 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_MERGEABLE
);
2378 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2381 if (clauses
->collapse
)
2383 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_COLLAPSE
);
2384 OMP_CLAUSE_COLLAPSE_EXPR (c
)
2385 = build_int_cst (integer_type_node
, clauses
->collapse
);
2386 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2389 if (clauses
->inbranch
)
2391 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INBRANCH
);
2392 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2395 if (clauses
->notinbranch
)
2397 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOTINBRANCH
);
2398 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2401 switch (clauses
->cancel
)
2403 case OMP_CANCEL_UNKNOWN
:
2405 case OMP_CANCEL_PARALLEL
:
2406 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PARALLEL
);
2407 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2409 case OMP_CANCEL_SECTIONS
:
2410 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SECTIONS
);
2411 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2414 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FOR
);
2415 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2417 case OMP_CANCEL_TASKGROUP
:
2418 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TASKGROUP
);
2419 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2423 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
2425 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PROC_BIND
);
2426 switch (clauses
->proc_bind
)
2428 case OMP_PROC_BIND_MASTER
:
2429 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
2431 case OMP_PROC_BIND_SPREAD
:
2432 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
2434 case OMP_PROC_BIND_CLOSE
:
2435 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
2440 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2443 if (clauses
->safelen_expr
)
2447 gfc_init_se (&se
, NULL
);
2448 gfc_conv_expr (&se
, clauses
->safelen_expr
);
2449 gfc_add_block_to_block (block
, &se
.pre
);
2450 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
2451 gfc_add_block_to_block (block
, &se
.post
);
2453 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SAFELEN
);
2454 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
2455 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2458 if (clauses
->simdlen_expr
)
2460 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2461 OMP_CLAUSE_SIMDLEN_EXPR (c
)
2462 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
2463 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2466 if (clauses
->num_teams
)
2470 gfc_init_se (&se
, NULL
);
2471 gfc_conv_expr (&se
, clauses
->num_teams
);
2472 gfc_add_block_to_block (block
, &se
.pre
);
2473 num_teams
= gfc_evaluate_now (se
.expr
, block
);
2474 gfc_add_block_to_block (block
, &se
.post
);
2476 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TEAMS
);
2477 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
2478 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2481 if (clauses
->device
)
2485 gfc_init_se (&se
, NULL
);
2486 gfc_conv_expr (&se
, clauses
->device
);
2487 gfc_add_block_to_block (block
, &se
.pre
);
2488 device
= gfc_evaluate_now (se
.expr
, block
);
2489 gfc_add_block_to_block (block
, &se
.post
);
2491 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEVICE
);
2492 OMP_CLAUSE_DEVICE_ID (c
) = device
;
2493 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2496 if (clauses
->thread_limit
)
2500 gfc_init_se (&se
, NULL
);
2501 gfc_conv_expr (&se
, clauses
->thread_limit
);
2502 gfc_add_block_to_block (block
, &se
.pre
);
2503 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
2504 gfc_add_block_to_block (block
, &se
.post
);
2506 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREAD_LIMIT
);
2507 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
2508 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2511 chunk_size
= NULL_TREE
;
2512 if (clauses
->dist_chunk_size
)
2514 gfc_init_se (&se
, NULL
);
2515 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
2516 gfc_add_block_to_block (block
, &se
.pre
);
2517 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2518 gfc_add_block_to_block (block
, &se
.post
);
2521 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
2523 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DIST_SCHEDULE
);
2524 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2525 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2530 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ASYNC
);
2531 if (clauses
->async_expr
)
2532 OMP_CLAUSE_ASYNC_EXPR (c
)
2533 = gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
2535 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
2536 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2540 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SEQ
);
2541 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2543 if (clauses
->par_auto
)
2545 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_AUTO
);
2546 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2548 if (clauses
->independent
)
2550 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INDEPENDENT
);
2551 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2553 if (clauses
->wait_list
)
2557 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2559 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WAIT
);
2560 OMP_CLAUSE_DECL (c
) = gfc_convert_expr_to_tree (block
, el
->expr
);
2561 OMP_CLAUSE_CHAIN (c
) = omp_clauses
;
2565 if (clauses
->num_gangs_expr
)
2568 = gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
2569 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_GANGS
);
2570 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
2571 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2573 if (clauses
->num_workers_expr
)
2575 tree num_workers_var
2576 = gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
2577 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_WORKERS
);
2578 OMP_CLAUSE_NUM_WORKERS_EXPR (c
) = num_workers_var
;
2579 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2581 if (clauses
->vector_length_expr
)
2583 tree vector_length_var
2584 = gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
2585 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR_LENGTH
);
2586 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
) = vector_length_var
;
2587 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2589 if (clauses
->tile_list
)
2591 vec
<tree
, va_gc
> *tvec
;
2594 vec_alloc (tvec
, 4);
2596 for (el
= clauses
->tile_list
; el
; el
= el
->next
)
2597 vec_safe_push (tvec
, gfc_convert_expr_to_tree (block
, el
->expr
));
2599 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TILE
);
2600 OMP_CLAUSE_TILE_LIST (c
) = build_tree_list_vec (tvec
);
2601 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2604 if (clauses
->vector
)
2606 if (clauses
->vector_expr
)
2609 = gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
2610 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2611 OMP_CLAUSE_VECTOR_EXPR (c
) = vector_var
;
2612 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2616 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2617 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2620 if (clauses
->worker
)
2622 if (clauses
->worker_expr
)
2625 = gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
2626 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2627 OMP_CLAUSE_WORKER_EXPR (c
) = worker_var
;
2628 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2632 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2633 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2639 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GANG
);
2640 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2641 if (clauses
->gang_num_expr
)
2643 arg
= gfc_convert_expr_to_tree (block
, clauses
->gang_num_expr
);
2644 OMP_CLAUSE_GANG_EXPR (c
) = arg
;
2646 if (clauses
->gang_static
)
2648 arg
= clauses
->gang_static_expr
2649 ? gfc_convert_expr_to_tree (block
, clauses
->gang_static_expr
)
2650 : integer_minus_one_node
;
2651 OMP_CLAUSE_GANG_STATIC_EXPR (c
) = arg
;
2655 return nreverse (omp_clauses
);
2658 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
2661 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
2666 stmt
= gfc_trans_code (code
);
2667 if (TREE_CODE (stmt
) != BIND_EXPR
)
2669 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
2671 tree block
= poplevel (1, 0);
2672 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
2682 /* Trans OpenACC directives. */
2683 /* parallel, kernels, data and host_data. */
2685 gfc_trans_oacc_construct (gfc_code
*code
)
2688 tree stmt
, oacc_clauses
;
2689 enum tree_code construct_code
;
2693 case EXEC_OACC_PARALLEL
:
2694 construct_code
= OACC_PARALLEL
;
2696 case EXEC_OACC_KERNELS
:
2697 construct_code
= OACC_KERNELS
;
2699 case EXEC_OACC_DATA
:
2700 construct_code
= OACC_DATA
;
2702 case EXEC_OACC_HOST_DATA
:
2703 construct_code
= OACC_HOST_DATA
;
2709 gfc_start_block (&block
);
2710 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
2712 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
2713 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
2715 gfc_add_expr_to_block (&block
, stmt
);
2716 return gfc_finish_block (&block
);
2719 /* update, enter_data, exit_data, cache. */
2721 gfc_trans_oacc_executable_directive (gfc_code
*code
)
2724 tree stmt
, oacc_clauses
;
2725 enum tree_code construct_code
;
2729 case EXEC_OACC_UPDATE
:
2730 construct_code
= OACC_UPDATE
;
2732 case EXEC_OACC_ENTER_DATA
:
2733 construct_code
= OACC_ENTER_DATA
;
2735 case EXEC_OACC_EXIT_DATA
:
2736 construct_code
= OACC_EXIT_DATA
;
2738 case EXEC_OACC_CACHE
:
2739 construct_code
= OACC_CACHE
;
2745 gfc_start_block (&block
);
2746 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
2748 stmt
= build1_loc (input_location
, construct_code
, void_type_node
,
2750 gfc_add_expr_to_block (&block
, stmt
);
2751 return gfc_finish_block (&block
);
2755 gfc_trans_oacc_wait_directive (gfc_code
*code
)
2759 vec
<tree
, va_gc
> *args
;
2762 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
2763 location_t loc
= input_location
;
2765 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2768 vec_alloc (args
, nparms
+ 2);
2769 stmt
= builtin_decl_explicit (BUILT_IN_GOACC_WAIT
);
2771 gfc_start_block (&block
);
2773 if (clauses
->async_expr
)
2774 t
= gfc_convert_expr_to_tree (&block
, clauses
->async_expr
);
2776 t
= build_int_cst (integer_type_node
, -2);
2778 args
->quick_push (t
);
2779 args
->quick_push (build_int_cst (integer_type_node
, nparms
));
2781 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2782 args
->quick_push (gfc_convert_expr_to_tree (&block
, el
->expr
));
2784 stmt
= build_call_expr_loc_vec (loc
, stmt
, args
);
2785 gfc_add_expr_to_block (&block
, stmt
);
2789 return gfc_finish_block (&block
);
2792 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
2793 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
2796 gfc_trans_omp_atomic (gfc_code
*code
)
2798 gfc_code
*atomic_code
= code
;
2802 gfc_expr
*expr2
, *e
;
2805 tree lhsaddr
, type
, rhs
, x
;
2806 enum tree_code op
= ERROR_MARK
;
2807 enum tree_code aop
= OMP_ATOMIC
;
2808 bool var_on_left
= false;
2809 bool seq_cst
= (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SEQ_CST
) != 0;
2811 code
= code
->block
->next
;
2812 gcc_assert (code
->op
== EXEC_ASSIGN
);
2813 var
= code
->expr1
->symtree
->n
.sym
;
2815 gfc_init_se (&lse
, NULL
);
2816 gfc_init_se (&rse
, NULL
);
2817 gfc_init_se (&vse
, NULL
);
2818 gfc_start_block (&block
);
2820 expr2
= code
->expr2
;
2821 if (expr2
->expr_type
== EXPR_FUNCTION
2822 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2823 expr2
= expr2
->value
.function
.actual
->expr
;
2825 switch (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2827 case GFC_OMP_ATOMIC_READ
:
2828 gfc_conv_expr (&vse
, code
->expr1
);
2829 gfc_add_block_to_block (&block
, &vse
.pre
);
2831 gfc_conv_expr (&lse
, expr2
);
2832 gfc_add_block_to_block (&block
, &lse
.pre
);
2833 type
= TREE_TYPE (lse
.expr
);
2834 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
2836 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
2837 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
2838 x
= convert (TREE_TYPE (vse
.expr
), x
);
2839 gfc_add_modify (&block
, vse
.expr
, x
);
2841 gfc_add_block_to_block (&block
, &lse
.pre
);
2842 gfc_add_block_to_block (&block
, &rse
.pre
);
2844 return gfc_finish_block (&block
);
2845 case GFC_OMP_ATOMIC_CAPTURE
:
2846 aop
= OMP_ATOMIC_CAPTURE_NEW
;
2847 if (expr2
->expr_type
== EXPR_VARIABLE
)
2849 aop
= OMP_ATOMIC_CAPTURE_OLD
;
2850 gfc_conv_expr (&vse
, code
->expr1
);
2851 gfc_add_block_to_block (&block
, &vse
.pre
);
2853 gfc_conv_expr (&lse
, expr2
);
2854 gfc_add_block_to_block (&block
, &lse
.pre
);
2855 gfc_init_se (&lse
, NULL
);
2857 var
= code
->expr1
->symtree
->n
.sym
;
2858 expr2
= code
->expr2
;
2859 if (expr2
->expr_type
== EXPR_FUNCTION
2860 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2861 expr2
= expr2
->value
.function
.actual
->expr
;
2868 gfc_conv_expr (&lse
, code
->expr1
);
2869 gfc_add_block_to_block (&block
, &lse
.pre
);
2870 type
= TREE_TYPE (lse
.expr
);
2871 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
2873 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2874 == GFC_OMP_ATOMIC_WRITE
)
2875 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
2877 gfc_conv_expr (&rse
, expr2
);
2878 gfc_add_block_to_block (&block
, &rse
.pre
);
2880 else if (expr2
->expr_type
== EXPR_OP
)
2883 switch (expr2
->value
.op
.op
)
2885 case INTRINSIC_PLUS
:
2888 case INTRINSIC_TIMES
:
2891 case INTRINSIC_MINUS
:
2894 case INTRINSIC_DIVIDE
:
2895 if (expr2
->ts
.type
== BT_INTEGER
)
2896 op
= TRUNC_DIV_EXPR
;
2901 op
= TRUTH_ANDIF_EXPR
;
2904 op
= TRUTH_ORIF_EXPR
;
2909 case INTRINSIC_NEQV
:
2915 e
= expr2
->value
.op
.op1
;
2916 if (e
->expr_type
== EXPR_FUNCTION
2917 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2918 e
= e
->value
.function
.actual
->expr
;
2919 if (e
->expr_type
== EXPR_VARIABLE
2920 && e
->symtree
!= NULL
2921 && e
->symtree
->n
.sym
== var
)
2923 expr2
= expr2
->value
.op
.op2
;
2928 e
= expr2
->value
.op
.op2
;
2929 if (e
->expr_type
== EXPR_FUNCTION
2930 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2931 e
= e
->value
.function
.actual
->expr
;
2932 gcc_assert (e
->expr_type
== EXPR_VARIABLE
2933 && e
->symtree
!= NULL
2934 && e
->symtree
->n
.sym
== var
);
2935 expr2
= expr2
->value
.op
.op1
;
2936 var_on_left
= false;
2938 gfc_conv_expr (&rse
, expr2
);
2939 gfc_add_block_to_block (&block
, &rse
.pre
);
2943 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
2944 switch (expr2
->value
.function
.isym
->id
)
2964 e
= expr2
->value
.function
.actual
->expr
;
2965 gcc_assert (e
->expr_type
== EXPR_VARIABLE
2966 && e
->symtree
!= NULL
2967 && e
->symtree
->n
.sym
== var
);
2969 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
2970 gfc_add_block_to_block (&block
, &rse
.pre
);
2971 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
2973 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
2974 gfc_actual_arglist
*arg
;
2976 gfc_add_modify (&block
, accum
, rse
.expr
);
2977 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
2980 gfc_init_block (&rse
.pre
);
2981 gfc_conv_expr (&rse
, arg
->expr
);
2982 gfc_add_block_to_block (&block
, &rse
.pre
);
2983 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
2985 gfc_add_modify (&block
, accum
, x
);
2991 expr2
= expr2
->value
.function
.actual
->next
->expr
;
2994 lhsaddr
= save_expr (lhsaddr
);
2995 if (TREE_CODE (lhsaddr
) != SAVE_EXPR
2996 && (TREE_CODE (lhsaddr
) != ADDR_EXPR
2997 || TREE_CODE (TREE_OPERAND (lhsaddr
, 0)) != VAR_DECL
))
2999 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
3000 it even after unsharing function body. */
3001 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
3002 DECL_CONTEXT (var
) = current_function_decl
;
3003 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
,
3004 NULL_TREE
, NULL_TREE
);
3007 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
3009 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3010 == GFC_OMP_ATOMIC_WRITE
)
3011 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
3015 x
= convert (TREE_TYPE (rhs
),
3016 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
3018 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
3020 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
3023 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
3024 && TREE_CODE (type
) != COMPLEX_TYPE
)
3025 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
3026 TREE_TYPE (TREE_TYPE (rhs
)), x
);
3028 gfc_add_block_to_block (&block
, &lse
.pre
);
3029 gfc_add_block_to_block (&block
, &rse
.pre
);
3031 if (aop
== OMP_ATOMIC
)
3033 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
3034 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3035 gfc_add_expr_to_block (&block
, x
);
3039 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
3042 expr2
= code
->expr2
;
3043 if (expr2
->expr_type
== EXPR_FUNCTION
3044 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3045 expr2
= expr2
->value
.function
.actual
->expr
;
3047 gcc_assert (expr2
->expr_type
== EXPR_VARIABLE
);
3048 gfc_conv_expr (&vse
, code
->expr1
);
3049 gfc_add_block_to_block (&block
, &vse
.pre
);
3051 gfc_init_se (&lse
, NULL
);
3052 gfc_conv_expr (&lse
, expr2
);
3053 gfc_add_block_to_block (&block
, &lse
.pre
);
3055 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
3056 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3057 x
= convert (TREE_TYPE (vse
.expr
), x
);
3058 gfc_add_modify (&block
, vse
.expr
, x
);
3061 return gfc_finish_block (&block
);
3065 gfc_trans_omp_barrier (void)
3067 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
3068 return build_call_expr_loc (input_location
, decl
, 0);
3072 gfc_trans_omp_cancel (gfc_code
*code
)
3075 tree ifc
= boolean_true_node
;
3077 switch (code
->ext
.omp_clauses
->cancel
)
3079 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3080 case OMP_CANCEL_DO
: mask
= 2; break;
3081 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3082 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3083 default: gcc_unreachable ();
3085 gfc_start_block (&block
);
3086 if (code
->ext
.omp_clauses
->if_expr
)
3091 gfc_init_se (&se
, NULL
);
3092 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->if_expr
);
3093 gfc_add_block_to_block (&block
, &se
.pre
);
3094 if_var
= gfc_evaluate_now (se
.expr
, &block
);
3095 gfc_add_block_to_block (&block
, &se
.post
);
3096 tree type
= TREE_TYPE (if_var
);
3097 ifc
= fold_build2_loc (input_location
, NE_EXPR
,
3098 boolean_type_node
, if_var
,
3099 build_zero_cst (type
));
3101 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
3102 tree c_bool_type
= TREE_TYPE (TREE_TYPE (decl
));
3103 ifc
= fold_convert (c_bool_type
, ifc
);
3104 gfc_add_expr_to_block (&block
,
3105 build_call_expr_loc (input_location
, decl
, 2,
3106 build_int_cst (integer_type_node
,
3108 return gfc_finish_block (&block
);
3112 gfc_trans_omp_cancellation_point (gfc_code
*code
)
3115 switch (code
->ext
.omp_clauses
->cancel
)
3117 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3118 case OMP_CANCEL_DO
: mask
= 2; break;
3119 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3120 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3121 default: gcc_unreachable ();
3123 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT
);
3124 return build_call_expr_loc (input_location
, decl
, 1,
3125 build_int_cst (integer_type_node
, mask
));
3129 gfc_trans_omp_critical (gfc_code
*code
)
3131 tree name
= NULL_TREE
, stmt
;
3132 if (code
->ext
.omp_name
!= NULL
)
3133 name
= get_identifier (code
->ext
.omp_name
);
3134 stmt
= gfc_trans_code (code
->block
->next
);
3135 return build3_loc (input_location
, OMP_CRITICAL
, void_type_node
, stmt
,
3139 typedef struct dovar_init_d
{
3146 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
3147 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
3150 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
;
3151 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
3154 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
3155 int i
, collapse
= clauses
->collapse
;
3156 vec
<dovar_init
> inits
= vNULL
;
3163 code
= code
->block
->next
;
3164 gcc_assert (code
->op
== EXEC_DO
);
3166 init
= make_tree_vec (collapse
);
3167 cond
= make_tree_vec (collapse
);
3168 incr
= make_tree_vec (collapse
);
3172 gfc_start_block (&block
);
3176 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
3178 for (i
= 0; i
< collapse
; i
++)
3181 int dovar_found
= 0;
3186 gfc_omp_namelist
*n
= NULL
;
3187 if (op
!= EXEC_OMP_DISTRIBUTE
)
3188 for (n
= clauses
->lists
[(op
== EXEC_OMP_SIMD
&& collapse
== 1)
3189 ? OMP_LIST_LINEAR
: OMP_LIST_LASTPRIVATE
];
3190 n
!= NULL
; n
= n
->next
)
3191 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3195 else if (n
== NULL
&& op
!= EXEC_OMP_SIMD
)
3196 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
3197 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3203 /* Evaluate all the expressions in the iterator. */
3204 gfc_init_se (&se
, NULL
);
3205 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
3206 gfc_add_block_to_block (pblock
, &se
.pre
);
3208 type
= TREE_TYPE (dovar
);
3209 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
3211 gfc_init_se (&se
, NULL
);
3212 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
3213 gfc_add_block_to_block (pblock
, &se
.pre
);
3214 from
= gfc_evaluate_now (se
.expr
, pblock
);
3216 gfc_init_se (&se
, NULL
);
3217 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
3218 gfc_add_block_to_block (pblock
, &se
.pre
);
3219 to
= gfc_evaluate_now (se
.expr
, pblock
);
3221 gfc_init_se (&se
, NULL
);
3222 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
3223 gfc_add_block_to_block (pblock
, &se
.pre
);
3224 step
= gfc_evaluate_now (se
.expr
, pblock
);
3227 /* Special case simple loops. */
3228 if (TREE_CODE (dovar
) == VAR_DECL
)
3230 if (integer_onep (step
))
3232 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
3237 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
3243 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
3244 /* The condition should not be folded. */
3245 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
3246 ? LE_EXPR
: GE_EXPR
,
3247 boolean_type_node
, dovar
, to
);
3248 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3250 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3253 TREE_VEC_ELT (incr
, i
));
3257 /* STEP is not 1 or -1. Use:
3258 for (count = 0; count < (to + step - from) / step; count++)
3260 dovar = from + count * step;
3264 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
3265 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
3266 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
3268 tmp
= gfc_evaluate_now (tmp
, pblock
);
3269 count
= gfc_create_var (type
, "count");
3270 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
3271 build_int_cst (type
, 0));
3272 /* The condition should not be folded. */
3273 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
3276 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3278 build_int_cst (type
, 1));
3279 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3280 MODIFY_EXPR
, type
, count
,
3281 TREE_VEC_ELT (incr
, i
));
3283 /* Initialize DOVAR. */
3284 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
3285 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
3286 dovar_init e
= {dovar
, tmp
};
3287 inits
.safe_push (e
);
3290 if (dovar_found
== 2
3291 && op
== EXEC_OMP_SIMD
3295 for (tmp
= omp_clauses
; tmp
; tmp
= OMP_CLAUSE_CHAIN (tmp
))
3296 if (OMP_CLAUSE_CODE (tmp
) == OMP_CLAUSE_LINEAR
3297 && OMP_CLAUSE_DECL (tmp
) == dovar
)
3299 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3305 if (op
== EXEC_OMP_SIMD
)
3309 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3310 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
3311 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3314 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3319 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3320 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
3321 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3323 if (dovar_found
== 2)
3330 /* If dovar is lastprivate, but different counter is used,
3331 dovar += step needs to be added to
3332 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
3333 will have the value on entry of the last loop, rather
3334 than value after iterator increment. */
3335 tmp
= gfc_evaluate_now (step
, pblock
);
3336 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, dovar
,
3338 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
3340 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3341 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3342 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3344 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
3347 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
3348 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3350 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
3354 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
3356 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3357 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3358 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3360 tree l
= build_omp_clause (input_location
,
3361 OMP_CLAUSE_LASTPRIVATE
);
3362 OMP_CLAUSE_DECL (l
) = dovar_decl
;
3363 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
3364 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
3366 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
3370 gcc_assert (simple
|| c
!= NULL
);
3374 if (op
!= EXEC_OMP_SIMD
)
3375 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3376 else if (collapse
== 1)
3378 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3379 OMP_CLAUSE_LINEAR_STEP (tmp
) = build_int_cst (type
, 1);
3380 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3381 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
3384 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3385 OMP_CLAUSE_DECL (tmp
) = count
;
3386 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3389 if (i
+ 1 < collapse
)
3390 code
= code
->block
->next
;
3393 if (pblock
!= &block
)
3396 gfc_start_block (&block
);
3399 gfc_start_block (&body
);
3401 FOR_EACH_VEC_ELT (inits
, ix
, di
)
3402 gfc_add_modify (&body
, di
->var
, di
->init
);
3405 /* Cycle statement is implemented with a goto. Exit statement must not be
3406 present for this loop. */
3407 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3409 /* Put these labels where they can be found later. */
3411 code
->cycle_label
= cycle_label
;
3412 code
->exit_label
= NULL_TREE
;
3414 /* Main loop body. */
3415 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
3416 gfc_add_expr_to_block (&body
, tmp
);
3418 /* Label for cycle statements (if needed). */
3419 if (TREE_USED (cycle_label
))
3421 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3422 gfc_add_expr_to_block (&body
, tmp
);
3425 /* End of loop body. */
3428 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
3429 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
3430 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
3431 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
3432 default: gcc_unreachable ();
3435 TREE_TYPE (stmt
) = void_type_node
;
3436 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
3437 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
3438 OMP_FOR_INIT (stmt
) = init
;
3439 OMP_FOR_COND (stmt
) = cond
;
3440 OMP_FOR_INCR (stmt
) = incr
;
3441 gfc_add_expr_to_block (&block
, stmt
);
3443 return gfc_finish_block (&block
);
3446 /* parallel loop and kernels loop. */
3448 gfc_trans_oacc_combined_directive (gfc_code
*code
)
3450 stmtblock_t block
, *pblock
= NULL
;
3451 gfc_omp_clauses construct_clauses
, loop_clauses
;
3452 tree stmt
, oacc_clauses
= NULL_TREE
;
3453 enum tree_code construct_code
;
3457 case EXEC_OACC_PARALLEL_LOOP
:
3458 construct_code
= OACC_PARALLEL
;
3460 case EXEC_OACC_KERNELS_LOOP
:
3461 construct_code
= OACC_KERNELS
;
3467 gfc_start_block (&block
);
3469 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
3470 if (code
->ext
.omp_clauses
!= NULL
)
3472 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
3473 sizeof (construct_clauses
));
3474 loop_clauses
.collapse
= construct_clauses
.collapse
;
3475 loop_clauses
.gang
= construct_clauses
.gang
;
3476 loop_clauses
.gang_static
= construct_clauses
.gang_static
;
3477 loop_clauses
.gang_num_expr
= construct_clauses
.gang_num_expr
;
3478 loop_clauses
.gang_static_expr
= construct_clauses
.gang_static_expr
;
3479 loop_clauses
.vector
= construct_clauses
.vector
;
3480 loop_clauses
.vector_expr
= construct_clauses
.vector_expr
;
3481 loop_clauses
.worker
= construct_clauses
.worker
;
3482 loop_clauses
.worker_expr
= construct_clauses
.worker_expr
;
3483 loop_clauses
.seq
= construct_clauses
.seq
;
3484 loop_clauses
.par_auto
= construct_clauses
.par_auto
;
3485 loop_clauses
.independent
= construct_clauses
.independent
;
3486 loop_clauses
.tile_list
= construct_clauses
.tile_list
;
3487 loop_clauses
.lists
[OMP_LIST_PRIVATE
]
3488 = construct_clauses
.lists
[OMP_LIST_PRIVATE
];
3489 loop_clauses
.lists
[OMP_LIST_REDUCTION
]
3490 = construct_clauses
.lists
[OMP_LIST_REDUCTION
];
3491 construct_clauses
.gang
= false;
3492 construct_clauses
.gang_static
= false;
3493 construct_clauses
.gang_num_expr
= NULL
;
3494 construct_clauses
.gang_static_expr
= NULL
;
3495 construct_clauses
.vector
= false;
3496 construct_clauses
.vector_expr
= NULL
;
3497 construct_clauses
.worker
= false;
3498 construct_clauses
.worker_expr
= NULL
;
3499 construct_clauses
.seq
= false;
3500 construct_clauses
.par_auto
= false;
3501 construct_clauses
.independent
= false;
3502 construct_clauses
.independent
= false;
3503 construct_clauses
.tile_list
= NULL
;
3504 construct_clauses
.lists
[OMP_LIST_PRIVATE
] = NULL
;
3505 if (construct_code
== OACC_KERNELS
)
3506 construct_clauses
.lists
[OMP_LIST_REDUCTION
] = NULL
;
3507 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
3510 if (!loop_clauses
.seq
)
3514 stmt
= gfc_trans_omp_do (code
, EXEC_OACC_LOOP
, pblock
, &loop_clauses
, NULL
);
3515 if (TREE_CODE (stmt
) != BIND_EXPR
)
3516 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3519 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
3521 gfc_add_expr_to_block (&block
, stmt
);
3522 return gfc_finish_block (&block
);
3526 gfc_trans_omp_flush (void)
3528 tree decl
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
3529 return build_call_expr_loc (input_location
, decl
, 0);
3533 gfc_trans_omp_master (gfc_code
*code
)
3535 tree stmt
= gfc_trans_code (code
->block
->next
);
3536 if (IS_EMPTY_STMT (stmt
))
3538 return build1_v (OMP_MASTER
, stmt
);
3542 gfc_trans_omp_ordered (gfc_code
*code
)
3544 return build2_loc (input_location
, OMP_ORDERED
, void_type_node
,
3545 gfc_trans_code (code
->block
->next
), NULL_TREE
);
3549 gfc_trans_omp_parallel (gfc_code
*code
)
3552 tree stmt
, omp_clauses
;
3554 gfc_start_block (&block
);
3555 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3558 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3559 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3560 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3562 gfc_add_expr_to_block (&block
, stmt
);
3563 return gfc_finish_block (&block
);
3570 GFC_OMP_SPLIT_PARALLEL
,
3571 GFC_OMP_SPLIT_DISTRIBUTE
,
3572 GFC_OMP_SPLIT_TEAMS
,
3573 GFC_OMP_SPLIT_TARGET
,
3579 GFC_OMP_MASK_SIMD
= (1 << GFC_OMP_SPLIT_SIMD
),
3580 GFC_OMP_MASK_DO
= (1 << GFC_OMP_SPLIT_DO
),
3581 GFC_OMP_MASK_PARALLEL
= (1 << GFC_OMP_SPLIT_PARALLEL
),
3582 GFC_OMP_MASK_DISTRIBUTE
= (1 << GFC_OMP_SPLIT_DISTRIBUTE
),
3583 GFC_OMP_MASK_TEAMS
= (1 << GFC_OMP_SPLIT_TEAMS
),
3584 GFC_OMP_MASK_TARGET
= (1 << GFC_OMP_SPLIT_TARGET
)
3588 gfc_split_omp_clauses (gfc_code
*code
,
3589 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
3591 int mask
= 0, innermost
= 0;
3592 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
3595 case EXEC_OMP_DISTRIBUTE
:
3596 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3598 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
3599 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3600 innermost
= GFC_OMP_SPLIT_DO
;
3602 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
3603 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
3604 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3605 innermost
= GFC_OMP_SPLIT_SIMD
;
3607 case EXEC_OMP_DISTRIBUTE_SIMD
:
3608 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3609 innermost
= GFC_OMP_SPLIT_SIMD
;
3612 innermost
= GFC_OMP_SPLIT_DO
;
3614 case EXEC_OMP_DO_SIMD
:
3615 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3616 innermost
= GFC_OMP_SPLIT_SIMD
;
3618 case EXEC_OMP_PARALLEL
:
3619 innermost
= GFC_OMP_SPLIT_PARALLEL
;
3621 case EXEC_OMP_PARALLEL_DO
:
3622 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3623 innermost
= GFC_OMP_SPLIT_DO
;
3625 case EXEC_OMP_PARALLEL_DO_SIMD
:
3626 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3627 innermost
= GFC_OMP_SPLIT_SIMD
;
3630 innermost
= GFC_OMP_SPLIT_SIMD
;
3632 case EXEC_OMP_TARGET
:
3633 innermost
= GFC_OMP_SPLIT_TARGET
;
3635 case EXEC_OMP_TARGET_TEAMS
:
3636 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
3637 innermost
= GFC_OMP_SPLIT_TEAMS
;
3639 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
3640 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
3641 | GFC_OMP_MASK_DISTRIBUTE
;
3642 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3644 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
3645 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3646 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3647 innermost
= GFC_OMP_SPLIT_DO
;
3649 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
3650 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3651 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3652 innermost
= GFC_OMP_SPLIT_SIMD
;
3654 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
3655 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
3656 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3657 innermost
= GFC_OMP_SPLIT_SIMD
;
3659 case EXEC_OMP_TEAMS
:
3660 innermost
= GFC_OMP_SPLIT_TEAMS
;
3662 case EXEC_OMP_TEAMS_DISTRIBUTE
:
3663 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
3664 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3666 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
3667 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3668 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3669 innermost
= GFC_OMP_SPLIT_DO
;
3671 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
3672 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3673 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3674 innermost
= GFC_OMP_SPLIT_SIMD
;
3676 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
3677 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3678 innermost
= GFC_OMP_SPLIT_SIMD
;
3685 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
3688 if (code
->ext
.omp_clauses
!= NULL
)
3690 if (mask
& GFC_OMP_MASK_TARGET
)
3692 /* First the clauses that are unique to some constructs. */
3693 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
3694 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
3695 clausesa
[GFC_OMP_SPLIT_TARGET
].device
3696 = code
->ext
.omp_clauses
->device
;
3698 if (mask
& GFC_OMP_MASK_TEAMS
)
3700 /* First the clauses that are unique to some constructs. */
3701 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
3702 = code
->ext
.omp_clauses
->num_teams
;
3703 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
3704 = code
->ext
.omp_clauses
->thread_limit
;
3705 /* Shared and default clauses are allowed on parallel and teams. */
3706 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
3707 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
3708 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
3709 = code
->ext
.omp_clauses
->default_sharing
;
3711 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
3713 /* First the clauses that are unique to some constructs. */
3714 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
3715 = code
->ext
.omp_clauses
->dist_sched_kind
;
3716 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
3717 = code
->ext
.omp_clauses
->dist_chunk_size
;
3718 /* Duplicate collapse. */
3719 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
3720 = code
->ext
.omp_clauses
->collapse
;
3722 if (mask
& GFC_OMP_MASK_PARALLEL
)
3724 /* First the clauses that are unique to some constructs. */
3725 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
3726 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
3727 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
3728 = code
->ext
.omp_clauses
->num_threads
;
3729 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
3730 = code
->ext
.omp_clauses
->proc_bind
;
3731 /* Shared and default clauses are allowed on parallel and teams. */
3732 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
3733 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
3734 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
3735 = code
->ext
.omp_clauses
->default_sharing
;
3737 if (mask
& GFC_OMP_MASK_DO
)
3739 /* First the clauses that are unique to some constructs. */
3740 clausesa
[GFC_OMP_SPLIT_DO
].ordered
3741 = code
->ext
.omp_clauses
->ordered
;
3742 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
3743 = code
->ext
.omp_clauses
->sched_kind
;
3744 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
3745 = code
->ext
.omp_clauses
->chunk_size
;
3746 clausesa
[GFC_OMP_SPLIT_DO
].nowait
3747 = code
->ext
.omp_clauses
->nowait
;
3748 /* Duplicate collapse. */
3749 clausesa
[GFC_OMP_SPLIT_DO
].collapse
3750 = code
->ext
.omp_clauses
->collapse
;
3752 if (mask
& GFC_OMP_MASK_SIMD
)
3754 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
3755 = code
->ext
.omp_clauses
->safelen_expr
;
3756 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LINEAR
]
3757 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
3758 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
3759 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
3760 /* Duplicate collapse. */
3761 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
3762 = code
->ext
.omp_clauses
->collapse
;
3764 /* Private clause is supported on all constructs but target,
3765 it is enough to put it on the innermost one. For
3766 !$ omp do put it on parallel though,
3767 as that's what we did for OpenMP 3.1. */
3768 clausesa
[innermost
== GFC_OMP_SPLIT_DO
3769 ? (int) GFC_OMP_SPLIT_PARALLEL
3770 : innermost
].lists
[OMP_LIST_PRIVATE
]
3771 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
3772 /* Firstprivate clause is supported on all constructs but
3773 target and simd. Put it on the outermost of those and
3774 duplicate on parallel. */
3775 if (mask
& GFC_OMP_MASK_TEAMS
)
3776 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
3777 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3778 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
3779 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
3780 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3781 if (mask
& GFC_OMP_MASK_PARALLEL
)
3782 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
3783 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3784 else if (mask
& GFC_OMP_MASK_DO
)
3785 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
3786 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3787 /* Lastprivate is allowed on do and simd. In
3788 parallel do{, simd} we actually want to put it on
3789 parallel rather than do. */
3790 if (mask
& GFC_OMP_MASK_PARALLEL
)
3791 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
3792 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3793 else if (mask
& GFC_OMP_MASK_DO
)
3794 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
3795 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3796 if (mask
& GFC_OMP_MASK_SIMD
)
3797 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
3798 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3799 /* Reduction is allowed on simd, do, parallel and teams.
3800 Duplicate it on all of them, but omit on do if
3801 parallel is present. */
3802 if (mask
& GFC_OMP_MASK_TEAMS
)
3803 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_REDUCTION
]
3804 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3805 if (mask
& GFC_OMP_MASK_PARALLEL
)
3806 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_REDUCTION
]
3807 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3808 else if (mask
& GFC_OMP_MASK_DO
)
3809 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_REDUCTION
]
3810 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3811 if (mask
& GFC_OMP_MASK_SIMD
)
3812 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_REDUCTION
]
3813 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3814 /* FIXME: This is currently being discussed. */
3815 if (mask
& GFC_OMP_MASK_PARALLEL
)
3816 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
3817 = code
->ext
.omp_clauses
->if_expr
;
3819 clausesa
[GFC_OMP_SPLIT_TARGET
].if_expr
3820 = code
->ext
.omp_clauses
->if_expr
;
3822 if ((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
3823 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
3824 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
3828 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
3829 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
3832 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3833 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
3836 gfc_start_block (&block
);
3838 gfc_init_block (&block
);
3840 if (clausesa
== NULL
)
3842 clausesa
= clausesa_buf
;
3843 gfc_split_omp_clauses (code
, clausesa
);
3847 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
3848 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
3849 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
3852 if (TREE_CODE (body
) != BIND_EXPR
)
3853 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
3857 else if (TREE_CODE (body
) != BIND_EXPR
)
3858 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
3861 stmt
= make_node (OMP_FOR
);
3862 TREE_TYPE (stmt
) = void_type_node
;
3863 OMP_FOR_BODY (stmt
) = body
;
3864 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
3868 gfc_add_expr_to_block (&block
, stmt
);
3869 return gfc_finish_block (&block
);
3873 gfc_trans_omp_parallel_do (gfc_code
*code
, stmtblock_t
*pblock
,
3874 gfc_omp_clauses
*clausesa
)
3876 stmtblock_t block
, *new_pblock
= pblock
;
3877 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3878 tree stmt
, omp_clauses
= NULL_TREE
;
3881 gfc_start_block (&block
);
3883 gfc_init_block (&block
);
3885 if (clausesa
== NULL
)
3887 clausesa
= clausesa_buf
;
3888 gfc_split_omp_clauses (code
, clausesa
);
3891 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
3895 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
3896 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
3897 new_pblock
= &block
;
3901 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DO
, new_pblock
,
3902 &clausesa
[GFC_OMP_SPLIT_DO
], omp_clauses
);
3905 if (TREE_CODE (stmt
) != BIND_EXPR
)
3906 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3910 else if (TREE_CODE (stmt
) != BIND_EXPR
)
3911 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
3912 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3914 OMP_PARALLEL_COMBINED (stmt
) = 1;
3915 gfc_add_expr_to_block (&block
, stmt
);
3916 return gfc_finish_block (&block
);
3920 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
3921 gfc_omp_clauses
*clausesa
)
3924 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3925 tree stmt
, omp_clauses
= NULL_TREE
;
3928 gfc_start_block (&block
);
3930 gfc_init_block (&block
);
3932 if (clausesa
== NULL
)
3934 clausesa
= clausesa_buf
;
3935 gfc_split_omp_clauses (code
, clausesa
);
3939 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
3943 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
3946 if (TREE_CODE (stmt
) != BIND_EXPR
)
3947 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3951 else if (TREE_CODE (stmt
) != BIND_EXPR
)
3952 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
3955 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3957 OMP_PARALLEL_COMBINED (stmt
) = 1;
3959 gfc_add_expr_to_block (&block
, stmt
);
3960 return gfc_finish_block (&block
);
3964 gfc_trans_omp_parallel_sections (gfc_code
*code
)
3967 gfc_omp_clauses section_clauses
;
3968 tree stmt
, omp_clauses
;
3970 memset (§ion_clauses
, 0, sizeof (section_clauses
));
3971 section_clauses
.nowait
= true;
3973 gfc_start_block (&block
);
3974 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3977 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
3978 if (TREE_CODE (stmt
) != BIND_EXPR
)
3979 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3982 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3984 OMP_PARALLEL_COMBINED (stmt
) = 1;
3985 gfc_add_expr_to_block (&block
, stmt
);
3986 return gfc_finish_block (&block
);
3990 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
3993 gfc_omp_clauses workshare_clauses
;
3994 tree stmt
, omp_clauses
;
3996 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
3997 workshare_clauses
.nowait
= true;
3999 gfc_start_block (&block
);
4000 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4003 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
4004 if (TREE_CODE (stmt
) != BIND_EXPR
)
4005 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4008 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4010 OMP_PARALLEL_COMBINED (stmt
) = 1;
4011 gfc_add_expr_to_block (&block
, stmt
);
4012 return gfc_finish_block (&block
);
4016 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4018 stmtblock_t block
, body
;
4019 tree omp_clauses
, stmt
;
4020 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
4022 gfc_start_block (&block
);
4024 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
4026 gfc_init_block (&body
);
4027 for (code
= code
->block
; code
; code
= code
->block
)
4029 /* Last section is special because of lastprivate, so even if it
4030 is empty, chain it in. */
4031 stmt
= gfc_trans_omp_code (code
->next
,
4032 has_lastprivate
&& code
->block
== NULL
);
4033 if (! IS_EMPTY_STMT (stmt
))
4035 stmt
= build1_v (OMP_SECTION
, stmt
);
4036 gfc_add_expr_to_block (&body
, stmt
);
4039 stmt
= gfc_finish_block (&body
);
4041 stmt
= build2_loc (input_location
, OMP_SECTIONS
, void_type_node
, stmt
,
4043 gfc_add_expr_to_block (&block
, stmt
);
4045 return gfc_finish_block (&block
);
4049 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4051 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, clauses
, code
->loc
);
4052 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4053 stmt
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, stmt
,
4059 gfc_trans_omp_task (gfc_code
*code
)
4062 tree stmt
, omp_clauses
;
4064 gfc_start_block (&block
);
4065 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4068 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4069 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4070 stmt
= build2_loc (input_location
, OMP_TASK
, void_type_node
, stmt
,
4072 gfc_add_expr_to_block (&block
, stmt
);
4073 return gfc_finish_block (&block
);
4077 gfc_trans_omp_taskgroup (gfc_code
*code
)
4079 tree stmt
= gfc_trans_code (code
->block
->next
);
4080 return build1_loc (input_location
, OMP_TASKGROUP
, void_type_node
, stmt
);
4084 gfc_trans_omp_taskwait (void)
4086 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
4087 return build_call_expr_loc (input_location
, decl
, 0);
4091 gfc_trans_omp_taskyield (void)
4093 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
4094 return build_call_expr_loc (input_location
, decl
, 0);
4098 gfc_trans_omp_distribute (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
4101 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4102 tree stmt
, omp_clauses
= NULL_TREE
;
4104 gfc_start_block (&block
);
4105 if (clausesa
== NULL
)
4107 clausesa
= clausesa_buf
;
4108 gfc_split_omp_clauses (code
, clausesa
);
4112 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4116 case EXEC_OMP_DISTRIBUTE
:
4117 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4118 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4119 /* This is handled in gfc_trans_omp_do. */
4122 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4123 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4124 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4125 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
4126 if (TREE_CODE (stmt
) != BIND_EXPR
)
4127 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4131 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4132 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4133 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4134 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
4135 if (TREE_CODE (stmt
) != BIND_EXPR
)
4136 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4140 case EXEC_OMP_DISTRIBUTE_SIMD
:
4141 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4142 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4143 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4144 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4145 if (TREE_CODE (stmt
) != BIND_EXPR
)
4146 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4155 tree distribute
= make_node (OMP_DISTRIBUTE
);
4156 TREE_TYPE (distribute
) = void_type_node
;
4157 OMP_FOR_BODY (distribute
) = stmt
;
4158 OMP_FOR_CLAUSES (distribute
) = omp_clauses
;
4161 gfc_add_expr_to_block (&block
, stmt
);
4162 return gfc_finish_block (&block
);
4166 gfc_trans_omp_teams (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
4169 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4170 tree stmt
, omp_clauses
= NULL_TREE
;
4171 bool combined
= true;
4173 gfc_start_block (&block
);
4174 if (clausesa
== NULL
)
4176 clausesa
= clausesa_buf
;
4177 gfc_split_omp_clauses (code
, clausesa
);
4181 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TEAMS
],
4185 case EXEC_OMP_TARGET_TEAMS
:
4186 case EXEC_OMP_TEAMS
:
4187 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4190 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4191 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4192 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DISTRIBUTE
, NULL
,
4193 &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4197 stmt
= gfc_trans_omp_distribute (code
, clausesa
);
4200 stmt
= build2_loc (input_location
, OMP_TEAMS
, void_type_node
, stmt
,
4203 OMP_TEAMS_COMBINED (stmt
) = 1;
4204 gfc_add_expr_to_block (&block
, stmt
);
4205 return gfc_finish_block (&block
);
4209 gfc_trans_omp_target (gfc_code
*code
)
4212 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
4213 tree stmt
, omp_clauses
= NULL_TREE
;
4215 gfc_start_block (&block
);
4216 gfc_split_omp_clauses (code
, clausesa
);
4219 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TARGET
],
4221 if (code
->op
== EXEC_OMP_TARGET
)
4224 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4225 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4230 stmt
= gfc_trans_omp_teams (code
, clausesa
);
4231 if (TREE_CODE (stmt
) != BIND_EXPR
)
4232 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4237 stmt
= build2_loc (input_location
, OMP_TARGET
, void_type_node
, stmt
,
4239 gfc_add_expr_to_block (&block
, stmt
);
4240 return gfc_finish_block (&block
);
4244 gfc_trans_omp_target_data (gfc_code
*code
)
4247 tree stmt
, omp_clauses
;
4249 gfc_start_block (&block
);
4250 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4252 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4253 stmt
= build2_loc (input_location
, OMP_TARGET_DATA
, void_type_node
, stmt
,
4255 gfc_add_expr_to_block (&block
, stmt
);
4256 return gfc_finish_block (&block
);
4260 gfc_trans_omp_target_update (gfc_code
*code
)
4263 tree stmt
, omp_clauses
;
4265 gfc_start_block (&block
);
4266 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4268 stmt
= build1_loc (input_location
, OMP_TARGET_UPDATE
, void_type_node
,
4270 gfc_add_expr_to_block (&block
, stmt
);
4271 return gfc_finish_block (&block
);
4275 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4277 tree res
, tmp
, stmt
;
4278 stmtblock_t block
, *pblock
= NULL
;
4279 stmtblock_t singleblock
;
4280 int saved_ompws_flags
;
4281 bool singleblock_in_progress
= false;
4282 /* True if previous gfc_code in workshare construct is not workshared. */
4283 bool prev_singleunit
;
4285 code
= code
->block
->next
;
4289 gfc_start_block (&block
);
4292 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
4293 prev_singleunit
= false;
4295 /* Translate statements one by one to trees until we reach
4296 the end of the workshare construct. Adjacent gfc_codes that
4297 are a single unit of work are clustered and encapsulated in a
4298 single OMP_SINGLE construct. */
4299 for (; code
; code
= code
->next
)
4301 if (code
->here
!= 0)
4303 res
= gfc_trans_label_here (code
);
4304 gfc_add_expr_to_block (pblock
, res
);
4307 /* No dependence analysis, use for clauses with wait.
4308 If this is the last gfc_code, use default omp_clauses. */
4309 if (code
->next
== NULL
&& clauses
->nowait
)
4310 ompws_flags
|= OMPWS_NOWAIT
;
4312 /* By default, every gfc_code is a single unit of work. */
4313 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
4314 ompws_flags
&= ~(OMPWS_SCALARIZER_WS
| OMPWS_SCALARIZER_BODY
);
4323 res
= gfc_trans_assign (code
);
4326 case EXEC_POINTER_ASSIGN
:
4327 res
= gfc_trans_pointer_assign (code
);
4330 case EXEC_INIT_ASSIGN
:
4331 res
= gfc_trans_init_assign (code
);
4335 res
= gfc_trans_forall (code
);
4339 res
= gfc_trans_where (code
);
4342 case EXEC_OMP_ATOMIC
:
4343 res
= gfc_trans_omp_directive (code
);
4346 case EXEC_OMP_PARALLEL
:
4347 case EXEC_OMP_PARALLEL_DO
:
4348 case EXEC_OMP_PARALLEL_SECTIONS
:
4349 case EXEC_OMP_PARALLEL_WORKSHARE
:
4350 case EXEC_OMP_CRITICAL
:
4351 saved_ompws_flags
= ompws_flags
;
4353 res
= gfc_trans_omp_directive (code
);
4354 ompws_flags
= saved_ompws_flags
;
4358 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
4361 gfc_set_backend_locus (&code
->loc
);
4363 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
4365 if (prev_singleunit
)
4367 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
4368 /* Add current gfc_code to single block. */
4369 gfc_add_expr_to_block (&singleblock
, res
);
4372 /* Finish single block and add it to pblock. */
4373 tmp
= gfc_finish_block (&singleblock
);
4374 tmp
= build2_loc (input_location
, OMP_SINGLE
,
4375 void_type_node
, tmp
, NULL_TREE
);
4376 gfc_add_expr_to_block (pblock
, tmp
);
4377 /* Add current gfc_code to pblock. */
4378 gfc_add_expr_to_block (pblock
, res
);
4379 singleblock_in_progress
= false;
4384 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
4386 /* Start single block. */
4387 gfc_init_block (&singleblock
);
4388 gfc_add_expr_to_block (&singleblock
, res
);
4389 singleblock_in_progress
= true;
4392 /* Add the new statement to the block. */
4393 gfc_add_expr_to_block (pblock
, res
);
4395 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
4399 /* Finish remaining SINGLE block, if we were in the middle of one. */
4400 if (singleblock_in_progress
)
4402 /* Finish single block and add it to pblock. */
4403 tmp
= gfc_finish_block (&singleblock
);
4404 tmp
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, tmp
,
4406 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
4408 gfc_add_expr_to_block (pblock
, tmp
);
4411 stmt
= gfc_finish_block (pblock
);
4412 if (TREE_CODE (stmt
) != BIND_EXPR
)
4414 if (!IS_EMPTY_STMT (stmt
))
4416 tree bindblock
= poplevel (1, 0);
4417 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
4425 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
4426 stmt
= gfc_trans_omp_barrier ();
4433 gfc_trans_oacc_declare (gfc_code
*code
)
4436 tree stmt
, oacc_clauses
;
4437 enum tree_code construct_code
;
4439 construct_code
= OACC_DATA
;
4441 gfc_start_block (&block
);
4443 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.oacc_declare
->clauses
,
4445 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4446 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
4448 gfc_add_expr_to_block (&block
, stmt
);
4450 return gfc_finish_block (&block
);
4454 gfc_trans_oacc_directive (gfc_code
*code
)
4458 case EXEC_OACC_PARALLEL_LOOP
:
4459 case EXEC_OACC_KERNELS_LOOP
:
4460 return gfc_trans_oacc_combined_directive (code
);
4461 case EXEC_OACC_PARALLEL
:
4462 case EXEC_OACC_KERNELS
:
4463 case EXEC_OACC_DATA
:
4464 case EXEC_OACC_HOST_DATA
:
4465 return gfc_trans_oacc_construct (code
);
4466 case EXEC_OACC_LOOP
:
4467 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
4469 case EXEC_OACC_UPDATE
:
4470 case EXEC_OACC_CACHE
:
4471 case EXEC_OACC_ENTER_DATA
:
4472 case EXEC_OACC_EXIT_DATA
:
4473 return gfc_trans_oacc_executable_directive (code
);
4474 case EXEC_OACC_WAIT
:
4475 return gfc_trans_oacc_wait_directive (code
);
4476 case EXEC_OACC_ATOMIC
:
4477 return gfc_trans_omp_atomic (code
);
4478 case EXEC_OACC_DECLARE
:
4479 return gfc_trans_oacc_declare (code
);
4486 gfc_trans_omp_directive (gfc_code
*code
)
4490 case EXEC_OMP_ATOMIC
:
4491 return gfc_trans_omp_atomic (code
);
4492 case EXEC_OMP_BARRIER
:
4493 return gfc_trans_omp_barrier ();
4494 case EXEC_OMP_CANCEL
:
4495 return gfc_trans_omp_cancel (code
);
4496 case EXEC_OMP_CANCELLATION_POINT
:
4497 return gfc_trans_omp_cancellation_point (code
);
4498 case EXEC_OMP_CRITICAL
:
4499 return gfc_trans_omp_critical (code
);
4500 case EXEC_OMP_DISTRIBUTE
:
4503 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
4505 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4506 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4507 case EXEC_OMP_DISTRIBUTE_SIMD
:
4508 return gfc_trans_omp_distribute (code
, NULL
);
4509 case EXEC_OMP_DO_SIMD
:
4510 return gfc_trans_omp_do_simd (code
, NULL
, NULL
, NULL_TREE
);
4511 case EXEC_OMP_FLUSH
:
4512 return gfc_trans_omp_flush ();
4513 case EXEC_OMP_MASTER
:
4514 return gfc_trans_omp_master (code
);
4515 case EXEC_OMP_ORDERED
:
4516 return gfc_trans_omp_ordered (code
);
4517 case EXEC_OMP_PARALLEL
:
4518 return gfc_trans_omp_parallel (code
);
4519 case EXEC_OMP_PARALLEL_DO
:
4520 return gfc_trans_omp_parallel_do (code
, NULL
, NULL
);
4521 case EXEC_OMP_PARALLEL_DO_SIMD
:
4522 return gfc_trans_omp_parallel_do_simd (code
, NULL
, NULL
);
4523 case EXEC_OMP_PARALLEL_SECTIONS
:
4524 return gfc_trans_omp_parallel_sections (code
);
4525 case EXEC_OMP_PARALLEL_WORKSHARE
:
4526 return gfc_trans_omp_parallel_workshare (code
);
4527 case EXEC_OMP_SECTIONS
:
4528 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
4529 case EXEC_OMP_SINGLE
:
4530 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
4531 case EXEC_OMP_TARGET
:
4532 case EXEC_OMP_TARGET_TEAMS
:
4533 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4534 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4535 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4536 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4537 return gfc_trans_omp_target (code
);
4538 case EXEC_OMP_TARGET_DATA
:
4539 return gfc_trans_omp_target_data (code
);
4540 case EXEC_OMP_TARGET_UPDATE
:
4541 return gfc_trans_omp_target_update (code
);
4543 return gfc_trans_omp_task (code
);
4544 case EXEC_OMP_TASKGROUP
:
4545 return gfc_trans_omp_taskgroup (code
);
4546 case EXEC_OMP_TASKWAIT
:
4547 return gfc_trans_omp_taskwait ();
4548 case EXEC_OMP_TASKYIELD
:
4549 return gfc_trans_omp_taskyield ();
4550 case EXEC_OMP_TEAMS
:
4551 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4552 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4553 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4554 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4555 return gfc_trans_omp_teams (code
, NULL
);
4556 case EXEC_OMP_WORKSHARE
:
4557 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);
4564 gfc_trans_omp_declare_simd (gfc_namespace
*ns
)
4569 gfc_omp_declare_simd
*ods
;
4570 for (ods
= ns
->omp_declare_simd
; ods
; ods
= ods
->next
)
4572 tree c
= gfc_trans_omp_clauses (NULL
, ods
->clauses
, ods
->where
, true);
4573 tree fndecl
= ns
->proc_name
->backend_decl
;
4575 c
= tree_cons (NULL_TREE
, c
, NULL_TREE
);
4576 c
= build_tree_list (get_identifier ("omp declare simd"), c
);
4577 TREE_CHAIN (c
) = DECL_ATTRIBUTES (fndecl
);
4578 DECL_ATTRIBUTES (fndecl
) = c
;