/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple-expr.h"
#include "gimplify.h"	/* For create_tmp_var_raw.  */
#include "stringpool.h"
#include "gfortran.h"
#include "diagnostic-core.h"	/* For internal_error.  */
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "arith.h"
#include "omp-low.h"
41 /* True if OpenMP should privatize what this DECL points to rather
42 than the DECL itself. */
45 gfc_omp_privatize_by_reference (const_tree decl
)
47 tree type
= TREE_TYPE (decl
);
49 if (TREE_CODE (type
) == REFERENCE_TYPE
50 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
53 if (TREE_CODE (type
) == POINTER_TYPE
)
55 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
56 that have POINTER_TYPE type and aren't scalar pointers, scalar
57 allocatables, Cray pointees or C pointers are supposed to be
58 privatized by reference. */
59 if (GFC_DECL_GET_SCALAR_POINTER (decl
)
60 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
61 || GFC_DECL_CRAY_POINTEE (decl
)
62 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
65 if (!DECL_ARTIFICIAL (decl
)
66 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
69 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
71 if (DECL_LANG_SPECIFIC (decl
)
72 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
79 /* True if OpenMP sharing attribute of DECL is predetermined. */
81 enum omp_clause_default_kind
82 gfc_omp_predetermined_sharing (tree decl
)
84 /* Associate names preserve the association established during ASSOCIATE.
85 As they are implemented either as pointers to the selector or array
86 descriptor and shouldn't really change in the ASSOCIATE region,
87 this decl can be either shared or firstprivate. If it is a pointer,
88 use firstprivate, as it is cheaper that way, otherwise make it shared. */
89 if (GFC_DECL_ASSOCIATE_VAR_P (decl
))
91 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
92 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
94 return OMP_CLAUSE_DEFAULT_SHARED
;
97 if (DECL_ARTIFICIAL (decl
)
98 && ! GFC_DECL_RESULT (decl
)
99 && ! (DECL_LANG_SPECIFIC (decl
)
100 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
101 return OMP_CLAUSE_DEFAULT_SHARED
;
103 /* Cray pointees shouldn't be listed in any clauses and should be
104 gimplified to dereference of the corresponding Cray pointer.
105 Make them all private, so that they are emitted in the debug
107 if (GFC_DECL_CRAY_POINTEE (decl
))
108 return OMP_CLAUSE_DEFAULT_PRIVATE
;
110 /* Assumed-size arrays are predetermined shared. */
111 if (TREE_CODE (decl
) == PARM_DECL
112 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
113 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
114 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
115 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
117 return OMP_CLAUSE_DEFAULT_SHARED
;
119 /* Dummy procedures aren't considered variables by OpenMP, thus are
120 disallowed in OpenMP clauses. They are represented as PARM_DECLs
121 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
122 to avoid complaining about their uses with default(none). */
123 if (TREE_CODE (decl
) == PARM_DECL
124 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
125 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
126 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
128 /* COMMON and EQUIVALENCE decls are shared. They
129 are only referenced through DECL_VALUE_EXPR of the variables
130 contained in them. If those are privatized, they will not be
131 gimplified to the COMMON or EQUIVALENCE decls. */
132 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
133 return OMP_CLAUSE_DEFAULT_SHARED
;
135 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
136 return OMP_CLAUSE_DEFAULT_SHARED
;
138 /* These are either array or derived parameters, or vtables.
139 In the former cases, the OpenMP standard doesn't consider them to be
140 variables at all (they can't be redefined), but they can nevertheless appear
141 in parallel/task regions and for default(none) purposes treat them as shared.
142 For vtables likely the same handling is desirable. */
143 if (TREE_CODE (decl
) == VAR_DECL
144 && TREE_READONLY (decl
)
145 && TREE_STATIC (decl
))
146 return OMP_CLAUSE_DEFAULT_SHARED
;
148 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
151 /* Return decl that should be used when reporting DEFAULT(NONE)
155 gfc_omp_report_decl (tree decl
)
157 if (DECL_ARTIFICIAL (decl
)
158 && DECL_LANG_SPECIFIC (decl
)
159 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
160 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
165 /* Return true if TYPE has any allocatable components. */
168 gfc_has_alloc_comps (tree type
, tree decl
)
172 if (POINTER_TYPE_P (type
))
174 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
175 type
= TREE_TYPE (type
);
176 else if (GFC_DECL_GET_SCALAR_POINTER (decl
))
180 while (GFC_DESCRIPTOR_TYPE_P (type
) || GFC_ARRAY_TYPE_P (type
))
181 type
= gfc_get_element_type (type
);
183 if (TREE_CODE (type
) != RECORD_TYPE
)
186 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
188 ftype
= TREE_TYPE (field
);
189 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
191 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
192 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
194 if (gfc_has_alloc_comps (ftype
, field
))
200 /* Return true if DECL in private clause needs
201 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
203 gfc_omp_private_outer_ref (tree decl
)
205 tree type
= TREE_TYPE (decl
);
207 if (GFC_DESCRIPTOR_TYPE_P (type
)
208 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
211 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
214 if (gfc_omp_privatize_by_reference (decl
))
215 type
= TREE_TYPE (type
);
217 if (gfc_has_alloc_comps (type
, decl
))
223 /* Callback for gfc_omp_unshare_expr. */
226 gfc_omp_unshare_expr_r (tree
*tp
, int *walk_subtrees
, void *)
229 enum tree_code code
= TREE_CODE (t
);
231 /* Stop at types, decls, constants like copy_tree_r. */
232 if (TREE_CODE_CLASS (code
) == tcc_type
233 || TREE_CODE_CLASS (code
) == tcc_declaration
234 || TREE_CODE_CLASS (code
) == tcc_constant
237 else if (handled_component_p (t
)
238 || TREE_CODE (t
) == MEM_REF
)
240 *tp
= unshare_expr (t
);
247 /* Unshare in expr anything that the FE which normally doesn't
248 care much about tree sharing (because during gimplification
249 everything is unshared) could cause problems with tree sharing
250 at omp-low.c time. */
253 gfc_omp_unshare_expr (tree expr
)
255 walk_tree (&expr
, gfc_omp_unshare_expr_r
, NULL
, NULL
);
/* Operation to perform on each allocatable component found by
   gfc_walk_alloc_comps.  */

enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,		/* Deallocate the component.  */
  WALK_ALLOC_COMPS_DEFAULT_CTOR,	/* Allocate, don't copy payload.  */
  WALK_ALLOC_COMPS_COPY_CTOR		/* Allocate and copy payload.  */
};
266 /* Handle allocatable components in OpenMP clauses. */
269 gfc_walk_alloc_comps (tree decl
, tree dest
, tree var
,
270 enum walk_alloc_comps kind
)
272 stmtblock_t block
, tmpblock
;
273 tree type
= TREE_TYPE (decl
), then_b
, tem
, field
;
274 gfc_init_block (&block
);
276 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
278 if (GFC_DESCRIPTOR_TYPE_P (type
))
280 gfc_init_block (&tmpblock
);
281 tem
= gfc_full_array_size (&tmpblock
, decl
,
282 GFC_TYPE_ARRAY_RANK (type
));
283 then_b
= gfc_finish_block (&tmpblock
);
284 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (then_b
));
285 tem
= gfc_omp_unshare_expr (tem
);
286 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
287 gfc_array_index_type
, tem
,
292 if (!TYPE_DOMAIN (type
)
293 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
294 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
295 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
297 tem
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
298 TYPE_SIZE_UNIT (type
),
299 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
300 tem
= size_binop (MINUS_EXPR
, tem
, size_one_node
);
303 tem
= array_type_nelts (type
);
304 tem
= fold_convert (gfc_array_index_type
, tem
);
307 tree nelems
= gfc_evaluate_now (tem
, &block
);
308 tree index
= gfc_create_var (gfc_array_index_type
, "S");
310 gfc_init_block (&tmpblock
);
311 tem
= gfc_conv_array_data (decl
);
312 tree declvar
= build_fold_indirect_ref_loc (input_location
, tem
);
313 tree declvref
= gfc_build_array_ref (declvar
, index
, NULL
);
314 tree destvar
, destvref
= NULL_TREE
;
317 tem
= gfc_conv_array_data (dest
);
318 destvar
= build_fold_indirect_ref_loc (input_location
, tem
);
319 destvref
= gfc_build_array_ref (destvar
, index
, NULL
);
321 gfc_add_expr_to_block (&tmpblock
,
322 gfc_walk_alloc_comps (declvref
, destvref
,
326 gfc_init_loopinfo (&loop
);
328 loop
.from
[0] = gfc_index_zero_node
;
329 loop
.loopvar
[0] = index
;
331 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
332 gfc_add_block_to_block (&block
, &loop
.pre
);
333 return gfc_finish_block (&block
);
335 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var
))
337 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
339 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
340 type
= TREE_TYPE (decl
);
343 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
);
344 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
346 tree ftype
= TREE_TYPE (field
);
347 tree declf
, destf
= NULL_TREE
;
348 bool has_alloc_comps
= gfc_has_alloc_comps (ftype
, field
);
349 if ((!GFC_DESCRIPTOR_TYPE_P (ftype
)
350 || GFC_TYPE_ARRAY_AKIND (ftype
) != GFC_ARRAY_ALLOCATABLE
)
351 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field
)
354 declf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
355 decl
, field
, NULL_TREE
);
357 destf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
358 dest
, field
, NULL_TREE
);
363 case WALK_ALLOC_COMPS_DTOR
:
365 case WALK_ALLOC_COMPS_DEFAULT_CTOR
:
366 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
367 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
369 gfc_add_modify (&block
, unshare_expr (destf
),
370 unshare_expr (declf
));
371 tem
= gfc_duplicate_allocatable_nocopy
372 (destf
, declf
, ftype
,
373 GFC_TYPE_ARRAY_RANK (ftype
));
375 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
376 tem
= gfc_duplicate_allocatable_nocopy (destf
, declf
, ftype
, 0);
378 case WALK_ALLOC_COMPS_COPY_CTOR
:
379 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
380 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
381 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
,
382 GFC_TYPE_ARRAY_RANK (ftype
));
383 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
384 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
, 0);
388 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
391 gfc_init_block (&tmpblock
);
392 gfc_add_expr_to_block (&tmpblock
,
393 gfc_walk_alloc_comps (declf
, destf
,
395 then_b
= gfc_finish_block (&tmpblock
);
396 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
397 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
398 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
399 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
400 tem
= unshare_expr (declf
);
405 tem
= fold_convert (pvoid_type_node
, tem
);
406 tem
= fold_build2_loc (input_location
, NE_EXPR
,
407 boolean_type_node
, tem
,
409 then_b
= build3_loc (input_location
, COND_EXPR
, void_type_node
,
411 build_empty_stmt (input_location
));
413 gfc_add_expr_to_block (&block
, then_b
);
415 if (kind
== WALK_ALLOC_COMPS_DTOR
)
417 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
418 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
420 tem
= gfc_trans_dealloc_allocated (unshare_expr (declf
),
422 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
424 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
426 tem
= gfc_call_free (unshare_expr (declf
));
427 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
432 return gfc_finish_block (&block
);
435 /* Return code to initialize DECL with its default constructor, or
436 NULL if there's nothing to do. */
439 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
441 tree type
= TREE_TYPE (decl
), size
, ptr
, cond
, then_b
, else_b
;
442 stmtblock_t block
, cond_block
;
444 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_PRIVATE
445 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LASTPRIVATE
446 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
447 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_REDUCTION
);
449 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
450 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
451 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
453 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
456 gfc_start_block (&block
);
457 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
458 OMP_CLAUSE_DECL (clause
),
459 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
460 gfc_add_expr_to_block (&block
, tem
);
461 return gfc_finish_block (&block
);
466 gcc_assert (outer
!= NULL_TREE
);
468 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
469 "not currently allocated" allocation status if outer
470 array is "not currently allocated", otherwise should be allocated. */
471 gfc_start_block (&block
);
473 gfc_init_block (&cond_block
);
475 if (GFC_DESCRIPTOR_TYPE_P (type
))
477 gfc_add_modify (&cond_block
, decl
, outer
);
478 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
479 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
480 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
482 gfc_conv_descriptor_lbound_get (decl
, rank
));
483 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
484 size
, gfc_index_one_node
);
485 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
486 size
= fold_build2_loc (input_location
, MULT_EXPR
,
487 gfc_array_index_type
, size
,
488 gfc_conv_descriptor_stride_get (decl
, rank
));
489 tree esize
= fold_convert (gfc_array_index_type
,
490 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
491 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
493 size
= unshare_expr (size
);
494 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
498 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
499 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
500 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
501 if (GFC_DESCRIPTOR_TYPE_P (type
))
502 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
), ptr
);
504 gfc_add_modify (&cond_block
, unshare_expr (decl
),
505 fold_convert (TREE_TYPE (decl
), ptr
));
506 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
508 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
509 OMP_CLAUSE_DECL (clause
),
510 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
511 gfc_add_expr_to_block (&cond_block
, tem
);
513 then_b
= gfc_finish_block (&cond_block
);
515 /* Reduction clause requires allocated ALLOCATABLE. */
516 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_REDUCTION
)
518 gfc_init_block (&cond_block
);
519 if (GFC_DESCRIPTOR_TYPE_P (type
))
520 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
),
523 gfc_add_modify (&cond_block
, unshare_expr (decl
),
524 build_zero_cst (TREE_TYPE (decl
)));
525 else_b
= gfc_finish_block (&cond_block
);
527 tree tem
= fold_convert (pvoid_type_node
,
528 GFC_DESCRIPTOR_TYPE_P (type
)
529 ? gfc_conv_descriptor_data_get (outer
) : outer
);
530 tem
= unshare_expr (tem
);
531 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
532 tem
, null_pointer_node
);
533 gfc_add_expr_to_block (&block
,
534 build3_loc (input_location
, COND_EXPR
,
535 void_type_node
, cond
, then_b
,
539 gfc_add_expr_to_block (&block
, then_b
);
541 return gfc_finish_block (&block
);
544 /* Build and return code for a copy constructor from SRC to DEST. */
547 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
549 tree type
= TREE_TYPE (dest
), ptr
, size
, call
;
550 tree cond
, then_b
, else_b
;
551 stmtblock_t block
, cond_block
;
553 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
554 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
556 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
557 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
558 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
560 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
562 gfc_start_block (&block
);
563 gfc_add_modify (&block
, dest
, src
);
564 tree tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
565 WALK_ALLOC_COMPS_COPY_CTOR
);
566 gfc_add_expr_to_block (&block
, tem
);
567 return gfc_finish_block (&block
);
570 return build2_v (MODIFY_EXPR
, dest
, src
);
573 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
574 and copied from SRC. */
575 gfc_start_block (&block
);
577 gfc_init_block (&cond_block
);
579 gfc_add_modify (&cond_block
, dest
, src
);
580 if (GFC_DESCRIPTOR_TYPE_P (type
))
582 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
583 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
584 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
586 gfc_conv_descriptor_lbound_get (dest
, rank
));
587 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
588 size
, gfc_index_one_node
);
589 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
590 size
= fold_build2_loc (input_location
, MULT_EXPR
,
591 gfc_array_index_type
, size
,
592 gfc_conv_descriptor_stride_get (dest
, rank
));
593 tree esize
= fold_convert (gfc_array_index_type
,
594 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
595 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
597 size
= unshare_expr (size
);
598 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
602 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
603 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
604 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
605 if (GFC_DESCRIPTOR_TYPE_P (type
))
606 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
), ptr
);
608 gfc_add_modify (&cond_block
, unshare_expr (dest
),
609 fold_convert (TREE_TYPE (dest
), ptr
));
611 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
612 ? gfc_conv_descriptor_data_get (src
) : src
;
613 srcptr
= unshare_expr (srcptr
);
614 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
615 call
= build_call_expr_loc (input_location
,
616 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
618 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
619 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
621 tree tem
= gfc_walk_alloc_comps (src
, dest
,
622 OMP_CLAUSE_DECL (clause
),
623 WALK_ALLOC_COMPS_COPY_CTOR
);
624 gfc_add_expr_to_block (&cond_block
, tem
);
626 then_b
= gfc_finish_block (&cond_block
);
628 gfc_init_block (&cond_block
);
629 if (GFC_DESCRIPTOR_TYPE_P (type
))
630 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
),
633 gfc_add_modify (&cond_block
, unshare_expr (dest
),
634 build_zero_cst (TREE_TYPE (dest
)));
635 else_b
= gfc_finish_block (&cond_block
);
637 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
638 unshare_expr (srcptr
), null_pointer_node
);
639 gfc_add_expr_to_block (&block
,
640 build3_loc (input_location
, COND_EXPR
,
641 void_type_node
, cond
, then_b
, else_b
));
643 return gfc_finish_block (&block
);
646 /* Similarly, except use an intrinsic or pointer assignment operator
650 gfc_omp_clause_assign_op (tree clause
, tree dest
, tree src
)
652 tree type
= TREE_TYPE (dest
), ptr
, size
, call
, nonalloc
;
653 tree cond
, then_b
, else_b
;
654 stmtblock_t block
, cond_block
, cond_block2
, inner_block
;
656 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
657 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
658 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
660 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
662 gfc_start_block (&block
);
663 /* First dealloc any allocatable components in DEST. */
664 tree tem
= gfc_walk_alloc_comps (dest
, NULL_TREE
,
665 OMP_CLAUSE_DECL (clause
),
666 WALK_ALLOC_COMPS_DTOR
);
667 gfc_add_expr_to_block (&block
, tem
);
668 /* Then copy over toplevel data. */
669 gfc_add_modify (&block
, dest
, src
);
670 /* Finally allocate any allocatable components and copy. */
671 tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
672 WALK_ALLOC_COMPS_COPY_CTOR
);
673 gfc_add_expr_to_block (&block
, tem
);
674 return gfc_finish_block (&block
);
677 return build2_v (MODIFY_EXPR
, dest
, src
);
680 gfc_start_block (&block
);
682 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
684 then_b
= gfc_walk_alloc_comps (dest
, NULL_TREE
, OMP_CLAUSE_DECL (clause
),
685 WALK_ALLOC_COMPS_DTOR
);
686 tree tem
= fold_convert (pvoid_type_node
,
687 GFC_DESCRIPTOR_TYPE_P (type
)
688 ? gfc_conv_descriptor_data_get (dest
) : dest
);
689 tem
= unshare_expr (tem
);
690 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
691 tem
, null_pointer_node
);
692 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
693 then_b
, build_empty_stmt (input_location
));
694 gfc_add_expr_to_block (&block
, tem
);
697 gfc_init_block (&cond_block
);
699 if (GFC_DESCRIPTOR_TYPE_P (type
))
701 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
702 size
= gfc_conv_descriptor_ubound_get (src
, rank
);
703 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
705 gfc_conv_descriptor_lbound_get (src
, rank
));
706 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
707 size
, gfc_index_one_node
);
708 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
709 size
= fold_build2_loc (input_location
, MULT_EXPR
,
710 gfc_array_index_type
, size
,
711 gfc_conv_descriptor_stride_get (src
, rank
));
712 tree esize
= fold_convert (gfc_array_index_type
,
713 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
714 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
716 size
= unshare_expr (size
);
717 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
721 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
722 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
724 tree destptr
= GFC_DESCRIPTOR_TYPE_P (type
)
725 ? gfc_conv_descriptor_data_get (dest
) : dest
;
726 destptr
= unshare_expr (destptr
);
727 destptr
= fold_convert (pvoid_type_node
, destptr
);
728 gfc_add_modify (&cond_block
, ptr
, destptr
);
730 nonalloc
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
,
731 destptr
, null_pointer_node
);
733 if (GFC_DESCRIPTOR_TYPE_P (type
))
736 for (i
= 0; i
< GFC_TYPE_ARRAY_RANK (type
); i
++)
738 tree rank
= gfc_rank_cst
[i
];
739 tree tem
= gfc_conv_descriptor_ubound_get (src
, rank
);
740 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
741 gfc_array_index_type
, tem
,
742 gfc_conv_descriptor_lbound_get (src
, rank
));
743 tem
= fold_build2_loc (input_location
, PLUS_EXPR
,
744 gfc_array_index_type
, tem
,
745 gfc_conv_descriptor_lbound_get (dest
, rank
));
746 tem
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
747 tem
, gfc_conv_descriptor_ubound_get (dest
,
749 cond
= fold_build2_loc (input_location
, TRUTH_ORIF_EXPR
,
750 boolean_type_node
, cond
, tem
);
754 gfc_init_block (&cond_block2
);
756 if (GFC_DESCRIPTOR_TYPE_P (type
))
758 gfc_init_block (&inner_block
);
759 gfc_allocate_using_malloc (&inner_block
, ptr
, size
, NULL_TREE
);
760 then_b
= gfc_finish_block (&inner_block
);
762 gfc_init_block (&inner_block
);
763 gfc_add_modify (&inner_block
, ptr
,
764 gfc_call_realloc (&inner_block
, ptr
, size
));
765 else_b
= gfc_finish_block (&inner_block
);
767 gfc_add_expr_to_block (&cond_block2
,
768 build3_loc (input_location
, COND_EXPR
,
770 unshare_expr (nonalloc
),
772 gfc_add_modify (&cond_block2
, dest
, src
);
773 gfc_conv_descriptor_data_set (&cond_block2
, unshare_expr (dest
), ptr
);
777 gfc_allocate_using_malloc (&cond_block2
, ptr
, size
, NULL_TREE
);
778 gfc_add_modify (&cond_block2
, unshare_expr (dest
),
779 fold_convert (type
, ptr
));
781 then_b
= gfc_finish_block (&cond_block2
);
782 else_b
= build_empty_stmt (input_location
);
784 gfc_add_expr_to_block (&cond_block
,
785 build3_loc (input_location
, COND_EXPR
,
786 void_type_node
, unshare_expr (cond
),
789 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
790 ? gfc_conv_descriptor_data_get (src
) : src
;
791 srcptr
= unshare_expr (srcptr
);
792 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
793 call
= build_call_expr_loc (input_location
,
794 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
796 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
797 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
799 tree tem
= gfc_walk_alloc_comps (src
, dest
,
800 OMP_CLAUSE_DECL (clause
),
801 WALK_ALLOC_COMPS_COPY_CTOR
);
802 gfc_add_expr_to_block (&cond_block
, tem
);
804 then_b
= gfc_finish_block (&cond_block
);
806 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_COPYIN
)
808 gfc_init_block (&cond_block
);
809 if (GFC_DESCRIPTOR_TYPE_P (type
))
810 gfc_add_expr_to_block (&cond_block
,
811 gfc_trans_dealloc_allocated (unshare_expr (dest
),
815 destptr
= gfc_evaluate_now (destptr
, &cond_block
);
816 gfc_add_expr_to_block (&cond_block
, gfc_call_free (destptr
));
817 gfc_add_modify (&cond_block
, unshare_expr (dest
),
818 build_zero_cst (TREE_TYPE (dest
)));
820 else_b
= gfc_finish_block (&cond_block
);
822 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
823 unshare_expr (srcptr
), null_pointer_node
);
824 gfc_add_expr_to_block (&block
,
825 build3_loc (input_location
, COND_EXPR
,
826 void_type_node
, cond
,
830 gfc_add_expr_to_block (&block
, then_b
);
832 return gfc_finish_block (&block
);
836 gfc_omp_linear_clause_add_loop (stmtblock_t
*block
, tree dest
, tree src
,
837 tree add
, tree nelems
)
839 stmtblock_t tmpblock
;
840 tree desta
, srca
, index
= gfc_create_var (gfc_array_index_type
, "S");
841 nelems
= gfc_evaluate_now (nelems
, block
);
843 gfc_init_block (&tmpblock
);
844 if (TREE_CODE (TREE_TYPE (dest
)) == ARRAY_TYPE
)
846 desta
= gfc_build_array_ref (dest
, index
, NULL
);
847 srca
= gfc_build_array_ref (src
, index
, NULL
);
851 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest
)));
852 tree idx
= fold_build2 (MULT_EXPR
, sizetype
,
853 fold_convert (sizetype
, index
),
854 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest
))));
855 desta
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
856 TREE_TYPE (dest
), dest
,
858 srca
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
859 TREE_TYPE (src
), src
,
862 gfc_add_modify (&tmpblock
, desta
,
863 fold_build2 (PLUS_EXPR
, TREE_TYPE (desta
),
867 gfc_init_loopinfo (&loop
);
869 loop
.from
[0] = gfc_index_zero_node
;
870 loop
.loopvar
[0] = index
;
872 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
873 gfc_add_block_to_block (block
, &loop
.pre
);
876 /* Build and return code for a constructor of DEST that initializes
877 it to SRC plus ADD (ADD is scalar integer). */
880 gfc_omp_clause_linear_ctor (tree clause
, tree dest
, tree src
, tree add
)
882 tree type
= TREE_TYPE (dest
), ptr
, size
, nelems
= NULL_TREE
;
885 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
887 gfc_start_block (&block
);
888 add
= gfc_evaluate_now (add
, &block
);
890 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
891 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
892 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
894 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
895 if (!TYPE_DOMAIN (type
)
896 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
897 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
898 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
900 nelems
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
901 TYPE_SIZE_UNIT (type
),
902 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
903 nelems
= size_binop (MINUS_EXPR
, nelems
, size_one_node
);
906 nelems
= array_type_nelts (type
);
907 nelems
= fold_convert (gfc_array_index_type
, nelems
);
909 gfc_omp_linear_clause_add_loop (&block
, dest
, src
, add
, nelems
);
910 return gfc_finish_block (&block
);
913 /* Allocatable arrays in LINEAR clauses need to be allocated
914 and copied from SRC. */
915 gfc_add_modify (&block
, dest
, src
);
916 if (GFC_DESCRIPTOR_TYPE_P (type
))
918 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
919 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
920 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
922 gfc_conv_descriptor_lbound_get (dest
, rank
));
923 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
924 size
, gfc_index_one_node
);
925 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
926 size
= fold_build2_loc (input_location
, MULT_EXPR
,
927 gfc_array_index_type
, size
,
928 gfc_conv_descriptor_stride_get (dest
, rank
));
929 tree esize
= fold_convert (gfc_array_index_type
,
930 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
931 nelems
= gfc_evaluate_now (unshare_expr (size
), &block
);
932 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
933 nelems
, unshare_expr (esize
));
934 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
936 nelems
= fold_build2_loc (input_location
, MINUS_EXPR
,
937 gfc_array_index_type
, nelems
,
941 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
942 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
943 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
944 if (GFC_DESCRIPTOR_TYPE_P (type
))
946 gfc_conv_descriptor_data_set (&block
, unshare_expr (dest
), ptr
);
947 tree etype
= gfc_get_element_type (type
);
948 ptr
= fold_convert (build_pointer_type (etype
), ptr
);
949 tree srcptr
= gfc_conv_descriptor_data_get (unshare_expr (src
));
950 srcptr
= fold_convert (build_pointer_type (etype
), srcptr
);
951 gfc_omp_linear_clause_add_loop (&block
, ptr
, srcptr
, add
, nelems
);
955 gfc_add_modify (&block
, unshare_expr (dest
),
956 fold_convert (TREE_TYPE (dest
), ptr
));
957 ptr
= fold_convert (TREE_TYPE (dest
), ptr
);
958 tree dstm
= build_fold_indirect_ref (ptr
);
959 tree srcm
= build_fold_indirect_ref (unshare_expr (src
));
960 gfc_add_modify (&block
, dstm
,
961 fold_build2 (PLUS_EXPR
, TREE_TYPE (add
), srcm
, add
));
963 return gfc_finish_block (&block
);
966 /* Build and return code destructing DECL. Return NULL if nothing
970 gfc_omp_clause_dtor (tree clause
, tree decl
)
972 tree type
= TREE_TYPE (decl
), tem
;
974 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
975 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
976 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
978 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
979 return gfc_walk_alloc_comps (decl
, NULL_TREE
,
980 OMP_CLAUSE_DECL (clause
),
981 WALK_ALLOC_COMPS_DTOR
);
985 if (GFC_DESCRIPTOR_TYPE_P (type
))
986 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
987 to be deallocated if they were allocated. */
988 tem
= gfc_trans_dealloc_allocated (decl
, false, NULL
);
990 tem
= gfc_call_free (decl
);
991 tem
= gfc_omp_unshare_expr (tem
);
993 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
998 gfc_init_block (&block
);
999 gfc_add_expr_to_block (&block
,
1000 gfc_walk_alloc_comps (decl
, NULL_TREE
,
1001 OMP_CLAUSE_DECL (clause
),
1002 WALK_ALLOC_COMPS_DTOR
));
1003 gfc_add_expr_to_block (&block
, tem
);
1004 then_b
= gfc_finish_block (&block
);
1006 tem
= fold_convert (pvoid_type_node
,
1007 GFC_DESCRIPTOR_TYPE_P (type
)
1008 ? gfc_conv_descriptor_data_get (decl
) : decl
);
1009 tem
= unshare_expr (tem
);
1010 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1011 tem
, null_pointer_node
);
1012 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1013 then_b
, build_empty_stmt (input_location
));
1020 gfc_omp_finish_clause (tree c
, gimple_seq
*pre_p
)
1022 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
1025 tree decl
= OMP_CLAUSE_DECL (c
);
1026 tree c2
= NULL_TREE
, c3
= NULL_TREE
, c4
= NULL_TREE
;
1027 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
1029 if (!gfc_omp_privatize_by_reference (decl
)
1030 && !GFC_DECL_GET_SCALAR_POINTER (decl
)
1031 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1032 && !GFC_DECL_CRAY_POINTEE (decl
)
1033 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
1035 tree orig_decl
= decl
;
1036 c4
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1037 OMP_CLAUSE_MAP_KIND (c4
) = OMP_CLAUSE_MAP_POINTER
;
1038 OMP_CLAUSE_DECL (c4
) = decl
;
1039 OMP_CLAUSE_SIZE (c4
) = size_int (0);
1040 decl
= build_fold_indirect_ref (decl
);
1041 OMP_CLAUSE_DECL (c
) = decl
;
1042 OMP_CLAUSE_SIZE (c
) = NULL_TREE
;
1043 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1044 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1045 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1047 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1048 OMP_CLAUSE_MAP_KIND (c3
) = OMP_CLAUSE_MAP_POINTER
;
1049 OMP_CLAUSE_DECL (c3
) = unshare_expr (decl
);
1050 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1051 decl
= build_fold_indirect_ref (decl
);
1052 OMP_CLAUSE_DECL (c
) = decl
;
1055 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1058 gfc_start_block (&block
);
1059 tree type
= TREE_TYPE (decl
);
1060 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1061 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
1062 ptr
= build_fold_indirect_ref (ptr
);
1063 OMP_CLAUSE_DECL (c
) = ptr
;
1064 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1065 OMP_CLAUSE_MAP_KIND (c2
) = OMP_CLAUSE_MAP_TO_PSET
;
1066 OMP_CLAUSE_DECL (c2
) = decl
;
1067 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (type
);
1068 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1069 OMP_CLAUSE_MAP_KIND (c3
) = OMP_CLAUSE_MAP_POINTER
;
1070 OMP_CLAUSE_DECL (c3
) = gfc_conv_descriptor_data_get (decl
);
1071 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1072 tree size
= create_tmp_var (gfc_array_index_type
, NULL
);
1073 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
1074 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
1075 if (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
1076 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
)
1078 stmtblock_t cond_block
;
1079 tree tem
, then_b
, else_b
, zero
, cond
;
1081 gfc_init_block (&cond_block
);
1082 tem
= gfc_full_array_size (&cond_block
, decl
,
1083 GFC_TYPE_ARRAY_RANK (type
));
1084 gfc_add_modify (&cond_block
, size
, tem
);
1085 gfc_add_modify (&cond_block
, size
,
1086 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1088 then_b
= gfc_finish_block (&cond_block
);
1089 gfc_init_block (&cond_block
);
1090 zero
= build_int_cst (gfc_array_index_type
, 0);
1091 gfc_add_modify (&cond_block
, size
, zero
);
1092 else_b
= gfc_finish_block (&cond_block
);
1093 tem
= gfc_conv_descriptor_data_get (decl
);
1094 tem
= fold_convert (pvoid_type_node
, tem
);
1095 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1096 boolean_type_node
, tem
, null_pointer_node
);
1097 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1098 void_type_node
, cond
,
1103 gfc_add_modify (&block
, size
,
1104 gfc_full_array_size (&block
, decl
,
1105 GFC_TYPE_ARRAY_RANK (type
)));
1106 gfc_add_modify (&block
, size
,
1107 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1110 OMP_CLAUSE_SIZE (c
) = size
;
1111 tree stmt
= gfc_finish_block (&block
);
1112 gimplify_and_add (stmt
, pre_p
);
1115 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
1117 = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
1118 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1121 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (last
);
1122 OMP_CLAUSE_CHAIN (last
) = c2
;
1127 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (last
);
1128 OMP_CLAUSE_CHAIN (last
) = c3
;
1133 OMP_CLAUSE_CHAIN (c4
) = OMP_CLAUSE_CHAIN (last
);
1134 OMP_CLAUSE_CHAIN (last
) = c4
;
1140 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1141 disregarded in OpenMP construct, because it is going to be
1142 remapped during OpenMP lowering. SHARED is true if DECL
1143 is going to be shared, false if it is going to be privatized. */
1146 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
1148 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1149 && DECL_HAS_VALUE_EXPR_P (decl
))
1151 tree value
= DECL_VALUE_EXPR (decl
);
1153 if (TREE_CODE (value
) == COMPONENT_REF
1154 && TREE_CODE (TREE_OPERAND (value
, 0)) == VAR_DECL
1155 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1157 /* If variable in COMMON or EQUIVALENCE is privatized, return
1158 true, as just that variable is supposed to be privatized,
1159 not the whole COMMON or whole EQUIVALENCE.
1160 For shared variables in COMMON or EQUIVALENCE, let them be
1161 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1162 from the same COMMON or EQUIVALENCE just one sharing of the
1163 whole COMMON or EQUIVALENCE is enough. */
1168 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
1174 /* Return true if DECL that is shared iff SHARED is true should
1175 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1179 gfc_omp_private_debug_clause (tree decl
, bool shared
)
1181 if (GFC_DECL_CRAY_POINTEE (decl
))
1184 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1185 && DECL_HAS_VALUE_EXPR_P (decl
))
1187 tree value
= DECL_VALUE_EXPR (decl
);
1189 if (TREE_CODE (value
) == COMPONENT_REF
1190 && TREE_CODE (TREE_OPERAND (value
, 0)) == VAR_DECL
1191 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1198 /* Register language specific type size variables as potentially OpenMP
1199 firstprivate variables. */
1202 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
1204 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
1208 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
1209 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
1211 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
1212 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
1213 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
1215 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
1216 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
1222 gfc_trans_add_clause (tree node
, tree tail
)
1224 OMP_CLAUSE_CHAIN (node
) = tail
;
1229 gfc_trans_omp_variable (gfc_symbol
*sym
, bool declare_simd
)
1234 gfc_symbol
*proc_sym
;
1235 gfc_formal_arglist
*f
;
1237 gcc_assert (sym
->attr
.dummy
);
1238 proc_sym
= sym
->ns
->proc_name
;
1239 if (proc_sym
->attr
.entry_master
)
1241 if (gfc_return_by_reference (proc_sym
))
1244 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1247 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1253 return build_int_cst (integer_type_node
, cnt
);
1256 tree t
= gfc_get_symbol_decl (sym
);
1260 bool alternate_entry
;
1263 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1264 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1265 && sym
->result
== sym
;
1266 entry_master
= sym
->attr
.result
1267 && sym
->ns
->proc_name
->attr
.entry_master
1268 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1269 parent_decl
= current_function_decl
1270 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1272 if ((t
== parent_decl
&& return_value
)
1273 || (sym
->ns
&& sym
->ns
->proc_name
1274 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1275 && (alternate_entry
|| entry_master
)))
1280 /* Special case for assigning the return value of a function.
1281 Self recursive functions must have an explicit return value. */
1282 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1283 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1285 /* Similarly for alternate entry points. */
1286 else if (alternate_entry
1287 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1290 gfc_entry_list
*el
= NULL
;
1292 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1295 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1300 else if (entry_master
1301 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1303 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1309 gfc_trans_omp_variable_list (enum omp_clause_code code
,
1310 gfc_omp_namelist
*namelist
, tree list
,
1313 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1314 if (namelist
->sym
->attr
.referenced
|| declare_simd
)
1316 tree t
= gfc_trans_omp_variable (namelist
->sym
, declare_simd
);
1317 if (t
!= error_mark_node
)
1319 tree node
= build_omp_clause (input_location
, code
);
1320 OMP_CLAUSE_DECL (node
) = t
;
1321 list
= gfc_trans_add_clause (node
, list
);
1327 struct omp_udr_find_orig_data
1329 gfc_omp_udr
*omp_udr
;
1334 omp_udr_find_orig (gfc_expr
**e
, int *walk_subtrees ATTRIBUTE_UNUSED
,
1337 struct omp_udr_find_orig_data
*cd
= (struct omp_udr_find_orig_data
*) data
;
1338 if ((*e
)->expr_type
== EXPR_VARIABLE
1339 && (*e
)->symtree
->n
.sym
== cd
->omp_udr
->omp_orig
)
1340 cd
->omp_orig_seen
= true;
1346 gfc_trans_omp_array_reduction_or_udr (tree c
, gfc_omp_namelist
*n
, locus where
)
1348 gfc_symbol
*sym
= n
->sym
;
1349 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
1350 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
1351 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
1352 gfc_symbol omp_var_copy
[4];
1353 gfc_expr
*e1
, *e2
, *e3
, *e4
;
1355 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
1356 locus old_loc
= gfc_current_locus
;
1359 gfc_omp_udr
*udr
= n
->udr
? n
->udr
->udr
: NULL
;
1361 decl
= OMP_CLAUSE_DECL (c
);
1362 gfc_current_locus
= where
;
1363 type
= TREE_TYPE (decl
);
1364 outer_decl
= create_tmp_var_raw (type
, NULL
);
1365 if (TREE_CODE (decl
) == PARM_DECL
1366 && TREE_CODE (type
) == REFERENCE_TYPE
1367 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
1368 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
1370 decl
= build_fold_indirect_ref (decl
);
1371 type
= TREE_TYPE (type
);
1374 /* Create a fake symbol for init value. */
1375 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
1376 init_val_sym
.ns
= sym
->ns
;
1377 init_val_sym
.name
= sym
->name
;
1378 init_val_sym
.ts
= sym
->ts
;
1379 init_val_sym
.attr
.referenced
= 1;
1380 init_val_sym
.declared_at
= where
;
1381 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
1382 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1383 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
1384 else if (udr
->initializer_ns
)
1385 backend_decl
= NULL
;
1387 switch (sym
->ts
.type
)
1393 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
1396 backend_decl
= NULL_TREE
;
1399 init_val_sym
.backend_decl
= backend_decl
;
1401 /* Create a fake symbol for the outer array reference. */
1404 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
1405 outer_sym
.attr
.dummy
= 0;
1406 outer_sym
.attr
.result
= 0;
1407 outer_sym
.attr
.flavor
= FL_VARIABLE
;
1408 outer_sym
.backend_decl
= outer_decl
;
1409 if (decl
!= OMP_CLAUSE_DECL (c
))
1410 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
1412 /* Create fake symtrees for it. */
1413 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
1414 symtree1
->n
.sym
= sym
;
1415 gcc_assert (symtree1
== root1
);
1417 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
1418 symtree2
->n
.sym
= &init_val_sym
;
1419 gcc_assert (symtree2
== root2
);
1421 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
1422 symtree3
->n
.sym
= &outer_sym
;
1423 gcc_assert (symtree3
== root3
);
1425 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
1428 omp_var_copy
[0] = *udr
->omp_out
;
1429 omp_var_copy
[1] = *udr
->omp_in
;
1430 *udr
->omp_out
= outer_sym
;
1431 *udr
->omp_in
= *sym
;
1432 if (udr
->initializer_ns
)
1434 omp_var_copy
[2] = *udr
->omp_priv
;
1435 omp_var_copy
[3] = *udr
->omp_orig
;
1436 *udr
->omp_priv
= *sym
;
1437 *udr
->omp_orig
= outer_sym
;
1441 /* Create expressions. */
1442 e1
= gfc_get_expr ();
1443 e1
->expr_type
= EXPR_VARIABLE
;
1445 e1
->symtree
= symtree1
;
1447 if (sym
->attr
.dimension
)
1449 e1
->ref
= ref
= gfc_get_ref ();
1450 ref
->type
= REF_ARRAY
;
1451 ref
->u
.ar
.where
= where
;
1452 ref
->u
.ar
.as
= sym
->as
;
1453 ref
->u
.ar
.type
= AR_FULL
;
1454 ref
->u
.ar
.dimen
= 0;
1456 t
= gfc_resolve_expr (e1
);
1460 if (backend_decl
!= NULL_TREE
)
1462 e2
= gfc_get_expr ();
1463 e2
->expr_type
= EXPR_VARIABLE
;
1465 e2
->symtree
= symtree2
;
1467 t
= gfc_resolve_expr (e2
);
1470 else if (udr
->initializer_ns
== NULL
)
1472 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
1473 e2
= gfc_default_initializer (&sym
->ts
);
1475 t
= gfc_resolve_expr (e2
);
1478 else if (n
->udr
->initializer
->op
== EXEC_ASSIGN
)
1480 e2
= gfc_copy_expr (n
->udr
->initializer
->expr2
);
1481 t
= gfc_resolve_expr (e2
);
1484 if (udr
&& udr
->initializer_ns
)
1486 struct omp_udr_find_orig_data cd
;
1488 cd
.omp_orig_seen
= false;
1489 gfc_code_walker (&n
->udr
->initializer
,
1490 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
1491 if (cd
.omp_orig_seen
)
1492 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
1495 e3
= gfc_copy_expr (e1
);
1496 e3
->symtree
= symtree3
;
1497 t
= gfc_resolve_expr (e3
);
1502 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
1506 e4
= gfc_add (e3
, e1
);
1509 e4
= gfc_multiply (e3
, e1
);
1511 case TRUTH_ANDIF_EXPR
:
1512 e4
= gfc_and (e3
, e1
);
1514 case TRUTH_ORIF_EXPR
:
1515 e4
= gfc_or (e3
, e1
);
1518 e4
= gfc_eqv (e3
, e1
);
1521 e4
= gfc_neqv (e3
, e1
);
1539 if (n
->udr
->combiner
->op
== EXEC_ASSIGN
)
1542 e3
= gfc_copy_expr (n
->udr
->combiner
->expr1
);
1543 e4
= gfc_copy_expr (n
->udr
->combiner
->expr2
);
1544 t
= gfc_resolve_expr (e3
);
1546 t
= gfc_resolve_expr (e4
);
1555 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
1556 intrinsic_sym
.ns
= sym
->ns
;
1557 intrinsic_sym
.name
= iname
;
1558 intrinsic_sym
.ts
= sym
->ts
;
1559 intrinsic_sym
.attr
.referenced
= 1;
1560 intrinsic_sym
.attr
.intrinsic
= 1;
1561 intrinsic_sym
.attr
.function
= 1;
1562 intrinsic_sym
.result
= &intrinsic_sym
;
1563 intrinsic_sym
.declared_at
= where
;
1565 symtree4
= gfc_new_symtree (&root4
, iname
);
1566 symtree4
->n
.sym
= &intrinsic_sym
;
1567 gcc_assert (symtree4
== root4
);
1569 e4
= gfc_get_expr ();
1570 e4
->expr_type
= EXPR_FUNCTION
;
1572 e4
->symtree
= symtree4
;
1573 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
1574 e4
->value
.function
.actual
->expr
= e3
;
1575 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
1576 e4
->value
.function
.actual
->next
->expr
= e1
;
1578 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1580 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1581 e1
= gfc_copy_expr (e1
);
1582 e3
= gfc_copy_expr (e3
);
1583 t
= gfc_resolve_expr (e4
);
1587 /* Create the init statement list. */
1590 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
1592 stmt
= gfc_trans_call (n
->udr
->initializer
, false,
1593 NULL_TREE
, NULL_TREE
, false);
1594 if (TREE_CODE (stmt
) != BIND_EXPR
)
1595 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1598 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
1600 /* Create the merge statement list. */
1603 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
1605 stmt
= gfc_trans_call (n
->udr
->combiner
, false,
1606 NULL_TREE
, NULL_TREE
, false);
1607 if (TREE_CODE (stmt
) != BIND_EXPR
)
1608 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1611 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
1613 /* And stick the placeholder VAR_DECL into the clause as well. */
1614 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
1616 gfc_current_locus
= old_loc
;
1629 gfc_free_array_spec (outer_sym
.as
);
1633 *udr
->omp_out
= omp_var_copy
[0];
1634 *udr
->omp_in
= omp_var_copy
[1];
1635 if (udr
->initializer_ns
)
1637 *udr
->omp_priv
= omp_var_copy
[2];
1638 *udr
->omp_orig
= omp_var_copy
[3];
1644 gfc_trans_omp_reduction_list (gfc_omp_namelist
*namelist
, tree list
,
1647 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1648 if (namelist
->sym
->attr
.referenced
)
1650 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
1651 if (t
!= error_mark_node
)
1653 tree node
= build_omp_clause (where
.lb
->location
,
1654 OMP_CLAUSE_REDUCTION
);
1655 OMP_CLAUSE_DECL (node
) = t
;
1656 switch (namelist
->u
.reduction_op
)
1658 case OMP_REDUCTION_PLUS
:
1659 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
1661 case OMP_REDUCTION_MINUS
:
1662 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
1664 case OMP_REDUCTION_TIMES
:
1665 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
1667 case OMP_REDUCTION_AND
:
1668 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
1670 case OMP_REDUCTION_OR
:
1671 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
1673 case OMP_REDUCTION_EQV
:
1674 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
1676 case OMP_REDUCTION_NEQV
:
1677 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
1679 case OMP_REDUCTION_MAX
:
1680 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
1682 case OMP_REDUCTION_MIN
:
1683 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
1685 case OMP_REDUCTION_IAND
:
1686 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
1688 case OMP_REDUCTION_IOR
:
1689 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
1691 case OMP_REDUCTION_IEOR
:
1692 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
1694 case OMP_REDUCTION_USER
:
1695 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
1700 if (namelist
->sym
->attr
.dimension
1701 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
1702 || namelist
->sym
->attr
.allocatable
)
1703 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
1704 list
= gfc_trans_add_clause (node
, list
);
1711 gfc_trans_omp_map_clause_list (enum omp_clause_map_kind kind
,
1712 gfc_omp_namelist
*namelist
, tree list
)
1714 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1715 if (namelist
->sym
->attr
.referenced
)
1717 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
1718 if (t
!= error_mark_node
)
1720 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1721 OMP_CLAUSE_DECL (node
) = t
;
1722 OMP_CLAUSE_MAP_KIND (node
) = kind
;
1723 list
= gfc_trans_add_clause (node
, list
);
1730 gfc_convert_expr_to_tree (stmtblock_t
*block
, gfc_expr
*expr
)
1735 gfc_init_se (&se
, NULL
);
1736 gfc_conv_expr (&se
, expr
);
1737 gfc_add_block_to_block (block
, &se
.pre
);
1738 result
= gfc_evaluate_now (se
.expr
, block
);
1739 gfc_add_block_to_block (block
, &se
.post
);
1745 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
1746 locus where
, bool declare_simd
= false)
1748 tree omp_clauses
= NULL_TREE
, chunk_size
, c
;
1750 enum omp_clause_code clause_code
;
1753 if (clauses
== NULL
)
1756 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
1758 gfc_omp_namelist
*n
= clauses
->lists
[list
];
1762 if (list
>= OMP_LIST_DATA_CLAUSE_FIRST
1763 && list
<= OMP_LIST_DATA_CLAUSE_LAST
)
1765 enum omp_clause_map_kind kind
;
1768 case OMP_LIST_DEVICEPTR
:
1769 kind
= OMP_CLAUSE_MAP_FORCE_DEVICEPTR
;
1774 omp_clauses
= gfc_trans_omp_map_clause_list (kind
, n
, omp_clauses
);
1779 case OMP_LIST_REDUCTION
:
1780 omp_clauses
= gfc_trans_omp_reduction_list (n
, omp_clauses
, where
);
1782 case OMP_LIST_PRIVATE
:
1783 clause_code
= OMP_CLAUSE_PRIVATE
;
1785 case OMP_LIST_SHARED
:
1786 clause_code
= OMP_CLAUSE_SHARED
;
1788 case OMP_LIST_FIRSTPRIVATE
:
1789 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
1791 case OMP_LIST_LASTPRIVATE
:
1792 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
1794 case OMP_LIST_COPYIN
:
1795 clause_code
= OMP_CLAUSE_COPYIN
;
1797 case OMP_LIST_COPYPRIVATE
:
1798 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
1800 case OMP_LIST_UNIFORM
:
1801 clause_code
= OMP_CLAUSE_UNIFORM
;
1803 case OMP_LIST_USE_DEVICE
:
1804 clause_code
= OMP_CLAUSE_USE_DEVICE
;
1806 case OMP_LIST_DEVICE_RESIDENT
:
1807 clause_code
= OMP_CLAUSE_DEVICE_RESIDENT
;
1810 clause_code
= OMP_CLAUSE_HOST
;
1812 case OMP_LIST_DEVICE
:
1813 clause_code
= OMP_CLAUSE_OACC_DEVICE
;
1815 case OMP_LIST_CACHE
:
1816 clause_code
= OMP_NO_CLAUSE_CACHE
;
1821 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
1824 case OMP_LIST_ALIGNED
:
1825 for (; n
!= NULL
; n
= n
->next
)
1826 if (n
->sym
->attr
.referenced
|| declare_simd
)
1828 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1829 if (t
!= error_mark_node
)
1831 tree node
= build_omp_clause (input_location
,
1832 OMP_CLAUSE_ALIGNED
);
1833 OMP_CLAUSE_DECL (node
) = t
;
1839 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
1842 gfc_init_se (&se
, NULL
);
1843 gfc_conv_expr (&se
, n
->expr
);
1844 gfc_add_block_to_block (block
, &se
.pre
);
1845 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
1846 gfc_add_block_to_block (block
, &se
.post
);
1848 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
1850 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1854 case OMP_LIST_LINEAR
:
1856 gfc_expr
*last_step_expr
= NULL
;
1857 tree last_step
= NULL_TREE
;
1859 for (; n
!= NULL
; n
= n
->next
)
1863 last_step_expr
= n
->expr
;
1864 last_step
= NULL_TREE
;
1866 if (n
->sym
->attr
.referenced
|| declare_simd
)
1868 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1869 if (t
!= error_mark_node
)
1871 tree node
= build_omp_clause (input_location
,
1873 OMP_CLAUSE_DECL (node
) = t
;
1874 if (last_step_expr
&& last_step
== NULL_TREE
)
1878 = gfc_conv_constant_to_tree (last_step_expr
);
1881 gfc_init_se (&se
, NULL
);
1882 gfc_conv_expr (&se
, last_step_expr
);
1883 gfc_add_block_to_block (block
, &se
.pre
);
1884 last_step
= gfc_evaluate_now (se
.expr
, block
);
1885 gfc_add_block_to_block (block
, &se
.post
);
1888 OMP_CLAUSE_LINEAR_STEP (node
)
1889 = fold_convert (gfc_typenode_for_spec (&n
->sym
->ts
),
1891 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
1892 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
1893 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1899 case OMP_LIST_DEPEND
:
1900 for (; n
!= NULL
; n
= n
->next
)
1902 if (!n
->sym
->attr
.referenced
)
1905 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_DEPEND
);
1906 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
1908 tree decl
= gfc_get_symbol_decl (n
->sym
);
1909 if (gfc_omp_privatize_by_reference (decl
))
1910 decl
= build_fold_indirect_ref (decl
);
1911 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1913 decl
= gfc_conv_descriptor_data_get (decl
);
1914 decl
= fold_convert (build_pointer_type (char_type_node
),
1916 decl
= build_fold_indirect_ref (decl
);
1918 else if (DECL_P (decl
))
1919 TREE_ADDRESSABLE (decl
) = 1;
1920 OMP_CLAUSE_DECL (node
) = decl
;
1925 gfc_init_se (&se
, NULL
);
1926 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
1928 gfc_conv_expr_reference (&se
, n
->expr
);
1933 gfc_conv_expr_descriptor (&se
, n
->expr
);
1934 ptr
= gfc_conv_array_data (se
.expr
);
1936 gfc_add_block_to_block (block
, &se
.pre
);
1937 gfc_add_block_to_block (block
, &se
.post
);
1938 ptr
= fold_convert (build_pointer_type (char_type_node
),
1940 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
1942 switch (n
->u
.depend_op
)
1945 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
1947 case OMP_DEPEND_OUT
:
1948 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
1950 case OMP_DEPEND_INOUT
:
1951 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
1956 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1960 for (; n
!= NULL
; n
= n
->next
)
1962 if (!n
->sym
->attr
.referenced
)
1965 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1966 tree node2
= NULL_TREE
;
1967 tree node3
= NULL_TREE
;
1968 tree node4
= NULL_TREE
;
1969 tree decl
= gfc_get_symbol_decl (n
->sym
);
1971 TREE_ADDRESSABLE (decl
) = 1;
1972 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
1974 if (POINTER_TYPE_P (TREE_TYPE (decl
))
1975 && (gfc_omp_privatize_by_reference (decl
)
1976 || GFC_DECL_GET_SCALAR_POINTER (decl
)
1977 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1978 || GFC_DECL_CRAY_POINTEE (decl
)
1979 || GFC_DESCRIPTOR_TYPE_P
1980 (TREE_TYPE (TREE_TYPE (decl
)))))
1982 tree orig_decl
= decl
;
1983 node4
= build_omp_clause (input_location
,
1985 OMP_CLAUSE_MAP_KIND (node4
) = OMP_CLAUSE_MAP_POINTER
;
1986 OMP_CLAUSE_DECL (node4
) = decl
;
1987 OMP_CLAUSE_SIZE (node4
) = size_int (0);
1988 decl
= build_fold_indirect_ref (decl
);
1989 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1990 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1991 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1993 node3
= build_omp_clause (input_location
,
1995 OMP_CLAUSE_MAP_KIND (node3
) = OMP_CLAUSE_MAP_POINTER
;
1996 OMP_CLAUSE_DECL (node3
) = decl
;
1997 OMP_CLAUSE_SIZE (node3
) = size_int (0);
1998 decl
= build_fold_indirect_ref (decl
);
2001 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2003 tree type
= TREE_TYPE (decl
);
2004 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2005 ptr
= fold_convert (build_pointer_type (char_type_node
),
2007 ptr
= build_fold_indirect_ref (ptr
);
2008 OMP_CLAUSE_DECL (node
) = ptr
;
2009 node2
= build_omp_clause (input_location
,
2011 OMP_CLAUSE_MAP_KIND (node2
) = OMP_CLAUSE_MAP_TO_PSET
;
2012 OMP_CLAUSE_DECL (node2
) = decl
;
2013 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2014 node3
= build_omp_clause (input_location
,
2016 OMP_CLAUSE_MAP_KIND (node3
) = OMP_CLAUSE_MAP_POINTER
;
2017 OMP_CLAUSE_DECL (node3
)
2018 = gfc_conv_descriptor_data_get (decl
);
2019 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2020 if (n
->sym
->attr
.pointer
)
2022 stmtblock_t cond_block
;
2024 = gfc_create_var (gfc_array_index_type
, NULL
);
2025 tree tem
, then_b
, else_b
, zero
, cond
;
2027 gfc_init_block (&cond_block
);
2029 = gfc_full_array_size (&cond_block
, decl
,
2030 GFC_TYPE_ARRAY_RANK (type
));
2031 gfc_add_modify (&cond_block
, size
, tem
);
2032 then_b
= gfc_finish_block (&cond_block
);
2033 gfc_init_block (&cond_block
);
2034 zero
= build_int_cst (gfc_array_index_type
, 0);
2035 gfc_add_modify (&cond_block
, size
, zero
);
2036 else_b
= gfc_finish_block (&cond_block
);
2037 tem
= gfc_conv_descriptor_data_get (decl
);
2038 tem
= fold_convert (pvoid_type_node
, tem
);
2039 cond
= fold_build2_loc (input_location
, NE_EXPR
,
2041 tem
, null_pointer_node
);
2042 gfc_add_expr_to_block (block
,
2043 build3_loc (input_location
,
2048 OMP_CLAUSE_SIZE (node
) = size
;
2051 OMP_CLAUSE_SIZE (node
)
2052 = gfc_full_array_size (block
, decl
,
2053 GFC_TYPE_ARRAY_RANK (type
));
2055 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2056 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2057 OMP_CLAUSE_SIZE (node
)
2058 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2059 OMP_CLAUSE_SIZE (node
), elemsz
);
2062 OMP_CLAUSE_DECL (node
) = decl
;
2067 gfc_init_se (&se
, NULL
);
2068 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2070 gfc_conv_expr_reference (&se
, n
->expr
);
2071 gfc_add_block_to_block (block
, &se
.pre
);
2073 OMP_CLAUSE_SIZE (node
)
2074 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2078 gfc_conv_expr_descriptor (&se
, n
->expr
);
2079 ptr
= gfc_conv_array_data (se
.expr
);
2080 tree type
= TREE_TYPE (se
.expr
);
2081 gfc_add_block_to_block (block
, &se
.pre
);
2082 OMP_CLAUSE_SIZE (node
)
2083 = gfc_full_array_size (block
, se
.expr
,
2084 GFC_TYPE_ARRAY_RANK (type
));
2086 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2087 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2088 OMP_CLAUSE_SIZE (node
)
2089 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2090 OMP_CLAUSE_SIZE (node
), elemsz
);
2092 gfc_add_block_to_block (block
, &se
.post
);
2093 ptr
= fold_convert (build_pointer_type (char_type_node
),
2095 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2097 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2098 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
2100 node4
= build_omp_clause (input_location
,
2102 OMP_CLAUSE_MAP_KIND (node4
) = OMP_CLAUSE_MAP_POINTER
;
2103 OMP_CLAUSE_DECL (node4
) = decl
;
2104 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2105 decl
= build_fold_indirect_ref (decl
);
2107 ptr
= fold_convert (sizetype
, ptr
);
2108 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2110 tree type
= TREE_TYPE (decl
);
2111 ptr2
= gfc_conv_descriptor_data_get (decl
);
2112 node2
= build_omp_clause (input_location
,
2114 OMP_CLAUSE_MAP_KIND (node2
) = OMP_CLAUSE_MAP_TO_PSET
;
2115 OMP_CLAUSE_DECL (node2
) = decl
;
2116 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2117 node3
= build_omp_clause (input_location
,
2119 OMP_CLAUSE_MAP_KIND (node3
) = OMP_CLAUSE_MAP_POINTER
;
2120 OMP_CLAUSE_DECL (node3
)
2121 = gfc_conv_descriptor_data_get (decl
);
2125 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2126 ptr2
= build_fold_addr_expr (decl
);
2129 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2132 node3
= build_omp_clause (input_location
,
2134 OMP_CLAUSE_MAP_KIND (node3
) = OMP_CLAUSE_MAP_POINTER
;
2135 OMP_CLAUSE_DECL (node3
) = decl
;
2137 ptr2
= fold_convert (sizetype
, ptr2
);
2138 OMP_CLAUSE_SIZE (node3
)
2139 = fold_build2 (MINUS_EXPR
, sizetype
, ptr
, ptr2
);
2141 switch (n
->u
.map_op
)
2144 OMP_CLAUSE_MAP_KIND (node
) = OMP_CLAUSE_MAP_ALLOC
;
2147 OMP_CLAUSE_MAP_KIND (node
) = OMP_CLAUSE_MAP_TO
;
2150 OMP_CLAUSE_MAP_KIND (node
) = OMP_CLAUSE_MAP_FROM
;
2152 case OMP_MAP_TOFROM
:
2153 OMP_CLAUSE_MAP_KIND (node
) = OMP_CLAUSE_MAP_TOFROM
;
2155 case OMP_MAP_FORCE_ALLOC
:
2156 OMP_CLAUSE_MAP_KIND (node
) = OMP_CLAUSE_MAP_FORCE_ALLOC
;
2158 case OMP_MAP_FORCE_DEALLOC
:
2159 OMP_CLAUSE_MAP_KIND (node
) = OMP_CLAUSE_MAP_FORCE_DEALLOC
;
2161 case OMP_MAP_FORCE_TO
:
2162 OMP_CLAUSE_MAP_KIND (node
) = OMP_CLAUSE_MAP_FORCE_TO
;
2164 case OMP_MAP_FORCE_FROM
:
2165 OMP_CLAUSE_MAP_KIND (node
) = OMP_CLAUSE_MAP_FORCE_FROM
;
2167 case OMP_MAP_FORCE_TOFROM
:
2168 OMP_CLAUSE_MAP_KIND (node
) = OMP_CLAUSE_MAP_FORCE_TOFROM
;
2170 case OMP_MAP_FORCE_PRESENT
:
2171 OMP_CLAUSE_MAP_KIND (node
) = OMP_CLAUSE_MAP_FORCE_PRESENT
;
2176 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2178 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
2180 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
2182 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
2187 for (; n
!= NULL
; n
= n
->next
)
2189 if (!n
->sym
->attr
.referenced
)
2192 tree node
= build_omp_clause (input_location
,
2194 ? OMP_CLAUSE_TO
: OMP_CLAUSE_FROM
);
2195 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2197 tree decl
= gfc_get_symbol_decl (n
->sym
);
2198 if (gfc_omp_privatize_by_reference (decl
))
2199 decl
= build_fold_indirect_ref (decl
);
2200 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2202 tree type
= TREE_TYPE (decl
);
2203 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2204 ptr
= fold_convert (build_pointer_type (char_type_node
),
2206 ptr
= build_fold_indirect_ref (ptr
);
2207 OMP_CLAUSE_DECL (node
) = ptr
;
2208 OMP_CLAUSE_SIZE (node
)
2209 = gfc_full_array_size (block
, decl
,
2210 GFC_TYPE_ARRAY_RANK (type
));
2212 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2213 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2214 OMP_CLAUSE_SIZE (node
)
2215 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2216 OMP_CLAUSE_SIZE (node
), elemsz
);
2219 OMP_CLAUSE_DECL (node
) = decl
;
2224 gfc_init_se (&se
, NULL
);
2225 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2227 gfc_conv_expr_reference (&se
, n
->expr
);
2229 gfc_add_block_to_block (block
, &se
.pre
);
2230 OMP_CLAUSE_SIZE (node
)
2231 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2235 gfc_conv_expr_descriptor (&se
, n
->expr
);
2236 ptr
= gfc_conv_array_data (se
.expr
);
2237 tree type
= TREE_TYPE (se
.expr
);
2238 gfc_add_block_to_block (block
, &se
.pre
);
2239 OMP_CLAUSE_SIZE (node
)
2240 = gfc_full_array_size (block
, se
.expr
,
2241 GFC_TYPE_ARRAY_RANK (type
));
2243 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2244 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2245 OMP_CLAUSE_SIZE (node
)
2246 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2247 OMP_CLAUSE_SIZE (node
), elemsz
);
2249 gfc_add_block_to_block (block
, &se
.post
);
2250 ptr
= fold_convert (build_pointer_type (char_type_node
),
2252 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2254 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2262 if (clauses
->if_expr
)
2266 gfc_init_se (&se
, NULL
);
2267 gfc_conv_expr (&se
, clauses
->if_expr
);
2268 gfc_add_block_to_block (block
, &se
.pre
);
2269 if_var
= gfc_evaluate_now (se
.expr
, block
);
2270 gfc_add_block_to_block (block
, &se
.post
);
2272 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
2273 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2274 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2277 if (clauses
->final_expr
)
2281 gfc_init_se (&se
, NULL
);
2282 gfc_conv_expr (&se
, clauses
->final_expr
);
2283 gfc_add_block_to_block (block
, &se
.pre
);
2284 final_var
= gfc_evaluate_now (se
.expr
, block
);
2285 gfc_add_block_to_block (block
, &se
.post
);
2287 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FINAL
);
2288 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
2289 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2292 if (clauses
->num_threads
)
2296 gfc_init_se (&se
, NULL
);
2297 gfc_conv_expr (&se
, clauses
->num_threads
);
2298 gfc_add_block_to_block (block
, &se
.pre
);
2299 num_threads
= gfc_evaluate_now (se
.expr
, block
);
2300 gfc_add_block_to_block (block
, &se
.post
);
2302 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_THREADS
);
2303 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
2304 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2307 chunk_size
= NULL_TREE
;
2308 if (clauses
->chunk_size
)
2310 gfc_init_se (&se
, NULL
);
2311 gfc_conv_expr (&se
, clauses
->chunk_size
);
2312 gfc_add_block_to_block (block
, &se
.pre
);
2313 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2314 gfc_add_block_to_block (block
, &se
.post
);
2317 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
2319 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SCHEDULE
);
2320 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2321 switch (clauses
->sched_kind
)
2323 case OMP_SCHED_STATIC
:
2324 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
2326 case OMP_SCHED_DYNAMIC
:
2327 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
2329 case OMP_SCHED_GUIDED
:
2330 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
2332 case OMP_SCHED_RUNTIME
:
2333 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
2335 case OMP_SCHED_AUTO
:
2336 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
2341 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2344 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
2346 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULT
);
2347 switch (clauses
->default_sharing
)
2349 case OMP_DEFAULT_NONE
:
2350 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
2352 case OMP_DEFAULT_SHARED
:
2353 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
2355 case OMP_DEFAULT_PRIVATE
:
2356 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
2358 case OMP_DEFAULT_FIRSTPRIVATE
:
2359 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
2364 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2367 if (clauses
->nowait
)
2369 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOWAIT
);
2370 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2373 if (clauses
->ordered
)
2375 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ORDERED
);
2376 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2379 if (clauses
->untied
)
2381 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_UNTIED
);
2382 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2385 if (clauses
->mergeable
)
2387 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_MERGEABLE
);
2388 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2391 if (clauses
->collapse
)
2393 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_COLLAPSE
);
2394 OMP_CLAUSE_COLLAPSE_EXPR (c
)
2395 = build_int_cst (integer_type_node
, clauses
->collapse
);
2396 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2399 if (clauses
->inbranch
)
2401 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INBRANCH
);
2402 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2405 if (clauses
->notinbranch
)
2407 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOTINBRANCH
);
2408 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2411 switch (clauses
->cancel
)
2413 case OMP_CANCEL_UNKNOWN
:
2415 case OMP_CANCEL_PARALLEL
:
2416 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PARALLEL
);
2417 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2419 case OMP_CANCEL_SECTIONS
:
2420 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SECTIONS
);
2421 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2424 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FOR
);
2425 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2427 case OMP_CANCEL_TASKGROUP
:
2428 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TASKGROUP
);
2429 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2433 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
2435 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PROC_BIND
);
2436 switch (clauses
->proc_bind
)
2438 case OMP_PROC_BIND_MASTER
:
2439 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
2441 case OMP_PROC_BIND_SPREAD
:
2442 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
2444 case OMP_PROC_BIND_CLOSE
:
2445 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
2450 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2453 if (clauses
->safelen_expr
)
2457 gfc_init_se (&se
, NULL
);
2458 gfc_conv_expr (&se
, clauses
->safelen_expr
);
2459 gfc_add_block_to_block (block
, &se
.pre
);
2460 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
2461 gfc_add_block_to_block (block
, &se
.post
);
2463 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SAFELEN
);
2464 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
2465 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2468 if (clauses
->simdlen_expr
)
2470 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2471 OMP_CLAUSE_SIMDLEN_EXPR (c
)
2472 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
2473 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2476 if (clauses
->num_teams
)
2480 gfc_init_se (&se
, NULL
);
2481 gfc_conv_expr (&se
, clauses
->num_teams
);
2482 gfc_add_block_to_block (block
, &se
.pre
);
2483 num_teams
= gfc_evaluate_now (se
.expr
, block
);
2484 gfc_add_block_to_block (block
, &se
.post
);
2486 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TEAMS
);
2487 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
2488 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2491 if (clauses
->device
)
2495 gfc_init_se (&se
, NULL
);
2496 gfc_conv_expr (&se
, clauses
->device
);
2497 gfc_add_block_to_block (block
, &se
.pre
);
2498 device
= gfc_evaluate_now (se
.expr
, block
);
2499 gfc_add_block_to_block (block
, &se
.post
);
2501 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEVICE
);
2502 OMP_CLAUSE_DEVICE_ID (c
) = device
;
2503 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2506 if (clauses
->thread_limit
)
2510 gfc_init_se (&se
, NULL
);
2511 gfc_conv_expr (&se
, clauses
->thread_limit
);
2512 gfc_add_block_to_block (block
, &se
.pre
);
2513 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
2514 gfc_add_block_to_block (block
, &se
.post
);
2516 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREAD_LIMIT
);
2517 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
2518 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2521 chunk_size
= NULL_TREE
;
2522 if (clauses
->dist_chunk_size
)
2524 gfc_init_se (&se
, NULL
);
2525 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
2526 gfc_add_block_to_block (block
, &se
.pre
);
2527 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2528 gfc_add_block_to_block (block
, &se
.post
);
2531 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
2533 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DIST_SCHEDULE
);
2534 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2535 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2540 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ASYNC
);
2541 if (clauses
->async_expr
)
2542 OMP_CLAUSE_ASYNC_EXPR (c
) =
2543 gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
2545 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
2546 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2550 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ORDERED
);
2551 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2553 if (clauses
->independent
)
2555 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INDEPENDENT
);
2556 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2558 if (clauses
->num_gangs_expr
)
2560 tree num_gangs_var
=
2561 gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
2562 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_GANGS
);
2563 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
2564 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2566 if (clauses
->num_workers_expr
)
2568 tree num_workers_var
=
2569 gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
2570 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_WORKERS
);
2571 OMP_CLAUSE_NUM_WORKERS_EXPR (c
)= num_workers_var
;
2572 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2574 if (clauses
->vector_length_expr
)
2576 tree vector_length_var
=
2577 gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
2578 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR_LENGTH
);
2579 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
)= vector_length_var
;
2580 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2582 if (clauses
->vector
)
2584 if (clauses
->vector_expr
)
2587 gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
2588 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2589 OMP_CLAUSE_VECTOR_EXPR (c
)= vector_var
;
2590 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2594 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2595 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2598 if (clauses
->worker
)
2600 if (clauses
->worker_expr
)
2603 gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
2604 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2605 OMP_CLAUSE_WORKER_EXPR (c
)= worker_var
;
2606 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2610 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2611 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2616 if (clauses
->gang_expr
)
2619 gfc_convert_expr_to_tree (block
, clauses
->gang_expr
);
2620 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GANG
);
2621 OMP_CLAUSE_GANG_EXPR (c
)= gang_var
;
2622 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2626 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GANG
);
2627 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2630 if (clauses
->non_clause_wait_expr
)
2633 gfc_convert_expr_to_tree (block
, clauses
->non_clause_wait_expr
);
2634 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WAIT
);
2635 OMP_CLAUSE_WAIT_EXPR (c
)= wait_var
;
2636 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2639 return nreverse (omp_clauses
);
2642 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
2645 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
2650 stmt
= gfc_trans_code (code
);
2651 if (TREE_CODE (stmt
) != BIND_EXPR
)
2653 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
2655 tree block
= poplevel (1, 0);
2656 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
2666 /* Trans OpenACC directives. */
2667 /* parallel, kernels, data and host_data. */
2669 gfc_trans_oacc_construct (gfc_code
*code
)
2672 tree stmt
, oacc_clauses
;
2673 enum tree_code construct_code
;
2677 case EXEC_OACC_PARALLEL
:
2678 construct_code
= OACC_PARALLEL
;
2680 case EXEC_OACC_KERNELS
:
2681 construct_code
= OACC_KERNELS
;
2683 case EXEC_OACC_DATA
:
2684 construct_code
= OACC_DATA
;
2686 case EXEC_OACC_HOST_DATA
:
2687 construct_code
= OACC_HOST_DATA
;
2693 gfc_start_block (&block
);
2694 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
2696 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
2697 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
2699 gfc_add_expr_to_block (&block
, stmt
);
2700 return gfc_finish_block (&block
);
2703 /* update, enter_data, exit_data, wait, cache. */
2705 gfc_trans_oacc_executable_directive (gfc_code
*code
)
2708 tree stmt
, oacc_clauses
;
2709 enum tree_code construct_code
;
2713 case EXEC_OACC_UPDATE
:
2714 construct_code
= OACC_UPDATE
;
2716 case EXEC_OACC_ENTER_DATA
:
2717 construct_code
= OACC_ENTER_DATA
;
2719 case EXEC_OACC_EXIT_DATA
:
2720 construct_code
= OACC_EXIT_DATA
;
2722 case EXEC_OACC_WAIT
:
2723 construct_code
= OACC_WAIT
;
2725 case EXEC_OACC_CACHE
:
2726 construct_code
= OACC_CACHE
;
2732 gfc_start_block (&block
);
2733 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
2735 stmt
= build1_loc (input_location
, construct_code
, void_type_node
,
2737 gfc_add_expr_to_block (&block
, stmt
);
2738 return gfc_finish_block (&block
);
2741 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
2742 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
2745 gfc_trans_omp_atomic (gfc_code
*code
)
2747 gfc_code
*atomic_code
= code
;
2751 gfc_expr
*expr2
, *e
;
2754 tree lhsaddr
, type
, rhs
, x
;
2755 enum tree_code op
= ERROR_MARK
;
2756 enum tree_code aop
= OMP_ATOMIC
;
2757 bool var_on_left
= false;
2758 bool seq_cst
= (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SEQ_CST
) != 0;
2760 code
= code
->block
->next
;
2761 gcc_assert (code
->op
== EXEC_ASSIGN
);
2762 var
= code
->expr1
->symtree
->n
.sym
;
2764 gfc_init_se (&lse
, NULL
);
2765 gfc_init_se (&rse
, NULL
);
2766 gfc_init_se (&vse
, NULL
);
2767 gfc_start_block (&block
);
2769 expr2
= code
->expr2
;
2770 if (expr2
->expr_type
== EXPR_FUNCTION
2771 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2772 expr2
= expr2
->value
.function
.actual
->expr
;
2774 switch (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2776 case GFC_OMP_ATOMIC_READ
:
2777 gfc_conv_expr (&vse
, code
->expr1
);
2778 gfc_add_block_to_block (&block
, &vse
.pre
);
2780 gfc_conv_expr (&lse
, expr2
);
2781 gfc_add_block_to_block (&block
, &lse
.pre
);
2782 type
= TREE_TYPE (lse
.expr
);
2783 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
2785 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
2786 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
2787 x
= convert (TREE_TYPE (vse
.expr
), x
);
2788 gfc_add_modify (&block
, vse
.expr
, x
);
2790 gfc_add_block_to_block (&block
, &lse
.pre
);
2791 gfc_add_block_to_block (&block
, &rse
.pre
);
2793 return gfc_finish_block (&block
);
2794 case GFC_OMP_ATOMIC_CAPTURE
:
2795 aop
= OMP_ATOMIC_CAPTURE_NEW
;
2796 if (expr2
->expr_type
== EXPR_VARIABLE
)
2798 aop
= OMP_ATOMIC_CAPTURE_OLD
;
2799 gfc_conv_expr (&vse
, code
->expr1
);
2800 gfc_add_block_to_block (&block
, &vse
.pre
);
2802 gfc_conv_expr (&lse
, expr2
);
2803 gfc_add_block_to_block (&block
, &lse
.pre
);
2804 gfc_init_se (&lse
, NULL
);
2806 var
= code
->expr1
->symtree
->n
.sym
;
2807 expr2
= code
->expr2
;
2808 if (expr2
->expr_type
== EXPR_FUNCTION
2809 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2810 expr2
= expr2
->value
.function
.actual
->expr
;
2817 gfc_conv_expr (&lse
, code
->expr1
);
2818 gfc_add_block_to_block (&block
, &lse
.pre
);
2819 type
= TREE_TYPE (lse
.expr
);
2820 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
2822 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2823 == GFC_OMP_ATOMIC_WRITE
)
2824 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
2826 gfc_conv_expr (&rse
, expr2
);
2827 gfc_add_block_to_block (&block
, &rse
.pre
);
2829 else if (expr2
->expr_type
== EXPR_OP
)
2832 switch (expr2
->value
.op
.op
)
2834 case INTRINSIC_PLUS
:
2837 case INTRINSIC_TIMES
:
2840 case INTRINSIC_MINUS
:
2843 case INTRINSIC_DIVIDE
:
2844 if (expr2
->ts
.type
== BT_INTEGER
)
2845 op
= TRUNC_DIV_EXPR
;
2850 op
= TRUTH_ANDIF_EXPR
;
2853 op
= TRUTH_ORIF_EXPR
;
2858 case INTRINSIC_NEQV
:
2864 e
= expr2
->value
.op
.op1
;
2865 if (e
->expr_type
== EXPR_FUNCTION
2866 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2867 e
= e
->value
.function
.actual
->expr
;
2868 if (e
->expr_type
== EXPR_VARIABLE
2869 && e
->symtree
!= NULL
2870 && e
->symtree
->n
.sym
== var
)
2872 expr2
= expr2
->value
.op
.op2
;
2877 e
= expr2
->value
.op
.op2
;
2878 if (e
->expr_type
== EXPR_FUNCTION
2879 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2880 e
= e
->value
.function
.actual
->expr
;
2881 gcc_assert (e
->expr_type
== EXPR_VARIABLE
2882 && e
->symtree
!= NULL
2883 && e
->symtree
->n
.sym
== var
);
2884 expr2
= expr2
->value
.op
.op1
;
2885 var_on_left
= false;
2887 gfc_conv_expr (&rse
, expr2
);
2888 gfc_add_block_to_block (&block
, &rse
.pre
);
2892 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
2893 switch (expr2
->value
.function
.isym
->id
)
2913 e
= expr2
->value
.function
.actual
->expr
;
2914 gcc_assert (e
->expr_type
== EXPR_VARIABLE
2915 && e
->symtree
!= NULL
2916 && e
->symtree
->n
.sym
== var
);
2918 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
2919 gfc_add_block_to_block (&block
, &rse
.pre
);
2920 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
2922 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
2923 gfc_actual_arglist
*arg
;
2925 gfc_add_modify (&block
, accum
, rse
.expr
);
2926 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
2929 gfc_init_block (&rse
.pre
);
2930 gfc_conv_expr (&rse
, arg
->expr
);
2931 gfc_add_block_to_block (&block
, &rse
.pre
);
2932 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
2934 gfc_add_modify (&block
, accum
, x
);
2940 expr2
= expr2
->value
.function
.actual
->next
->expr
;
2943 lhsaddr
= save_expr (lhsaddr
);
2944 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
2946 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2947 == GFC_OMP_ATOMIC_WRITE
)
2948 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
2952 x
= convert (TREE_TYPE (rhs
),
2953 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
2955 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
2957 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
2960 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
2961 && TREE_CODE (type
) != COMPLEX_TYPE
)
2962 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
2963 TREE_TYPE (TREE_TYPE (rhs
)), x
);
2965 gfc_add_block_to_block (&block
, &lse
.pre
);
2966 gfc_add_block_to_block (&block
, &rse
.pre
);
2968 if (aop
== OMP_ATOMIC
)
2970 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
2971 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
2972 gfc_add_expr_to_block (&block
, x
);
2976 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
2979 expr2
= code
->expr2
;
2980 if (expr2
->expr_type
== EXPR_FUNCTION
2981 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2982 expr2
= expr2
->value
.function
.actual
->expr
;
2984 gcc_assert (expr2
->expr_type
== EXPR_VARIABLE
);
2985 gfc_conv_expr (&vse
, code
->expr1
);
2986 gfc_add_block_to_block (&block
, &vse
.pre
);
2988 gfc_init_se (&lse
, NULL
);
2989 gfc_conv_expr (&lse
, expr2
);
2990 gfc_add_block_to_block (&block
, &lse
.pre
);
2992 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
2993 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
2994 x
= convert (TREE_TYPE (vse
.expr
), x
);
2995 gfc_add_modify (&block
, vse
.expr
, x
);
2998 return gfc_finish_block (&block
);
3002 gfc_trans_omp_barrier (void)
3004 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
3005 return build_call_expr_loc (input_location
, decl
, 0);
3009 gfc_trans_omp_cancel (gfc_code
*code
)
3012 tree ifc
= boolean_true_node
;
3014 switch (code
->ext
.omp_clauses
->cancel
)
3016 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3017 case OMP_CANCEL_DO
: mask
= 2; break;
3018 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3019 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3020 default: gcc_unreachable ();
3022 gfc_start_block (&block
);
3023 if (code
->ext
.omp_clauses
->if_expr
)
3028 gfc_init_se (&se
, NULL
);
3029 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->if_expr
);
3030 gfc_add_block_to_block (&block
, &se
.pre
);
3031 if_var
= gfc_evaluate_now (se
.expr
, &block
);
3032 gfc_add_block_to_block (&block
, &se
.post
);
3033 tree type
= TREE_TYPE (if_var
);
3034 ifc
= fold_build2_loc (input_location
, NE_EXPR
,
3035 boolean_type_node
, if_var
,
3036 build_zero_cst (type
));
3038 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
3039 tree c_bool_type
= TREE_TYPE (TREE_TYPE (decl
));
3040 ifc
= fold_convert (c_bool_type
, ifc
);
3041 gfc_add_expr_to_block (&block
,
3042 build_call_expr_loc (input_location
, decl
, 2,
3043 build_int_cst (integer_type_node
,
3045 return gfc_finish_block (&block
);
3049 gfc_trans_omp_cancellation_point (gfc_code
*code
)
3052 switch (code
->ext
.omp_clauses
->cancel
)
3054 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3055 case OMP_CANCEL_DO
: mask
= 2; break;
3056 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3057 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3058 default: gcc_unreachable ();
3060 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT
);
3061 return build_call_expr_loc (input_location
, decl
, 1,
3062 build_int_cst (integer_type_node
, mask
));
3066 gfc_trans_omp_critical (gfc_code
*code
)
3068 tree name
= NULL_TREE
, stmt
;
3069 if (code
->ext
.omp_name
!= NULL
)
3070 name
= get_identifier (code
->ext
.omp_name
);
3071 stmt
= gfc_trans_code (code
->block
->next
);
3072 return build2_loc (input_location
, OMP_CRITICAL
, void_type_node
, stmt
, name
);
3075 typedef struct dovar_init_d
{
3082 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
3083 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
3086 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
;
3087 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
3090 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
3091 int i
, collapse
= clauses
->collapse
;
3092 vec
<dovar_init
> inits
= vNULL
;
3099 code
= code
->block
->next
;
3100 gcc_assert (code
->op
== EXEC_DO
);
3102 init
= make_tree_vec (collapse
);
3103 cond
= make_tree_vec (collapse
);
3104 incr
= make_tree_vec (collapse
);
3108 gfc_start_block (&block
);
3112 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
3114 for (i
= 0; i
< collapse
; i
++)
3117 int dovar_found
= 0;
3122 gfc_omp_namelist
*n
= NULL
;
3123 if (op
!= EXEC_OMP_DISTRIBUTE
)
3124 for (n
= clauses
->lists
[(op
== EXEC_OMP_SIMD
&& collapse
== 1)
3125 ? OMP_LIST_LINEAR
: OMP_LIST_LASTPRIVATE
];
3126 n
!= NULL
; n
= n
->next
)
3127 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3131 else if (n
== NULL
&& op
!= EXEC_OMP_SIMD
)
3132 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
3133 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3139 /* Evaluate all the expressions in the iterator. */
3140 gfc_init_se (&se
, NULL
);
3141 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
3142 gfc_add_block_to_block (pblock
, &se
.pre
);
3144 type
= TREE_TYPE (dovar
);
3145 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
3147 gfc_init_se (&se
, NULL
);
3148 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
3149 gfc_add_block_to_block (pblock
, &se
.pre
);
3150 from
= gfc_evaluate_now (se
.expr
, pblock
);
3152 gfc_init_se (&se
, NULL
);
3153 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
3154 gfc_add_block_to_block (pblock
, &se
.pre
);
3155 to
= gfc_evaluate_now (se
.expr
, pblock
);
3157 gfc_init_se (&se
, NULL
);
3158 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
3159 gfc_add_block_to_block (pblock
, &se
.pre
);
3160 step
= gfc_evaluate_now (se
.expr
, pblock
);
3163 /* Special case simple loops. */
3164 if (TREE_CODE (dovar
) == VAR_DECL
)
3166 if (integer_onep (step
))
3168 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
3173 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
3179 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
3180 /* The condition should not be folded. */
3181 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
3182 ? LE_EXPR
: GE_EXPR
,
3183 boolean_type_node
, dovar
, to
);
3184 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3186 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3189 TREE_VEC_ELT (incr
, i
));
3193 /* STEP is not 1 or -1. Use:
3194 for (count = 0; count < (to + step - from) / step; count++)
3196 dovar = from + count * step;
3200 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
3201 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
3202 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
3204 tmp
= gfc_evaluate_now (tmp
, pblock
);
3205 count
= gfc_create_var (type
, "count");
3206 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
3207 build_int_cst (type
, 0));
3208 /* The condition should not be folded. */
3209 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
3212 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3214 build_int_cst (type
, 1));
3215 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3216 MODIFY_EXPR
, type
, count
,
3217 TREE_VEC_ELT (incr
, i
));
3219 /* Initialize DOVAR. */
3220 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
3221 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
3222 dovar_init e
= {dovar
, tmp
};
3223 inits
.safe_push (e
);
3228 if (op
== EXEC_OMP_SIMD
)
3232 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3233 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
3236 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3241 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3242 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
3243 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3245 if (dovar_found
== 2)
3252 /* If dovar is lastprivate, but different counter is used,
3253 dovar += step needs to be added to
3254 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
3255 will have the value on entry of the last loop, rather
3256 than value after iterator increment. */
3257 tmp
= gfc_evaluate_now (step
, pblock
);
3258 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, dovar
,
3260 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
3262 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3263 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3264 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3266 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
3269 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
3270 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3272 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
3276 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
3278 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3279 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3280 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3282 tree l
= build_omp_clause (input_location
,
3283 OMP_CLAUSE_LASTPRIVATE
);
3284 OMP_CLAUSE_DECL (l
) = dovar_decl
;
3285 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
3286 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
3288 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
3292 gcc_assert (simple
|| c
!= NULL
);
3296 if (op
!= EXEC_OMP_SIMD
)
3297 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3298 else if (collapse
== 1)
3300 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3301 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
3302 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3303 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
3306 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3307 OMP_CLAUSE_DECL (tmp
) = count
;
3308 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3311 if (i
+ 1 < collapse
)
3312 code
= code
->block
->next
;
3315 if (pblock
!= &block
)
3318 gfc_start_block (&block
);
3321 gfc_start_block (&body
);
3323 FOR_EACH_VEC_ELT (inits
, ix
, di
)
3324 gfc_add_modify (&body
, di
->var
, di
->init
);
3327 /* Cycle statement is implemented with a goto. Exit statement must not be
3328 present for this loop. */
3329 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3331 /* Put these labels where they can be found later. */
3333 code
->cycle_label
= cycle_label
;
3334 code
->exit_label
= NULL_TREE
;
3336 /* Main loop body. */
3337 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
3338 gfc_add_expr_to_block (&body
, tmp
);
3340 /* Label for cycle statements (if needed). */
3341 if (TREE_USED (cycle_label
))
3343 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3344 gfc_add_expr_to_block (&body
, tmp
);
3347 /* End of loop body. */
3350 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
3351 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
3352 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
3353 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
3354 default: gcc_unreachable ();
3357 TREE_TYPE (stmt
) = void_type_node
;
3358 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
3359 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
3360 OMP_FOR_INIT (stmt
) = init
;
3361 OMP_FOR_COND (stmt
) = cond
;
3362 OMP_FOR_INCR (stmt
) = incr
;
3363 gfc_add_expr_to_block (&block
, stmt
);
3365 return gfc_finish_block (&block
);
3368 /* parallel loop and kernels loop. */
3370 gfc_trans_oacc_combined_directive (gfc_code
*code
)
3372 stmtblock_t block
, *pblock
= NULL
;
3373 gfc_omp_clauses construct_clauses
, loop_clauses
;
3374 tree stmt
, oacc_clauses
= NULL_TREE
;
3375 enum tree_code construct_code
;
3379 case EXEC_OACC_PARALLEL_LOOP
:
3380 construct_code
= OACC_PARALLEL
;
3382 case EXEC_OACC_KERNELS_LOOP
:
3383 construct_code
= OACC_KERNELS
;
3389 gfc_start_block (&block
);
3391 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
3392 if (code
->ext
.omp_clauses
!= NULL
)
3394 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
3395 sizeof (construct_clauses
));
3396 loop_clauses
.collapse
= construct_clauses
.collapse
;
3397 loop_clauses
.gang
= construct_clauses
.gang
;
3398 loop_clauses
.vector
= construct_clauses
.vector
;
3399 loop_clauses
.worker
= construct_clauses
.worker
;
3400 loop_clauses
.seq
= construct_clauses
.seq
;
3401 loop_clauses
.independent
= construct_clauses
.independent
;
3402 construct_clauses
.collapse
= 0;
3403 construct_clauses
.gang
= false;
3404 construct_clauses
.vector
= false;
3405 construct_clauses
.worker
= false;
3406 construct_clauses
.seq
= false;
3407 construct_clauses
.independent
= false;
3408 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
3411 if (!loop_clauses
.seq
)
3415 stmt
= gfc_trans_omp_do (code
, code
->op
, pblock
, &loop_clauses
, NULL
);
3416 if (TREE_CODE (stmt
) != BIND_EXPR
)
3417 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3420 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
3422 if (code
->op
== EXEC_OACC_KERNELS_LOOP
)
3423 OACC_KERNELS_COMBINED (stmt
) = 1;
3425 OACC_PARALLEL_COMBINED (stmt
) = 1;
3426 gfc_add_expr_to_block (&block
, stmt
);
3427 return gfc_finish_block (&block
);
3431 gfc_trans_omp_flush (void)
3433 tree decl
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
3434 return build_call_expr_loc (input_location
, decl
, 0);
3438 gfc_trans_omp_master (gfc_code
*code
)
3440 tree stmt
= gfc_trans_code (code
->block
->next
);
3441 if (IS_EMPTY_STMT (stmt
))
3443 return build1_v (OMP_MASTER
, stmt
);
3447 gfc_trans_omp_ordered (gfc_code
*code
)
3449 return build1_v (OMP_ORDERED
, gfc_trans_code (code
->block
->next
));
3453 gfc_trans_omp_parallel (gfc_code
*code
)
3456 tree stmt
, omp_clauses
;
3458 gfc_start_block (&block
);
3459 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3461 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3462 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3464 gfc_add_expr_to_block (&block
, stmt
);
3465 return gfc_finish_block (&block
);
/* Indices of the constituent constructs an OpenMP 4.0 combined
   construct is split into by gfc_split_omp_clauses, ordered from
   innermost (SIMD) to outermost (TARGET).  */
enum
{
  GFC_OMP_SPLIT_SIMD,
  GFC_OMP_SPLIT_DO,
  GFC_OMP_SPLIT_PARALLEL,
  GFC_OMP_SPLIT_DISTRIBUTE,
  GFC_OMP_SPLIT_TEAMS,
  GFC_OMP_SPLIT_TARGET,
  GFC_OMP_SPLIT_NUM
};

/* Bit masks corresponding to the GFC_OMP_SPLIT_* indices, used to
   describe which constituents a combined construct contains.  */
enum
{
  GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
  GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
  GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
  GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
  GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
  GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET)
};
3490 gfc_split_omp_clauses (gfc_code
*code
,
3491 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
3493 int mask
= 0, innermost
= 0;
3494 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
3497 case EXEC_OMP_DISTRIBUTE
:
3498 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3500 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
3501 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3502 innermost
= GFC_OMP_SPLIT_DO
;
3504 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
3505 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
3506 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3507 innermost
= GFC_OMP_SPLIT_SIMD
;
3509 case EXEC_OMP_DISTRIBUTE_SIMD
:
3510 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3511 innermost
= GFC_OMP_SPLIT_SIMD
;
3514 innermost
= GFC_OMP_SPLIT_DO
;
3516 case EXEC_OMP_DO_SIMD
:
3517 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3518 innermost
= GFC_OMP_SPLIT_SIMD
;
3520 case EXEC_OMP_PARALLEL
:
3521 innermost
= GFC_OMP_SPLIT_PARALLEL
;
3523 case EXEC_OMP_PARALLEL_DO
:
3524 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3525 innermost
= GFC_OMP_SPLIT_DO
;
3527 case EXEC_OMP_PARALLEL_DO_SIMD
:
3528 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3529 innermost
= GFC_OMP_SPLIT_SIMD
;
3532 innermost
= GFC_OMP_SPLIT_SIMD
;
3534 case EXEC_OMP_TARGET
:
3535 innermost
= GFC_OMP_SPLIT_TARGET
;
3537 case EXEC_OMP_TARGET_TEAMS
:
3538 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
3539 innermost
= GFC_OMP_SPLIT_TEAMS
;
3541 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
3542 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
3543 | GFC_OMP_MASK_DISTRIBUTE
;
3544 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3546 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
3547 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3548 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3549 innermost
= GFC_OMP_SPLIT_DO
;
3551 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
3552 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3553 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3554 innermost
= GFC_OMP_SPLIT_SIMD
;
3556 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
3557 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
3558 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3559 innermost
= GFC_OMP_SPLIT_SIMD
;
3561 case EXEC_OMP_TEAMS
:
3562 innermost
= GFC_OMP_SPLIT_TEAMS
;
3564 case EXEC_OMP_TEAMS_DISTRIBUTE
:
3565 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
3566 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3568 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
3569 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3570 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3571 innermost
= GFC_OMP_SPLIT_DO
;
3573 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
3574 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3575 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3576 innermost
= GFC_OMP_SPLIT_SIMD
;
3578 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
3579 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3580 innermost
= GFC_OMP_SPLIT_SIMD
;
3587 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
3590 if (code
->ext
.omp_clauses
!= NULL
)
3592 if (mask
& GFC_OMP_MASK_TARGET
)
3594 /* First the clauses that are unique to some constructs. */
3595 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
3596 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
3597 clausesa
[GFC_OMP_SPLIT_TARGET
].device
3598 = code
->ext
.omp_clauses
->device
;
3600 if (mask
& GFC_OMP_MASK_TEAMS
)
3602 /* First the clauses that are unique to some constructs. */
3603 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
3604 = code
->ext
.omp_clauses
->num_teams
;
3605 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
3606 = code
->ext
.omp_clauses
->thread_limit
;
3607 /* Shared and default clauses are allowed on parallel and teams. */
3608 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
3609 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
3610 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
3611 = code
->ext
.omp_clauses
->default_sharing
;
3613 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
3615 /* First the clauses that are unique to some constructs. */
3616 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
3617 = code
->ext
.omp_clauses
->dist_sched_kind
;
3618 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
3619 = code
->ext
.omp_clauses
->dist_chunk_size
;
3620 /* Duplicate collapse. */
3621 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
3622 = code
->ext
.omp_clauses
->collapse
;
3624 if (mask
& GFC_OMP_MASK_PARALLEL
)
3626 /* First the clauses that are unique to some constructs. */
3627 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
3628 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
3629 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
3630 = code
->ext
.omp_clauses
->num_threads
;
3631 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
3632 = code
->ext
.omp_clauses
->proc_bind
;
3633 /* Shared and default clauses are allowed on parallel and teams. */
3634 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
3635 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
3636 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
3637 = code
->ext
.omp_clauses
->default_sharing
;
3639 if (mask
& GFC_OMP_MASK_DO
)
3641 /* First the clauses that are unique to some constructs. */
3642 clausesa
[GFC_OMP_SPLIT_DO
].ordered
3643 = code
->ext
.omp_clauses
->ordered
;
3644 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
3645 = code
->ext
.omp_clauses
->sched_kind
;
3646 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
3647 = code
->ext
.omp_clauses
->chunk_size
;
3648 clausesa
[GFC_OMP_SPLIT_DO
].nowait
3649 = code
->ext
.omp_clauses
->nowait
;
3650 /* Duplicate collapse. */
3651 clausesa
[GFC_OMP_SPLIT_DO
].collapse
3652 = code
->ext
.omp_clauses
->collapse
;
3654 if (mask
& GFC_OMP_MASK_SIMD
)
3656 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
3657 = code
->ext
.omp_clauses
->safelen_expr
;
3658 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LINEAR
]
3659 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
3660 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
3661 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
3662 /* Duplicate collapse. */
3663 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
3664 = code
->ext
.omp_clauses
->collapse
;
3666 /* Private clause is supported on all constructs but target,
3667 it is enough to put it on the innermost one. For
3668 !$ omp do put it on parallel though,
3669 as that's what we did for OpenMP 3.1. */
3670 clausesa
[innermost
== GFC_OMP_SPLIT_DO
3671 ? (int) GFC_OMP_SPLIT_PARALLEL
3672 : innermost
].lists
[OMP_LIST_PRIVATE
]
3673 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
3674 /* Firstprivate clause is supported on all constructs but
3675 target and simd. Put it on the outermost of those and
3676 duplicate on parallel. */
3677 if (mask
& GFC_OMP_MASK_TEAMS
)
3678 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
3679 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3680 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
3681 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
3682 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3683 if (mask
& GFC_OMP_MASK_PARALLEL
)
3684 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
3685 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3686 else if (mask
& GFC_OMP_MASK_DO
)
3687 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
3688 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3689 /* Lastprivate is allowed on do and simd. In
3690 parallel do{, simd} we actually want to put it on
3691 parallel rather than do. */
3692 if (mask
& GFC_OMP_MASK_PARALLEL
)
3693 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
3694 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3695 else if (mask
& GFC_OMP_MASK_DO
)
3696 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
3697 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3698 if (mask
& GFC_OMP_MASK_SIMD
)
3699 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
3700 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3701 /* Reduction is allowed on simd, do, parallel and teams.
3702 Duplicate it on all of them, but omit on do if
3703 parallel is present. */
3704 if (mask
& GFC_OMP_MASK_TEAMS
)
3705 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_REDUCTION
]
3706 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3707 if (mask
& GFC_OMP_MASK_PARALLEL
)
3708 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_REDUCTION
]
3709 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3710 else if (mask
& GFC_OMP_MASK_DO
)
3711 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_REDUCTION
]
3712 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3713 if (mask
& GFC_OMP_MASK_SIMD
)
3714 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_REDUCTION
]
3715 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3716 /* FIXME: This is currently being discussed. */
3717 if (mask
& GFC_OMP_MASK_PARALLEL
)
3718 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
3719 = code
->ext
.omp_clauses
->if_expr
;
3721 clausesa
[GFC_OMP_SPLIT_TARGET
].if_expr
3722 = code
->ext
.omp_clauses
->if_expr
;
3724 if ((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
3725 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
3726 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
3730 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
3731 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
3734 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3735 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
3738 gfc_start_block (&block
);
3740 gfc_init_block (&block
);
3742 if (clausesa
== NULL
)
3744 clausesa
= clausesa_buf
;
3745 gfc_split_omp_clauses (code
, clausesa
);
3747 if (gfc_option
.gfc_flag_openmp
)
3749 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
3750 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
3751 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
3754 if (TREE_CODE (body
) != BIND_EXPR
)
3755 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
3759 else if (TREE_CODE (body
) != BIND_EXPR
)
3760 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
3761 if (gfc_option
.gfc_flag_openmp
)
3763 stmt
= make_node (OMP_FOR
);
3764 TREE_TYPE (stmt
) = void_type_node
;
3765 OMP_FOR_BODY (stmt
) = body
;
3766 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
3770 gfc_add_expr_to_block (&block
, stmt
);
3771 return gfc_finish_block (&block
);
3775 gfc_trans_omp_parallel_do (gfc_code
*code
, stmtblock_t
*pblock
,
3776 gfc_omp_clauses
*clausesa
)
3778 stmtblock_t block
, *new_pblock
= pblock
;
3779 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3780 tree stmt
, omp_clauses
= NULL_TREE
;
3783 gfc_start_block (&block
);
3785 gfc_init_block (&block
);
3787 if (clausesa
== NULL
)
3789 clausesa
= clausesa_buf
;
3790 gfc_split_omp_clauses (code
, clausesa
);
3793 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
3797 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
3798 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
3799 new_pblock
= &block
;
3803 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DO
, new_pblock
,
3804 &clausesa
[GFC_OMP_SPLIT_DO
], omp_clauses
);
3807 if (TREE_CODE (stmt
) != BIND_EXPR
)
3808 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3812 else if (TREE_CODE (stmt
) != BIND_EXPR
)
3813 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
3814 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3816 OMP_PARALLEL_COMBINED (stmt
) = 1;
3817 gfc_add_expr_to_block (&block
, stmt
);
3818 return gfc_finish_block (&block
);
3822 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
3823 gfc_omp_clauses
*clausesa
)
3826 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3827 tree stmt
, omp_clauses
= NULL_TREE
;
3830 gfc_start_block (&block
);
3832 gfc_init_block (&block
);
3834 if (clausesa
== NULL
)
3836 clausesa
= clausesa_buf
;
3837 gfc_split_omp_clauses (code
, clausesa
);
3839 if (gfc_option
.gfc_flag_openmp
)
3841 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
3845 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
3848 if (TREE_CODE (stmt
) != BIND_EXPR
)
3849 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3853 else if (TREE_CODE (stmt
) != BIND_EXPR
)
3854 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
3855 if (gfc_option
.gfc_flag_openmp
)
3857 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3859 OMP_PARALLEL_COMBINED (stmt
) = 1;
3861 gfc_add_expr_to_block (&block
, stmt
);
3862 return gfc_finish_block (&block
);
3866 gfc_trans_omp_parallel_sections (gfc_code
*code
)
3869 gfc_omp_clauses section_clauses
;
3870 tree stmt
, omp_clauses
;
3872 memset (§ion_clauses
, 0, sizeof (section_clauses
));
3873 section_clauses
.nowait
= true;
3875 gfc_start_block (&block
);
3876 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3879 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
3880 if (TREE_CODE (stmt
) != BIND_EXPR
)
3881 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3884 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3886 OMP_PARALLEL_COMBINED (stmt
) = 1;
3887 gfc_add_expr_to_block (&block
, stmt
);
3888 return gfc_finish_block (&block
);
3892 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
3895 gfc_omp_clauses workshare_clauses
;
3896 tree stmt
, omp_clauses
;
3898 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
3899 workshare_clauses
.nowait
= true;
3901 gfc_start_block (&block
);
3902 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3905 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
3906 if (TREE_CODE (stmt
) != BIND_EXPR
)
3907 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3910 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3912 OMP_PARALLEL_COMBINED (stmt
) = 1;
3913 gfc_add_expr_to_block (&block
, stmt
);
3914 return gfc_finish_block (&block
);
3918 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
3920 stmtblock_t block
, body
;
3921 tree omp_clauses
, stmt
;
3922 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
3924 gfc_start_block (&block
);
3926 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
3928 gfc_init_block (&body
);
3929 for (code
= code
->block
; code
; code
= code
->block
)
3931 /* Last section is special because of lastprivate, so even if it
3932 is empty, chain it in. */
3933 stmt
= gfc_trans_omp_code (code
->next
,
3934 has_lastprivate
&& code
->block
== NULL
);
3935 if (! IS_EMPTY_STMT (stmt
))
3937 stmt
= build1_v (OMP_SECTION
, stmt
);
3938 gfc_add_expr_to_block (&body
, stmt
);
3941 stmt
= gfc_finish_block (&body
);
3943 stmt
= build2_loc (input_location
, OMP_SECTIONS
, void_type_node
, stmt
,
3945 gfc_add_expr_to_block (&block
, stmt
);
3947 return gfc_finish_block (&block
);
3951 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
3953 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, clauses
, code
->loc
);
3954 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3955 stmt
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, stmt
,
3961 gfc_trans_omp_task (gfc_code
*code
)
3964 tree stmt
, omp_clauses
;
3966 gfc_start_block (&block
);
3967 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3969 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3970 stmt
= build2_loc (input_location
, OMP_TASK
, void_type_node
, stmt
,
3972 gfc_add_expr_to_block (&block
, stmt
);
3973 return gfc_finish_block (&block
);
3977 gfc_trans_omp_taskgroup (gfc_code
*code
)
3979 tree stmt
= gfc_trans_code (code
->block
->next
);
3980 return build1_loc (input_location
, OMP_TASKGROUP
, void_type_node
, stmt
);
3984 gfc_trans_omp_taskwait (void)
3986 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
3987 return build_call_expr_loc (input_location
, decl
, 0);
3991 gfc_trans_omp_taskyield (void)
3993 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
3994 return build_call_expr_loc (input_location
, decl
, 0);
3998 gfc_trans_omp_distribute (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
4001 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4002 tree stmt
, omp_clauses
= NULL_TREE
;
4004 gfc_start_block (&block
);
4005 if (clausesa
== NULL
)
4007 clausesa
= clausesa_buf
;
4008 gfc_split_omp_clauses (code
, clausesa
);
4010 if (gfc_option
.gfc_flag_openmp
)
4012 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4016 case EXEC_OMP_DISTRIBUTE
:
4017 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4018 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4019 /* This is handled in gfc_trans_omp_do. */
4022 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4023 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4024 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4025 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
4026 if (TREE_CODE (stmt
) != BIND_EXPR
)
4027 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4031 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4032 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4033 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4034 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
4035 if (TREE_CODE (stmt
) != BIND_EXPR
)
4036 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4040 case EXEC_OMP_DISTRIBUTE_SIMD
:
4041 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4042 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4043 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4044 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4045 if (TREE_CODE (stmt
) != BIND_EXPR
)
4046 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4053 if (gfc_option
.gfc_flag_openmp
)
4055 tree distribute
= make_node (OMP_DISTRIBUTE
);
4056 TREE_TYPE (distribute
) = void_type_node
;
4057 OMP_FOR_BODY (distribute
) = stmt
;
4058 OMP_FOR_CLAUSES (distribute
) = omp_clauses
;
4061 gfc_add_expr_to_block (&block
, stmt
);
4062 return gfc_finish_block (&block
);
4066 gfc_trans_omp_teams (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
4069 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4070 tree stmt
, omp_clauses
= NULL_TREE
;
4072 gfc_start_block (&block
);
4073 if (clausesa
== NULL
)
4075 clausesa
= clausesa_buf
;
4076 gfc_split_omp_clauses (code
, clausesa
);
4078 if (gfc_option
.gfc_flag_openmp
)
4080 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TEAMS
],
4084 case EXEC_OMP_TARGET_TEAMS
:
4085 case EXEC_OMP_TEAMS
:
4086 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4088 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4089 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4090 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DISTRIBUTE
, NULL
,
4091 &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4095 stmt
= gfc_trans_omp_distribute (code
, clausesa
);
4098 stmt
= build2_loc (input_location
, OMP_TEAMS
, void_type_node
, stmt
,
4100 gfc_add_expr_to_block (&block
, stmt
);
4101 return gfc_finish_block (&block
);
4105 gfc_trans_omp_target (gfc_code
*code
)
4108 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
4109 tree stmt
, omp_clauses
= NULL_TREE
;
4111 gfc_start_block (&block
);
4112 gfc_split_omp_clauses (code
, clausesa
);
4113 if (gfc_option
.gfc_flag_openmp
)
4115 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TARGET
],
4117 if (code
->op
== EXEC_OMP_TARGET
)
4118 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4120 stmt
= gfc_trans_omp_teams (code
, clausesa
);
4121 if (TREE_CODE (stmt
) != BIND_EXPR
)
4122 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
4123 if (gfc_option
.gfc_flag_openmp
)
4124 stmt
= build2_loc (input_location
, OMP_TARGET
, void_type_node
, stmt
,
4126 gfc_add_expr_to_block (&block
, stmt
);
4127 return gfc_finish_block (&block
);
4131 gfc_trans_omp_target_data (gfc_code
*code
)
4134 tree stmt
, omp_clauses
;
4136 gfc_start_block (&block
);
4137 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4139 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4140 stmt
= build2_loc (input_location
, OMP_TARGET_DATA
, void_type_node
, stmt
,
4142 gfc_add_expr_to_block (&block
, stmt
);
4143 return gfc_finish_block (&block
);
4147 gfc_trans_omp_target_update (gfc_code
*code
)
4150 tree stmt
, omp_clauses
;
4152 gfc_start_block (&block
);
4153 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4155 stmt
= build1_loc (input_location
, OMP_TARGET_UPDATE
, void_type_node
,
4157 gfc_add_expr_to_block (&block
, stmt
);
4158 return gfc_finish_block (&block
);
4162 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4164 tree res
, tmp
, stmt
;
4165 stmtblock_t block
, *pblock
= NULL
;
4166 stmtblock_t singleblock
;
4167 int saved_ompws_flags
;
4168 bool singleblock_in_progress
= false;
4169 /* True if previous gfc_code in workshare construct is not workshared. */
4170 bool prev_singleunit
;
4172 code
= code
->block
->next
;
4176 gfc_start_block (&block
);
4179 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
4180 prev_singleunit
= false;
4182 /* Translate statements one by one to trees until we reach
4183 the end of the workshare construct. Adjacent gfc_codes that
4184 are a single unit of work are clustered and encapsulated in a
4185 single OMP_SINGLE construct. */
4186 for (; code
; code
= code
->next
)
4188 if (code
->here
!= 0)
4190 res
= gfc_trans_label_here (code
);
4191 gfc_add_expr_to_block (pblock
, res
);
4194 /* No dependence analysis, use for clauses with wait.
4195 If this is the last gfc_code, use default omp_clauses. */
4196 if (code
->next
== NULL
&& clauses
->nowait
)
4197 ompws_flags
|= OMPWS_NOWAIT
;
4199 /* By default, every gfc_code is a single unit of work. */
4200 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
4201 ompws_flags
&= ~OMPWS_SCALARIZER_WS
;
4210 res
= gfc_trans_assign (code
);
4213 case EXEC_POINTER_ASSIGN
:
4214 res
= gfc_trans_pointer_assign (code
);
4217 case EXEC_INIT_ASSIGN
:
4218 res
= gfc_trans_init_assign (code
);
4222 res
= gfc_trans_forall (code
);
4226 res
= gfc_trans_where (code
);
4229 case EXEC_OMP_ATOMIC
:
4230 res
= gfc_trans_omp_directive (code
);
4233 case EXEC_OMP_PARALLEL
:
4234 case EXEC_OMP_PARALLEL_DO
:
4235 case EXEC_OMP_PARALLEL_SECTIONS
:
4236 case EXEC_OMP_PARALLEL_WORKSHARE
:
4237 case EXEC_OMP_CRITICAL
:
4238 saved_ompws_flags
= ompws_flags
;
4240 res
= gfc_trans_omp_directive (code
);
4241 ompws_flags
= saved_ompws_flags
;
4245 internal_error ("gfc_trans_omp_workshare(): Bad statement code");
4248 gfc_set_backend_locus (&code
->loc
);
4250 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
4252 if (prev_singleunit
)
4254 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
4255 /* Add current gfc_code to single block. */
4256 gfc_add_expr_to_block (&singleblock
, res
);
4259 /* Finish single block and add it to pblock. */
4260 tmp
= gfc_finish_block (&singleblock
);
4261 tmp
= build2_loc (input_location
, OMP_SINGLE
,
4262 void_type_node
, tmp
, NULL_TREE
);
4263 gfc_add_expr_to_block (pblock
, tmp
);
4264 /* Add current gfc_code to pblock. */
4265 gfc_add_expr_to_block (pblock
, res
);
4266 singleblock_in_progress
= false;
4271 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
4273 /* Start single block. */
4274 gfc_init_block (&singleblock
);
4275 gfc_add_expr_to_block (&singleblock
, res
);
4276 singleblock_in_progress
= true;
4279 /* Add the new statement to the block. */
4280 gfc_add_expr_to_block (pblock
, res
);
4282 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
4286 /* Finish remaining SINGLE block, if we were in the middle of one. */
4287 if (singleblock_in_progress
)
4289 /* Finish single block and add it to pblock. */
4290 tmp
= gfc_finish_block (&singleblock
);
4291 tmp
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, tmp
,
4293 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
4295 gfc_add_expr_to_block (pblock
, tmp
);
4298 stmt
= gfc_finish_block (pblock
);
4299 if (TREE_CODE (stmt
) != BIND_EXPR
)
4301 if (!IS_EMPTY_STMT (stmt
))
4303 tree bindblock
= poplevel (1, 0);
4304 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
4312 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
4313 stmt
= gfc_trans_omp_barrier ();
4320 gfc_trans_oacc_declare (stmtblock_t
*block
, gfc_namespace
*ns
)
4323 oacc_clauses
= gfc_trans_omp_clauses (block
, ns
->oacc_declare_clauses
,
4324 ns
->oacc_declare_clauses
->ext
.loc
);
4325 return build1_loc (ns
->oacc_declare_clauses
->ext
.loc
.lb
->location
,
4326 OACC_DECLARE
, void_type_node
, oacc_clauses
);
4330 gfc_trans_oacc_directive (gfc_code
*code
)
4334 case EXEC_OACC_PARALLEL_LOOP
:
4335 case EXEC_OACC_KERNELS_LOOP
:
4336 return gfc_trans_oacc_combined_directive (code
);
4337 case EXEC_OACC_PARALLEL
:
4338 case EXEC_OACC_KERNELS
:
4339 case EXEC_OACC_DATA
:
4340 case EXEC_OACC_HOST_DATA
:
4341 return gfc_trans_oacc_construct (code
);
4342 case EXEC_OACC_LOOP
:
4343 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
4345 case EXEC_OACC_UPDATE
:
4346 case EXEC_OACC_WAIT
:
4347 case EXEC_OACC_CACHE
:
4348 case EXEC_OACC_ENTER_DATA
:
4349 case EXEC_OACC_EXIT_DATA
:
4350 return gfc_trans_oacc_executable_directive (code
);
4357 gfc_trans_omp_directive (gfc_code
*code
)
4361 case EXEC_OMP_ATOMIC
:
4362 return gfc_trans_omp_atomic (code
);
4363 case EXEC_OMP_BARRIER
:
4364 return gfc_trans_omp_barrier ();
4365 case EXEC_OMP_CANCEL
:
4366 return gfc_trans_omp_cancel (code
);
4367 case EXEC_OMP_CANCELLATION_POINT
:
4368 return gfc_trans_omp_cancellation_point (code
);
4369 case EXEC_OMP_CRITICAL
:
4370 return gfc_trans_omp_critical (code
);
4371 case EXEC_OMP_DISTRIBUTE
:
4374 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
4376 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4377 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4378 case EXEC_OMP_DISTRIBUTE_SIMD
:
4379 return gfc_trans_omp_distribute (code
, NULL
);
4380 case EXEC_OMP_DO_SIMD
:
4381 return gfc_trans_omp_do_simd (code
, NULL
, NULL
, NULL_TREE
);
4382 case EXEC_OMP_FLUSH
:
4383 return gfc_trans_omp_flush ();
4384 case EXEC_OMP_MASTER
:
4385 return gfc_trans_omp_master (code
);
4386 case EXEC_OMP_ORDERED
:
4387 return gfc_trans_omp_ordered (code
);
4388 case EXEC_OMP_PARALLEL
:
4389 return gfc_trans_omp_parallel (code
);
4390 case EXEC_OMP_PARALLEL_DO
:
4391 return gfc_trans_omp_parallel_do (code
, NULL
, NULL
);
4392 case EXEC_OMP_PARALLEL_DO_SIMD
:
4393 return gfc_trans_omp_parallel_do_simd (code
, NULL
, NULL
);
4394 case EXEC_OMP_PARALLEL_SECTIONS
:
4395 return gfc_trans_omp_parallel_sections (code
);
4396 case EXEC_OMP_PARALLEL_WORKSHARE
:
4397 return gfc_trans_omp_parallel_workshare (code
);
4398 case EXEC_OMP_SECTIONS
:
4399 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
4400 case EXEC_OMP_SINGLE
:
4401 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
4402 case EXEC_OMP_TARGET
:
4403 case EXEC_OMP_TARGET_TEAMS
:
4404 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4405 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4406 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4407 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4408 return gfc_trans_omp_target (code
);
4409 case EXEC_OMP_TARGET_DATA
:
4410 return gfc_trans_omp_target_data (code
);
4411 case EXEC_OMP_TARGET_UPDATE
:
4412 return gfc_trans_omp_target_update (code
);
4414 return gfc_trans_omp_task (code
);
4415 case EXEC_OMP_TASKGROUP
:
4416 return gfc_trans_omp_taskgroup (code
);
4417 case EXEC_OMP_TASKWAIT
:
4418 return gfc_trans_omp_taskwait ();
4419 case EXEC_OMP_TASKYIELD
:
4420 return gfc_trans_omp_taskyield ();
4421 case EXEC_OMP_TEAMS
:
4422 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4423 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4424 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4425 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4426 return gfc_trans_omp_teams (code
, NULL
);
4427 case EXEC_OMP_WORKSHARE
:
4428 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);
4435 gfc_trans_omp_declare_simd (gfc_namespace
*ns
)
4440 gfc_omp_declare_simd
*ods
;
4441 for (ods
= ns
->omp_declare_simd
; ods
; ods
= ods
->next
)
4443 tree c
= gfc_trans_omp_clauses (NULL
, ods
->clauses
, ods
->where
, true);
4444 tree fndecl
= ns
->proc_name
->backend_decl
;
4446 c
= tree_cons (NULL_TREE
, c
, NULL_TREE
);
4447 c
= build_tree_list (get_identifier ("omp declare simd"), c
);
4448 TREE_CHAIN (c
) = DECL_ATTRIBUTES (fndecl
);
4449 DECL_ATTRIBUTES (fndecl
) = c
;