/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "gimple-expr.h"
#include "gimplify.h"		/* For create_tmp_var_raw.  */
#include "stringpool.h"
#include "diagnostic-core.h"	/* For internal_error.  */
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "gomp-constants.h"
/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
gfc_omp_privatize_by_reference (const_tree decl)
{
  tree type = TREE_TYPE (decl);

  if (TREE_CODE (type) == REFERENCE_TYPE
      && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
    return true;

  if (TREE_CODE (type) == POINTER_TYPE)
    {
      /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
	 that have POINTER_TYPE type and aren't scalar pointers, scalar
	 allocatables, Cray pointees or C pointers are supposed to be
	 privatized by reference.  */
      if (GFC_DECL_GET_SCALAR_POINTER (decl)
	  || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
	  || GFC_DECL_CRAY_POINTEE (decl)
	  || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
	return false;

      if (!DECL_ARTIFICIAL (decl)
	  && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
	return true;

      /* Some arrays are expanded as DECL_ARTIFICIAL pointers
	 by the frontend.  */
      if (DECL_LANG_SPECIFIC (decl)
	  && GFC_DECL_SAVED_DESCRIPTOR (decl))
	return true;
    }

  return false;
}
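/* Illustrative note: an explicit-shape dummy array such as
   "integer :: a(n)" reaches the middle end as a pointer to the array
   data, so PRIVATE(A) must privatize the pointed-to storage rather
   than the pointer itself; scalar POINTERs/ALLOCATABLEs, Cray
   pointees and C pointers keep their by-value handling.  */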
/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
gfc_omp_predetermined_sharing (tree decl)
{
  /* Associate names preserve the association established during ASSOCIATE.
     As they are implemented either as pointers to the selector or array
     descriptor and shouldn't really change in the ASSOCIATE region,
     this decl can be either shared or firstprivate.  If it is a pointer,
     use firstprivate, as it is cheaper that way, otherwise make it shared.  */
  if (GFC_DECL_ASSOCIATE_VAR_P (decl))
    {
      if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
	return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
      else
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  if (DECL_ARTIFICIAL (decl)
      && ! GFC_DECL_RESULT (decl)
      && ! (DECL_LANG_SPECIFIC (decl)
	    && GFC_DECL_SAVED_DESCRIPTOR (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Cray pointees shouldn't be listed in any clauses and should be
     gimplified to dereference of the corresponding Cray pointer.
     Make them all private, so that they are emitted in the debug
     information.  */
  if (GFC_DECL_CRAY_POINTEE (decl))
    return OMP_CLAUSE_DEFAULT_PRIVATE;

  /* Assumed-size arrays are predetermined shared.  */
  if (TREE_CODE (decl) == PARM_DECL
      && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
      && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
				GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
	 == NULL)
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Dummy procedures aren't considered variables by OpenMP, thus are
     disallowed in OpenMP clauses.  They are represented as PARM_DECLs
     in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
     to avoid complaining about their uses with default(none).  */
  if (TREE_CODE (decl) == PARM_DECL
      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;

  /* COMMON and EQUIVALENCE decls are shared.  They are only referenced
     through DECL_VALUE_EXPR of the variables contained in them.  If
     those are privatized, they will not be gimplified to the COMMON or
     EQUIVALENCE decls.  */
  if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* These are either array or derived parameters, or vtables.  In the
     former cases, the OpenMP standard doesn't consider them to be
     variables at all (they can't be redefined), but they can
     nevertheless appear in parallel/task regions, so for default(none)
     purposes treat them as shared.  For vtables likely the same
     handling is desirable.  */
  if (TREE_CODE (decl) == VAR_DECL
      && TREE_READONLY (decl)
      && TREE_STATIC (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
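/* Note: any kind other than UNSPECIFIED means the symbol never needs
   an explicit data-sharing clause; e.g. an assumed-size dummy
   ("integer :: a(*)") is always shared, since its full extent is
   unknown and it therefore cannot be copied into a private copy.  */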
/* Return decl that should be used when reporting DEFAULT(NONE)
   diagnostics.  */

tree
gfc_omp_report_decl (tree decl)
{
  if (DECL_ARTIFICIAL (decl)
      && DECL_LANG_SPECIFIC (decl)
      && GFC_DECL_SAVED_DESCRIPTOR (decl))
    return GFC_DECL_SAVED_DESCRIPTOR (decl);

  return decl;
}
/* Return true if TYPE has any allocatable components.  */

static bool
gfc_has_alloc_comps (tree type, tree decl)
{
  tree field, ftype;

  if (POINTER_TYPE_P (type))
    {
      if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
	type = TREE_TYPE (type);
      else if (GFC_DECL_GET_SCALAR_POINTER (decl))
	return false;
    }

  if (GFC_DESCRIPTOR_TYPE_P (type) || GFC_ARRAY_TYPE_P (type))
    type = gfc_get_element_type (type);

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      ftype = TREE_TYPE (field);
      if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	return true;
      if (GFC_DESCRIPTOR_TYPE_P (ftype)
	  && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	return true;
      if (gfc_has_alloc_comps (ftype, field))
	return true;
    }
  return false;
}
/* Return true if DECL in private clause needs
   OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause.  */

bool
gfc_omp_private_outer_ref (tree decl)
{
  tree type = TREE_TYPE (decl);

  if (GFC_DESCRIPTOR_TYPE_P (type)
      && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
    return true;

  if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
    return true;

  if (gfc_omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  if (gfc_has_alloc_comps (type, decl))
    return true;

  return false;
}
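/* OMP_CLAUSE_PRIVATE_OUTER_REF tells the gimplifier that constructing
   the private copy must look at the outer (shared) variable, e.g. to
   copy the allocation status and shape of an ALLOCATABLE before the
   body of the construct runs (see gfc_omp_clause_default_ctor).  */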
/* Callback for gfc_omp_unshare_expr.  */

static tree
gfc_omp_unshare_expr_r (tree *tp, int *walk_subtrees, void *)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Stop at types, decls, constants like copy_tree_r.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  else if (handled_component_p (t)
	   || TREE_CODE (t) == MEM_REF)
    {
      *tp = unshare_expr (t);
      *walk_subtrees = 0;
    }

  return NULL_TREE;
}
/* Unshare in EXPR anything that the FE, which normally doesn't care
   much about tree sharing (because during gimplification everything is
   unshared), could cause tree-sharing problems with at omp-low.c time.  */

static tree
gfc_omp_unshare_expr (tree expr)
{
  walk_tree (&expr, gfc_omp_unshare_expr_r, NULL, NULL);
  return expr;
}
enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,
  WALK_ALLOC_COMPS_DEFAULT_CTOR,
  WALK_ALLOC_COMPS_COPY_CTOR
};
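/* These modes select what gfc_walk_alloc_comps below emits for every
   allocatable component found: DTOR deallocates it, DEFAULT_CTOR
   allocates the private copy with the same shape without copying the
   payload, and COPY_CTOR allocates it and copies the contents.  */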
/* Handle allocatable components in OpenMP clauses.  */

static tree
gfc_walk_alloc_comps (tree decl, tree dest, tree var,
		      enum walk_alloc_comps kind)
{
  stmtblock_t block, tmpblock;
  tree type = TREE_TYPE (decl), then_b, tem, field;
  gfc_init_block (&block);

  if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
    {
      if (GFC_DESCRIPTOR_TYPE_P (type))
	{
	  gfc_init_block (&tmpblock);
	  tem = gfc_full_array_size (&tmpblock, decl,
				     GFC_TYPE_ARRAY_RANK (type));
	  then_b = gfc_finish_block (&tmpblock);
	  gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (then_b));
	  tem = gfc_omp_unshare_expr (tem);
	  tem = fold_build2_loc (input_location, MINUS_EXPR,
				 gfc_array_index_type, tem,
				 gfc_index_one_node);
	}
      else
	{
	  if (!TYPE_DOMAIN (type)
	      || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
	      || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
	      || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
	    {
	      tem = fold_build2 (EXACT_DIV_EXPR, sizetype,
				 TYPE_SIZE_UNIT (type),
				 TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      tem = size_binop (MINUS_EXPR, tem, size_one_node);
	    }
	  else
	    tem = array_type_nelts (type);
	  tem = fold_convert (gfc_array_index_type, tem);
	}

      tree nelems = gfc_evaluate_now (tem, &block);
      tree index = gfc_create_var (gfc_array_index_type, "S");

      gfc_init_block (&tmpblock);
      tem = gfc_conv_array_data (decl);
      tree declvar = build_fold_indirect_ref_loc (input_location, tem);
      tree declvref = gfc_build_array_ref (declvar, index, NULL);
      tree destvar, destvref = NULL_TREE;
      if (dest)
	{
	  tem = gfc_conv_array_data (dest);
	  destvar = build_fold_indirect_ref_loc (input_location, tem);
	  destvref = gfc_build_array_ref (destvar, index, NULL);
	}
      gfc_add_expr_to_block (&tmpblock,
			     gfc_walk_alloc_comps (declvref, destvref,
						   var, kind));

      gfc_loopinfo loop;
      gfc_init_loopinfo (&loop);
      loop.dimen = 1;
      loop.from[0] = gfc_index_zero_node;
      loop.loopvar[0] = index;
      loop.to[0] = nelems;
      gfc_trans_scalarizing_loops (&loop, &tmpblock);
      gfc_add_block_to_block (&block, &loop.pre);
      return gfc_finish_block (&block);
    }
  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var))
    {
      decl = build_fold_indirect_ref_loc (input_location, decl);
      if (dest)
	dest = build_fold_indirect_ref_loc (input_location, dest);
      type = TREE_TYPE (decl);
    }

  gcc_assert (TREE_CODE (type) == RECORD_TYPE);
  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree ftype = TREE_TYPE (field);
      tree declf, destf = NULL_TREE;
      bool has_alloc_comps = gfc_has_alloc_comps (ftype, field);
      if ((!GFC_DESCRIPTOR_TYPE_P (ftype)
	   || GFC_TYPE_ARRAY_AKIND (ftype) != GFC_ARRAY_ALLOCATABLE)
	  && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field)
	  && !has_alloc_comps)
	continue;
      declf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
			       decl, field, NULL_TREE);
      if (dest)
	destf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
				 dest, field, NULL_TREE);

      tem = NULL_TREE;
      switch (kind)
	{
	case WALK_ALLOC_COMPS_DTOR:
	  break;
	case WALK_ALLOC_COMPS_DEFAULT_CTOR:
	  if (GFC_DESCRIPTOR_TYPE_P (ftype)
	      && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	    {
	      gfc_add_modify (&block, unshare_expr (destf),
			      unshare_expr (declf));
	      tem = gfc_duplicate_allocatable_nocopy
					(destf, declf, ftype,
					 GFC_TYPE_ARRAY_RANK (ftype));
	    }
	  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	    tem = gfc_duplicate_allocatable_nocopy (destf, declf, ftype, 0);
	  break;
	case WALK_ALLOC_COMPS_COPY_CTOR:
	  if (GFC_DESCRIPTOR_TYPE_P (ftype)
	      && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	    tem = gfc_duplicate_allocatable (destf, declf, ftype,
					     GFC_TYPE_ARRAY_RANK (ftype));
	  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	    tem = gfc_duplicate_allocatable (destf, declf, ftype, 0);
	  break;
	}
      if (tem)
	gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
      if (has_alloc_comps)
	{
	  gfc_init_block (&tmpblock);
	  gfc_add_expr_to_block (&tmpblock,
				 gfc_walk_alloc_comps (declf, destf,
						       field, kind));
	  then_b = gfc_finish_block (&tmpblock);
	  if (GFC_DESCRIPTOR_TYPE_P (ftype)
	      && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	    tem = gfc_conv_descriptor_data_get (unshare_expr (declf));
	  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	    tem = unshare_expr (declf);
	  else
	    tem = NULL_TREE;
	  if (tem)
	    {
	      tem = fold_convert (pvoid_type_node, tem);
	      tem = fold_build2_loc (input_location, NE_EXPR,
				     boolean_type_node, tem,
				     null_pointer_node);
	      then_b = build3_loc (input_location, COND_EXPR, void_type_node,
				   tem, then_b,
				   build_empty_stmt (input_location));
	    }
	  gfc_add_expr_to_block (&block, then_b);
	}
      if (kind == WALK_ALLOC_COMPS_DTOR)
	{
	  if (GFC_DESCRIPTOR_TYPE_P (ftype)
	      && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	    {
	      tem = gfc_trans_dealloc_allocated (unshare_expr (declf),
						 false, NULL);
	      gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
	    }
	  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	    {
	      tem = gfc_call_free (unshare_expr (declf));
	      gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
	    }
	}
    }

  return gfc_finish_block (&block);
}
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
{
  tree type = TREE_TYPE (decl), size, ptr, cond, then_b, else_b;
  stmtblock_t block, cond_block;

  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
	      || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE
	      || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR
	      || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_REDUCTION);

  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
    {
      if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
	{
	  gfc_start_block (&block);
	  tree tem = gfc_walk_alloc_comps (outer, decl,
					   OMP_CLAUSE_DECL (clause),
					   WALK_ALLOC_COMPS_DEFAULT_CTOR);
	  gfc_add_expr_to_block (&block, tem);
	  return gfc_finish_block (&block);
	}
      return NULL_TREE;
    }

  gcc_assert (outer != NULL_TREE);

  /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
     "not currently allocated" allocation status if outer
     array is "not currently allocated", otherwise should be allocated.  */
  gfc_start_block (&block);

  gfc_init_block (&cond_block);

  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      gfc_add_modify (&cond_block, decl, outer);
      tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (decl, rank);
      size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			      size,
			      gfc_conv_descriptor_lbound_get (decl, rank));
      size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			      size, gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
	size = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type, size,
				gfc_conv_descriptor_stride_get (decl, rank));
      tree esize = fold_convert (gfc_array_index_type,
				 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      size, esize);
      size = unshare_expr (size);
      size = gfc_evaluate_now (fold_convert (size_type_node, size),
			       &cond_block);
    }
  else
    size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
  ptr = gfc_create_var (pvoid_type_node, NULL);
  gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl), ptr);
  else
    gfc_add_modify (&cond_block, unshare_expr (decl),
		    fold_convert (TREE_TYPE (decl), ptr));
  if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
    {
      tree tem = gfc_walk_alloc_comps (outer, decl,
				       OMP_CLAUSE_DECL (clause),
				       WALK_ALLOC_COMPS_DEFAULT_CTOR);
      gfc_add_expr_to_block (&cond_block, tem);
    }
  then_b = gfc_finish_block (&cond_block);

  /* Reduction clause requires allocated ALLOCATABLE.  */
  if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_REDUCTION)
    {
      gfc_init_block (&cond_block);
      if (GFC_DESCRIPTOR_TYPE_P (type))
	gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl),
				      null_pointer_node);
      else
	gfc_add_modify (&cond_block, unshare_expr (decl),
			build_zero_cst (TREE_TYPE (decl)));
      else_b = gfc_finish_block (&cond_block);

      tree tem = fold_convert (pvoid_type_node,
			       GFC_DESCRIPTOR_TYPE_P (type)
			       ? gfc_conv_descriptor_data_get (outer) : outer);
      tem = unshare_expr (tem);
      cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			      tem, null_pointer_node);
      gfc_add_expr_to_block (&block,
			     build3_loc (input_location, COND_EXPR,
					 void_type_node, cond, then_b,
					 else_b));
    }
  else
    gfc_add_expr_to_block (&block, then_b);

  return gfc_finish_block (&block);
}
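/* E.g. for "!$omp parallel private (a)" with "real, allocatable :: a(:)"
   the code built above allocates the private copy only when the outer A
   is currently allocated; for REDUCTION the copy is allocated
   unconditionally, since a reduction ALLOCATABLE must be allocated.  */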
/* Build and return code for a copy constructor from SRC to DEST.  */

tree
gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), ptr, size, call;
  tree cond, then_b, else_b;
  stmtblock_t block, cond_block;

  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE
	      || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);

  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
    {
      if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
	{
	  gfc_start_block (&block);
	  gfc_add_modify (&block, dest, src);
	  tree tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
					   WALK_ALLOC_COMPS_COPY_CTOR);
	  gfc_add_expr_to_block (&block, tem);
	  return gfc_finish_block (&block);
	}
      else
	return build2_v (MODIFY_EXPR, dest, src);
    }

  /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
     and copied from SRC.  */
  gfc_start_block (&block);

  gfc_init_block (&cond_block);

  gfc_add_modify (&cond_block, dest, src);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (dest, rank);
      size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			      size,
			      gfc_conv_descriptor_lbound_get (dest, rank));
      size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			      size, gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
	size = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type, size,
				gfc_conv_descriptor_stride_get (dest, rank));
      tree esize = fold_convert (gfc_array_index_type,
				 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      size, esize);
      size = unshare_expr (size);
      size = gfc_evaluate_now (fold_convert (size_type_node, size),
			       &cond_block);
    }
  else
    size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
  ptr = gfc_create_var (pvoid_type_node, NULL);
  gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest), ptr);
  else
    gfc_add_modify (&cond_block, unshare_expr (dest),
		    fold_convert (TREE_TYPE (dest), ptr));

  tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
		? gfc_conv_descriptor_data_get (src) : src;
  srcptr = unshare_expr (srcptr);
  srcptr = fold_convert (pvoid_type_node, srcptr);
  call = build_call_expr_loc (input_location,
			      builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
			      srcptr, size);
  gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
  if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
    {
      tree tem = gfc_walk_alloc_comps (src, dest,
				       OMP_CLAUSE_DECL (clause),
				       WALK_ALLOC_COMPS_COPY_CTOR);
      gfc_add_expr_to_block (&cond_block, tem);
    }
  then_b = gfc_finish_block (&cond_block);

  gfc_init_block (&cond_block);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest),
				  null_pointer_node);
  else
    gfc_add_modify (&cond_block, unshare_expr (dest),
		    build_zero_cst (TREE_TYPE (dest)));
  else_b = gfc_finish_block (&cond_block);

  cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			  unshare_expr (srcptr), null_pointer_node);
  gfc_add_expr_to_block (&block,
			 build3_loc (input_location, COND_EXPR,
				     void_type_node, cond, then_b, else_b));

  return gfc_finish_block (&block);
}
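/* E.g. "!$omp parallel firstprivate (a)" with "real, allocatable :: a(:)"
   copies the descriptor, allocates a buffer of the same size and
   memcpy()s the payload; if the outer A is unallocated, the private
   copy is left unallocated as well.  */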
/* Similarly, except use an intrinsic or pointer assignment operator
   instead.  */

tree
gfc_omp_clause_assign_op (tree clause, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), ptr, size, call, nonalloc;
  tree cond, then_b, else_b;
  stmtblock_t block, cond_block, cond_block2, inner_block;

  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
    {
      if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
	{
	  gfc_start_block (&block);
	  /* First dealloc any allocatable components in DEST.  */
	  tree tem = gfc_walk_alloc_comps (dest, NULL_TREE,
					   OMP_CLAUSE_DECL (clause),
					   WALK_ALLOC_COMPS_DTOR);
	  gfc_add_expr_to_block (&block, tem);
	  /* Then copy over toplevel data.  */
	  gfc_add_modify (&block, dest, src);
	  /* Finally allocate any allocatable components and copy.  */
	  tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
				      WALK_ALLOC_COMPS_COPY_CTOR);
	  gfc_add_expr_to_block (&block, tem);
	  return gfc_finish_block (&block);
	}
      else
	return build2_v (MODIFY_EXPR, dest, src);
    }

  gfc_start_block (&block);

  if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
    {
      then_b = gfc_walk_alloc_comps (dest, NULL_TREE, OMP_CLAUSE_DECL (clause),
				     WALK_ALLOC_COMPS_DTOR);
      tree tem = fold_convert (pvoid_type_node,
			       GFC_DESCRIPTOR_TYPE_P (type)
			       ? gfc_conv_descriptor_data_get (dest) : dest);
      tem = unshare_expr (tem);
      cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			      tem, null_pointer_node);
      tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
			then_b, build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block, tem);
    }

  gfc_init_block (&cond_block);

  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (src, rank);
      size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			      size,
			      gfc_conv_descriptor_lbound_get (src, rank));
      size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			      size, gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
	size = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type, size,
				gfc_conv_descriptor_stride_get (src, rank));
      tree esize = fold_convert (gfc_array_index_type,
				 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      size, esize);
      size = unshare_expr (size);
      size = gfc_evaluate_now (fold_convert (size_type_node, size),
			       &cond_block);
    }
  else
    size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
  ptr = gfc_create_var (pvoid_type_node, NULL);

  tree destptr = GFC_DESCRIPTOR_TYPE_P (type)
		 ? gfc_conv_descriptor_data_get (dest) : dest;
  destptr = unshare_expr (destptr);
  destptr = fold_convert (pvoid_type_node, destptr);
  gfc_add_modify (&cond_block, ptr, destptr);

  nonalloc = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
			      destptr, null_pointer_node);
  cond = nonalloc;
  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      int i;
      for (i = 0; i < GFC_TYPE_ARRAY_RANK (type); i++)
	{
	  tree rank = gfc_rank_cst[i];
	  tree tem = gfc_conv_descriptor_ubound_get (src, rank);
	  tem = fold_build2_loc (input_location, MINUS_EXPR,
				 gfc_array_index_type, tem,
				 gfc_conv_descriptor_lbound_get (src, rank));
	  tem = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type, tem,
				 gfc_conv_descriptor_lbound_get (dest, rank));
	  tem = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				 tem, gfc_conv_descriptor_ubound_get (dest,
								      rank));
	  cond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
				  boolean_type_node, cond, tem);
	}
    }

  gfc_init_block (&cond_block2);

  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      gfc_init_block (&inner_block);
      gfc_allocate_using_malloc (&inner_block, ptr, size, NULL_TREE);
      then_b = gfc_finish_block (&inner_block);

      gfc_init_block (&inner_block);
      gfc_add_modify (&inner_block, ptr,
		      gfc_call_realloc (&inner_block, ptr, size));
      else_b = gfc_finish_block (&inner_block);

      gfc_add_expr_to_block (&cond_block2,
			     build3_loc (input_location, COND_EXPR,
					 void_type_node,
					 unshare_expr (nonalloc),
					 then_b, else_b));
      gfc_add_modify (&cond_block2, dest, src);
      gfc_conv_descriptor_data_set (&cond_block2, unshare_expr (dest), ptr);
    }
  else
    {
      gfc_allocate_using_malloc (&cond_block2, ptr, size, NULL_TREE);
      gfc_add_modify (&cond_block2, unshare_expr (dest),
		      fold_convert (type, ptr));
    }
  then_b = gfc_finish_block (&cond_block2);
  else_b = build_empty_stmt (input_location);

  gfc_add_expr_to_block (&cond_block,
			 build3_loc (input_location, COND_EXPR,
				     void_type_node, unshare_expr (cond),
				     then_b, else_b));

  tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
		? gfc_conv_descriptor_data_get (src) : src;
  srcptr = unshare_expr (srcptr);
  srcptr = fold_convert (pvoid_type_node, srcptr);
  call = build_call_expr_loc (input_location,
			      builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
			      srcptr, size);
  gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
  if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
    {
      tree tem = gfc_walk_alloc_comps (src, dest,
				       OMP_CLAUSE_DECL (clause),
				       WALK_ALLOC_COMPS_COPY_CTOR);
      gfc_add_expr_to_block (&cond_block, tem);
    }
  then_b = gfc_finish_block (&cond_block);

  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_COPYIN)
    {
      gfc_init_block (&cond_block);
      if (GFC_DESCRIPTOR_TYPE_P (type))
	gfc_add_expr_to_block (&cond_block,
			       gfc_trans_dealloc_allocated (unshare_expr (dest),
							    false, NULL));
      else
	{
	  destptr = gfc_evaluate_now (destptr, &cond_block);
	  gfc_add_expr_to_block (&cond_block, gfc_call_free (destptr));
	  gfc_add_modify (&cond_block, unshare_expr (dest),
			  build_zero_cst (TREE_TYPE (dest)));
	}
      else_b = gfc_finish_block (&cond_block);

      cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			      unshare_expr (srcptr), null_pointer_node);
      gfc_add_expr_to_block (&block,
			     build3_loc (input_location, COND_EXPR,
					 void_type_node, cond,
					 then_b, else_b));
    }
  else
    gfc_add_expr_to_block (&block, then_b);

  return gfc_finish_block (&block);
}
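/* This assignment path (used e.g. for COPYIN and for copying values
   back out of the construct) may have to reallocate the destination:
   the code above reuses the existing allocation when the bounds match,
   realloc()s when they do not, and for COPYIN deallocates the
   destination when the source is unallocated.  */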
static void
gfc_omp_linear_clause_add_loop (stmtblock_t *block, tree dest, tree src,
				tree add, tree nelems)
{
  stmtblock_t tmpblock;
  tree desta, srca, index = gfc_create_var (gfc_array_index_type, "S");
  nelems = gfc_evaluate_now (nelems, block);

  gfc_init_block (&tmpblock);
  if (TREE_CODE (TREE_TYPE (dest)) == ARRAY_TYPE)
    {
      desta = gfc_build_array_ref (dest, index, NULL);
      srca = gfc_build_array_ref (src, index, NULL);
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest)));
      tree idx = fold_build2 (MULT_EXPR, sizetype,
			      fold_convert (sizetype, index),
			      TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest))));
      desta = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
						    TREE_TYPE (dest), dest,
						    idx));
      srca = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
						   TREE_TYPE (src), src,
						   idx));
    }
  gfc_add_modify (&tmpblock, desta,
		  fold_build2 (PLUS_EXPR, TREE_TYPE (desta),
			       srca, add));

  gfc_loopinfo loop;
  gfc_init_loopinfo (&loop);
  loop.dimen = 1;
  loop.from[0] = gfc_index_zero_node;
  loop.loopvar[0] = index;
  loop.to[0] = nelems;
  gfc_trans_scalarizing_loops (&loop, &tmpblock);
  gfc_add_block_to_block (block, &loop.pre);
}
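/* The helper above emits a scalarized loop that assigns
   dest(i) = src(i) + add element by element; it is used by the LINEAR
   clause handling that follows.  */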
/* Build and return code for a constructor of DEST that initializes
   it to SRC plus ADD (ADD is scalar integer).  */

tree
gfc_omp_clause_linear_ctor (tree clause, tree dest, tree src, tree add)
{
  tree type = TREE_TYPE (dest), ptr, size, nelems = NULL_TREE;
  stmtblock_t block;

  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);

  gfc_start_block (&block);
  add = gfc_evaluate_now (add, &block);

  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      if (!TYPE_DOMAIN (type)
	  || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
	  || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
	  || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
	{
	  nelems = fold_build2 (EXACT_DIV_EXPR, sizetype,
				TYPE_SIZE_UNIT (type),
				TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  nelems = size_binop (MINUS_EXPR, nelems, size_one_node);
	}
      else
	nelems = array_type_nelts (type);
      nelems = fold_convert (gfc_array_index_type, nelems);

      gfc_omp_linear_clause_add_loop (&block, dest, src, add, nelems);
      return gfc_finish_block (&block);
    }

  /* Allocatable arrays in LINEAR clauses need to be allocated
     and copied from SRC.  */
  gfc_add_modify (&block, dest, src);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (dest, rank);
      size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			      size,
			      gfc_conv_descriptor_lbound_get (dest, rank));
      size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			      size, gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
	size = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type, size,
				gfc_conv_descriptor_stride_get (dest, rank));
      tree esize = fold_convert (gfc_array_index_type,
				 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      nelems = gfc_evaluate_now (unshare_expr (size), &block);
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      nelems, unshare_expr (esize));
      size = gfc_evaluate_now (fold_convert (size_type_node, size),
			       &block);
      nelems = fold_build2_loc (input_location, MINUS_EXPR,
				gfc_array_index_type, nelems,
				gfc_index_one_node);
    }
  else
    size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
  ptr = gfc_create_var (pvoid_type_node, NULL);
  gfc_allocate_using_malloc (&block, ptr, size, NULL_TREE);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      gfc_conv_descriptor_data_set (&block, unshare_expr (dest), ptr);
      tree etype = gfc_get_element_type (type);
      ptr = fold_convert (build_pointer_type (etype), ptr);
      tree srcptr = gfc_conv_descriptor_data_get (unshare_expr (src));
      srcptr = fold_convert (build_pointer_type (etype), srcptr);
      gfc_omp_linear_clause_add_loop (&block, ptr, srcptr, add, nelems);
    }
  else
    {
      gfc_add_modify (&block, unshare_expr (dest),
		      fold_convert (TREE_TYPE (dest), ptr));
      ptr = fold_convert (TREE_TYPE (dest), ptr);
      tree dstm = build_fold_indirect_ref (ptr);
      tree srcm = build_fold_indirect_ref (unshare_expr (src));
      gfc_add_modify (&block, dstm,
		      fold_build2 (PLUS_EXPR, TREE_TYPE (add),
				   srcm, add));
    }
  return gfc_finish_block (&block);
}
/* Build and return code destructing DECL.  Return NULL if nothing
   to do.  */

tree
gfc_omp_clause_dtor (tree clause, tree decl)
{
  tree type = TREE_TYPE (decl), tem;

  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
    {
      if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
	return gfc_walk_alloc_comps (decl, NULL_TREE,
				     OMP_CLAUSE_DECL (clause),
				     WALK_ALLOC_COMPS_DTOR);
      return NULL_TREE;
    }

  if (GFC_DESCRIPTOR_TYPE_P (type))
    /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
       to be deallocated if they were allocated.  */
    tem = gfc_trans_dealloc_allocated (decl, false, NULL);
  else
    tem = gfc_call_free (decl);
  tem = gfc_omp_unshare_expr (tem);

  if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
    {
      stmtblock_t block;
      tree then_b;

      gfc_init_block (&block);
      gfc_add_expr_to_block (&block,
			     gfc_walk_alloc_comps (decl, NULL_TREE,
						   OMP_CLAUSE_DECL (clause),
						   WALK_ALLOC_COMPS_DTOR));
      gfc_add_expr_to_block (&block, tem);
      then_b = gfc_finish_block (&block);

      tem = fold_convert (pvoid_type_node,
			  GFC_DESCRIPTOR_TYPE_P (type)
			  ? gfc_conv_descriptor_data_get (decl) : decl);
      tem = unshare_expr (tem);
      tree cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				   tem, null_pointer_node);
      tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
			then_b, build_empty_stmt (input_location));
    }
  return tem;
}
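/* The destructor frees the privatized ALLOCATABLE at the end of the
   construct; when the type has allocatable components, the free is
   guarded by an "is it allocated?" check so the component walk is not
   run on an unallocated object.  */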
void
gfc_omp_finish_clause (tree c, gimple_seq *pre_p)
{
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
    return;

  tree decl = OMP_CLAUSE_DECL (c);
  tree c2 = NULL_TREE, c3 = NULL_TREE, c4 = NULL_TREE;
  if (POINTER_TYPE_P (TREE_TYPE (decl)))
    {
      if (!gfc_omp_privatize_by_reference (decl)
	  && !GFC_DECL_GET_SCALAR_POINTER (decl)
	  && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
	  && !GFC_DECL_CRAY_POINTEE (decl)
	  && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
	return;
      tree orig_decl = decl;
      c4 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c4, GOMP_MAP_POINTER);
      OMP_CLAUSE_DECL (c4) = decl;
      OMP_CLAUSE_SIZE (c4) = size_int (0);
      decl = build_fold_indirect_ref (decl);
      OMP_CLAUSE_DECL (c) = decl;
      OMP_CLAUSE_SIZE (c) = NULL_TREE;
      if (TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE
	  && (GFC_DECL_GET_SCALAR_POINTER (orig_decl)
	      || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl)))
	{
	  c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_SET_MAP_KIND (c3, GOMP_MAP_POINTER);
	  OMP_CLAUSE_DECL (c3) = unshare_expr (decl);
	  OMP_CLAUSE_SIZE (c3) = size_int (0);
	  decl = build_fold_indirect_ref (decl);
	  OMP_CLAUSE_DECL (c) = decl;
	}
    }
  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
    {
      stmtblock_t block;
      gfc_start_block (&block);
      tree type = TREE_TYPE (decl);
      tree ptr = gfc_conv_descriptor_data_get (decl);
      ptr = fold_convert (build_pointer_type (char_type_node), ptr);
      ptr = build_fold_indirect_ref (ptr);
      OMP_CLAUSE_DECL (c) = ptr;
      c2 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_TO_PSET);
      OMP_CLAUSE_DECL (c2) = decl;
      OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (type);
      c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c3, GOMP_MAP_POINTER);
      OMP_CLAUSE_DECL (c3) = gfc_conv_descriptor_data_get (decl);
      OMP_CLAUSE_SIZE (c3) = size_int (0);
      tree size = create_tmp_var (gfc_array_index_type);
      tree elemsz = TYPE_SIZE_UNIT (gfc_get_element_type (type));
      elemsz = fold_convert (gfc_array_index_type, elemsz);
      if (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER
	  || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT)
	{
	  stmtblock_t cond_block;
	  tree tem, then_b, else_b, zero, cond;

	  gfc_init_block (&cond_block);
	  tem = gfc_full_array_size (&cond_block, decl,
				     GFC_TYPE_ARRAY_RANK (type));
	  gfc_add_modify (&cond_block, size, tem);
	  gfc_add_modify (&cond_block, size,
			  fold_build2 (MULT_EXPR, gfc_array_index_type,
				       size, elemsz));
	  then_b = gfc_finish_block (&cond_block);
	  gfc_init_block (&cond_block);
	  zero = build_int_cst (gfc_array_index_type, 0);
	  gfc_add_modify (&cond_block, size, zero);
	  else_b = gfc_finish_block (&cond_block);
	  tem = gfc_conv_descriptor_data_get (decl);
	  tem = fold_convert (pvoid_type_node, tem);
	  cond = fold_build2_loc (input_location, NE_EXPR,
				  boolean_type_node, tem, null_pointer_node);
	  gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
						     void_type_node, cond,
						     then_b, else_b));
	}
      else
	{
	  gfc_add_modify (&block, size,
			  gfc_full_array_size (&block, decl,
					       GFC_TYPE_ARRAY_RANK (type)));
	  gfc_add_modify (&block, size,
			  fold_build2 (MULT_EXPR, gfc_array_index_type,
				       size, elemsz));
	}
      OMP_CLAUSE_SIZE (c) = size;
      tree stmt = gfc_finish_block (&block);
      gimplify_and_add (stmt, pre_p);
    }
  tree last = c;
  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
    OMP_CLAUSE_SIZE (c)
      = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
		      : TYPE_SIZE_UNIT (TREE_TYPE (decl));
  if (c2)
    {
      OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (last);
      OMP_CLAUSE_CHAIN (last) = c2;
      last = c2;
    }
  if (c3)
    {
      OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (last);
      OMP_CLAUSE_CHAIN (last) = c3;
      last = c3;
    }
  if (c4)
    {
      OMP_CLAUSE_CHAIN (c4) = OMP_CLAUSE_CHAIN (last);
      OMP_CLAUSE_CHAIN (last) = c4;
      last = c4;
    }
}
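/* In other words, a single map clause on a descriptor-backed array is
   expanded here into a mapping of the data, a GOMP_MAP_TO_PSET mapping
   of the descriptor and a GOMP_MAP_POINTER mapping of the data
   pointer, with the transfer size computed at run time and the extra
   clauses chained in after the original one.  */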
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
gfc_omp_disregard_value_expr (tree decl, bool shared)
{
  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
	  && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
	{
	  /* If variable in COMMON or EQUIVALENCE is privatized, return
	     true, as just that variable is supposed to be privatized,
	     not the whole COMMON or whole EQUIVALENCE.
	     For shared variables in COMMON or EQUIVALENCE, let them be
	     gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
	     from the same COMMON or EQUIVALENCE just one sharing of the
	     whole COMMON or EQUIVALENCE is enough.  */
	  return ! shared;
	}
    }

  if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
    return true;

  return false;
}
/* Return true if DECL that is shared iff SHARED is true should
   be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
   flag set.  */

bool
gfc_omp_private_debug_clause (tree decl, bool shared)
{
  if (GFC_DECL_CRAY_POINTEE (decl))
    return true;

  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
	  && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
	return shared;
    }

  return false;
}
/* Register language specific type size variables as potentially OpenMP
   firstprivate variables.  */

void
gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
    {
      int r;

      gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
      for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
	{
	  omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
	  omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
	  omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
	}
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
    }
}
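/* The bound, stride, size and offset trees firstprivatized here are
   the helper variables gfortran keeps in the array type's
   lang-specific data; privatized copies of such arrays need them
   inside the outlined region to know their shape.  */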
static tree
gfc_trans_add_clause (tree node, tree tail)
{
  OMP_CLAUSE_CHAIN (node) = tail;
  return node;
}
1241 gfc_trans_omp_variable (gfc_symbol
*sym
, bool declare_simd
)
1246 gfc_symbol
*proc_sym
;
1247 gfc_formal_arglist
*f
;
1249 gcc_assert (sym
->attr
.dummy
);
1250 proc_sym
= sym
->ns
->proc_name
;
1251 if (proc_sym
->attr
.entry_master
)
1253 if (gfc_return_by_reference (proc_sym
))
1256 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1259 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1265 return build_int_cst (integer_type_node
, cnt
);
1268 tree t
= gfc_get_symbol_decl (sym
);
1272 bool alternate_entry
;
1275 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1276 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1277 && sym
->result
== sym
;
1278 entry_master
= sym
->attr
.result
1279 && sym
->ns
->proc_name
->attr
.entry_master
1280 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1281 parent_decl
= current_function_decl
1282 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1284 if ((t
== parent_decl
&& return_value
)
1285 || (sym
->ns
&& sym
->ns
->proc_name
1286 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1287 && (alternate_entry
|| entry_master
)))
1292 /* Special case for assigning the return value of a function.
1293 Self recursive functions must have an explicit return value. */
1294 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1295 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1297 /* Similarly for alternate entry points. */
1298 else if (alternate_entry
1299 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1302 gfc_entry_list
*el
= NULL
;
1304 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1307 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1312 else if (entry_master
1313 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1315 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
static tree
gfc_trans_omp_variable_list (enum omp_clause_code code,
			     gfc_omp_namelist *namelist, tree list,
			     bool declare_simd)
{
  for (; namelist != NULL; namelist = namelist->next)
    if (namelist->sym->attr.referenced || declare_simd)
      {
	tree t = gfc_trans_omp_variable (namelist->sym, declare_simd);
	if (t != error_mark_node)
	  {
	    tree node = build_omp_clause (input_location, code);
	    OMP_CLAUSE_DECL (node) = t;
	    list = gfc_trans_add_clause (node, list);
	  }
      }
  return list;
}
struct omp_udr_find_orig_data
{
  gfc_omp_udr *omp_udr;
  bool omp_orig_seen;
};

static int
omp_udr_find_orig (gfc_expr **e, int *walk_subtrees ATTRIBUTE_UNUSED,
		   void *data)
{
  struct omp_udr_find_orig_data *cd = (struct omp_udr_find_orig_data *) data;
  if ((*e)->expr_type == EXPR_VARIABLE
      && (*e)->symtree->n.sym == cd->omp_udr->omp_orig)
    cd->omp_orig_seen = true;

  return 0;
}
1358 gfc_trans_omp_array_reduction_or_udr (tree c
, gfc_omp_namelist
*n
, locus where
)
1360 gfc_symbol
*sym
= n
->sym
;
1361 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
1362 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
1363 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
1364 gfc_symbol omp_var_copy
[4];
1365 gfc_expr
*e1
, *e2
, *e3
, *e4
;
1367 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
1368 locus old_loc
= gfc_current_locus
;
1371 gfc_omp_udr
*udr
= n
->udr
? n
->udr
->udr
: NULL
;
1373 decl
= OMP_CLAUSE_DECL (c
);
1374 gfc_current_locus
= where
;
1375 type
= TREE_TYPE (decl
);
1376 outer_decl
= create_tmp_var_raw (type
);
1377 if (TREE_CODE (decl
) == PARM_DECL
1378 && TREE_CODE (type
) == REFERENCE_TYPE
1379 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
1380 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
1382 decl
= build_fold_indirect_ref (decl
);
1383 type
= TREE_TYPE (type
);
1386 /* Create a fake symbol for init value. */
1387 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
1388 init_val_sym
.ns
= sym
->ns
;
1389 init_val_sym
.name
= sym
->name
;
1390 init_val_sym
.ts
= sym
->ts
;
1391 init_val_sym
.attr
.referenced
= 1;
1392 init_val_sym
.declared_at
= where
;
1393 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
1394 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1395 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
1396 else if (udr
->initializer_ns
)
1397 backend_decl
= NULL
;
1399 switch (sym
->ts
.type
)
1405 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
1408 backend_decl
= NULL_TREE
;
1411 init_val_sym
.backend_decl
= backend_decl
;
1413 /* Create a fake symbol for the outer array reference. */
1416 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
1417 outer_sym
.attr
.dummy
= 0;
1418 outer_sym
.attr
.result
= 0;
1419 outer_sym
.attr
.flavor
= FL_VARIABLE
;
1420 outer_sym
.backend_decl
= outer_decl
;
1421 if (decl
!= OMP_CLAUSE_DECL (c
))
1422 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
1424 /* Create fake symtrees for it. */
1425 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
1426 symtree1
->n
.sym
= sym
;
1427 gcc_assert (symtree1
== root1
);
1429 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
1430 symtree2
->n
.sym
= &init_val_sym
;
1431 gcc_assert (symtree2
== root2
);
1433 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
1434 symtree3
->n
.sym
= &outer_sym
;
1435 gcc_assert (symtree3
== root3
);
1437 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
1440 omp_var_copy
[0] = *udr
->omp_out
;
1441 omp_var_copy
[1] = *udr
->omp_in
;
1442 *udr
->omp_out
= outer_sym
;
1443 *udr
->omp_in
= *sym
;
1444 if (udr
->initializer_ns
)
1446 omp_var_copy
[2] = *udr
->omp_priv
;
1447 omp_var_copy
[3] = *udr
->omp_orig
;
1448 *udr
->omp_priv
= *sym
;
1449 *udr
->omp_orig
= outer_sym
;
1453 /* Create expressions. */
1454 e1
= gfc_get_expr ();
1455 e1
->expr_type
= EXPR_VARIABLE
;
1457 e1
->symtree
= symtree1
;
1459 if (sym
->attr
.dimension
)
1461 e1
->ref
= ref
= gfc_get_ref ();
1462 ref
->type
= REF_ARRAY
;
1463 ref
->u
.ar
.where
= where
;
1464 ref
->u
.ar
.as
= sym
->as
;
1465 ref
->u
.ar
.type
= AR_FULL
;
1466 ref
->u
.ar
.dimen
= 0;
1468 t
= gfc_resolve_expr (e1
);
1472 if (backend_decl
!= NULL_TREE
)
1474 e2
= gfc_get_expr ();
1475 e2
->expr_type
= EXPR_VARIABLE
;
1477 e2
->symtree
= symtree2
;
1479 t
= gfc_resolve_expr (e2
);
1482 else if (udr
->initializer_ns
== NULL
)
1484 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
1485 e2
= gfc_default_initializer (&sym
->ts
);
1487 t
= gfc_resolve_expr (e2
);
1490 else if (n
->udr
->initializer
->op
== EXEC_ASSIGN
)
1492 e2
= gfc_copy_expr (n
->udr
->initializer
->expr2
);
1493 t
= gfc_resolve_expr (e2
);
1496 if (udr
&& udr
->initializer_ns
)
1498 struct omp_udr_find_orig_data cd
;
1500 cd
.omp_orig_seen
= false;
1501 gfc_code_walker (&n
->udr
->initializer
,
1502 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
1503 if (cd
.omp_orig_seen
)
1504 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
1507 e3
= gfc_copy_expr (e1
);
1508 e3
->symtree
= symtree3
;
1509 t
= gfc_resolve_expr (e3
);
1514 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
1518 e4
= gfc_add (e3
, e1
);
1521 e4
= gfc_multiply (e3
, e1
);
1523 case TRUTH_ANDIF_EXPR
:
1524 e4
= gfc_and (e3
, e1
);
1526 case TRUTH_ORIF_EXPR
:
1527 e4
= gfc_or (e3
, e1
);
1530 e4
= gfc_eqv (e3
, e1
);
1533 e4
= gfc_neqv (e3
, e1
);
1551 if (n
->udr
->combiner
->op
== EXEC_ASSIGN
)
1554 e3
= gfc_copy_expr (n
->udr
->combiner
->expr1
);
1555 e4
= gfc_copy_expr (n
->udr
->combiner
->expr2
);
1556 t
= gfc_resolve_expr (e3
);
1558 t
= gfc_resolve_expr (e4
);
1567 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
1568 intrinsic_sym
.ns
= sym
->ns
;
1569 intrinsic_sym
.name
= iname
;
1570 intrinsic_sym
.ts
= sym
->ts
;
1571 intrinsic_sym
.attr
.referenced
= 1;
1572 intrinsic_sym
.attr
.intrinsic
= 1;
1573 intrinsic_sym
.attr
.function
= 1;
1574 intrinsic_sym
.result
= &intrinsic_sym
;
1575 intrinsic_sym
.declared_at
= where
;
1577 symtree4
= gfc_new_symtree (&root4
, iname
);
1578 symtree4
->n
.sym
= &intrinsic_sym
;
1579 gcc_assert (symtree4
== root4
);
1581 e4
= gfc_get_expr ();
1582 e4
->expr_type
= EXPR_FUNCTION
;
1584 e4
->symtree
= symtree4
;
1585 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
1586 e4
->value
.function
.actual
->expr
= e3
;
1587 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
1588 e4
->value
.function
.actual
->next
->expr
= e1
;
1590 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1592 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1593 e1
= gfc_copy_expr (e1
);
1594 e3
= gfc_copy_expr (e3
);
1595 t
= gfc_resolve_expr (e4
);
1599 /* Create the init statement list. */
1602 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
1604 stmt
= gfc_trans_call (n
->udr
->initializer
, false,
1605 NULL_TREE
, NULL_TREE
, false);
1606 if (TREE_CODE (stmt
) != BIND_EXPR
)
1607 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1610 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
1612 /* Create the merge statement list. */
1615 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
1617 stmt
= gfc_trans_call (n
->udr
->combiner
, false,
1618 NULL_TREE
, NULL_TREE
, false);
1619 if (TREE_CODE (stmt
) != BIND_EXPR
)
1620 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1623 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
1625 /* And stick the placeholder VAR_DECL into the clause as well. */
1626 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
1628 gfc_current_locus
= old_loc
;
1641 gfc_free_array_spec (outer_sym
.as
);
1645 *udr
->omp_out
= omp_var_copy
[0];
1646 *udr
->omp_in
= omp_var_copy
[1];
1647 if (udr
->initializer_ns
)
1649 *udr
->omp_priv
= omp_var_copy
[2];
1650 *udr
->omp_orig
= omp_var_copy
[3];
static tree
gfc_trans_omp_reduction_list (gfc_omp_namelist *namelist, tree list,
			      locus where)
{
  for (; namelist != NULL; namelist = namelist->next)
    if (namelist->sym->attr.referenced)
      {
	tree t = gfc_trans_omp_variable (namelist->sym, false);
	if (t != error_mark_node)
	  {
	    tree node = build_omp_clause (where.lb->location,
					  OMP_CLAUSE_REDUCTION);
	    OMP_CLAUSE_DECL (node) = t;
	    switch (namelist->u.reduction_op)
	      {
	      case OMP_REDUCTION_PLUS:
		OMP_CLAUSE_REDUCTION_CODE (node) = PLUS_EXPR;
		break;
	      case OMP_REDUCTION_MINUS:
		OMP_CLAUSE_REDUCTION_CODE (node) = MINUS_EXPR;
		break;
	      case OMP_REDUCTION_TIMES:
		OMP_CLAUSE_REDUCTION_CODE (node) = MULT_EXPR;
		break;
	      case OMP_REDUCTION_AND:
		OMP_CLAUSE_REDUCTION_CODE (node) = TRUTH_ANDIF_EXPR;
		break;
	      case OMP_REDUCTION_OR:
		OMP_CLAUSE_REDUCTION_CODE (node) = TRUTH_ORIF_EXPR;
		break;
	      case OMP_REDUCTION_EQV:
		OMP_CLAUSE_REDUCTION_CODE (node) = EQ_EXPR;
		break;
	      case OMP_REDUCTION_NEQV:
		OMP_CLAUSE_REDUCTION_CODE (node) = NE_EXPR;
		break;
	      case OMP_REDUCTION_MAX:
		OMP_CLAUSE_REDUCTION_CODE (node) = MAX_EXPR;
		break;
	      case OMP_REDUCTION_MIN:
		OMP_CLAUSE_REDUCTION_CODE (node) = MIN_EXPR;
		break;
	      case OMP_REDUCTION_IAND:
		OMP_CLAUSE_REDUCTION_CODE (node) = BIT_AND_EXPR;
		break;
	      case OMP_REDUCTION_IOR:
		OMP_CLAUSE_REDUCTION_CODE (node) = BIT_IOR_EXPR;
		break;
	      case OMP_REDUCTION_IEOR:
		OMP_CLAUSE_REDUCTION_CODE (node) = BIT_XOR_EXPR;
		break;
	      case OMP_REDUCTION_USER:
		OMP_CLAUSE_REDUCTION_CODE (node) = ERROR_MARK;
		break;
	      default:
		gcc_unreachable ();
	      }
	    if (namelist->sym->attr.dimension
		|| namelist->u.reduction_op == OMP_REDUCTION_USER
		|| namelist->sym->attr.allocatable)
	      gfc_trans_omp_array_reduction_or_udr (node, namelist, where);
	    list = gfc_trans_add_clause (node, list);
	  }
      }
  return list;
}
static tree
gfc_convert_expr_to_tree (stmtblock_t *block, gfc_expr *expr)
{
  gfc_se se;
  tree result;

  gfc_init_se (&se, NULL);
  gfc_conv_expr (&se, expr);
  gfc_add_block_to_block (block, &se.pre);
  result = gfc_evaluate_now (se.expr, block);
  gfc_add_block_to_block (block, &se.post);

  return result;
}
1738 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
1739 locus where
, bool declare_simd
= false)
1741 tree omp_clauses
= NULL_TREE
, chunk_size
, c
;
1743 enum omp_clause_code clause_code
;
1746 if (clauses
== NULL
)
1749 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
1751 gfc_omp_namelist
*n
= clauses
->lists
[list
];
1757 case OMP_LIST_REDUCTION
:
1758 omp_clauses
= gfc_trans_omp_reduction_list (n
, omp_clauses
, where
);
1760 case OMP_LIST_PRIVATE
:
1761 clause_code
= OMP_CLAUSE_PRIVATE
;
1763 case OMP_LIST_SHARED
:
1764 clause_code
= OMP_CLAUSE_SHARED
;
1766 case OMP_LIST_FIRSTPRIVATE
:
1767 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
1769 case OMP_LIST_LASTPRIVATE
:
1770 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
1772 case OMP_LIST_COPYIN
:
1773 clause_code
= OMP_CLAUSE_COPYIN
;
1775 case OMP_LIST_COPYPRIVATE
:
1776 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
1778 case OMP_LIST_UNIFORM
:
1779 clause_code
= OMP_CLAUSE_UNIFORM
;
1781 case OMP_LIST_USE_DEVICE
:
1782 clause_code
= OMP_CLAUSE_USE_DEVICE
;
1784 case OMP_LIST_DEVICE_RESIDENT
:
1785 clause_code
= OMP_CLAUSE_DEVICE_RESIDENT
;
1787 case OMP_LIST_CACHE
:
1788 clause_code
= OMP_CLAUSE__CACHE_
;
1793 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
1796 case OMP_LIST_ALIGNED
:
1797 for (; n
!= NULL
; n
= n
->next
)
1798 if (n
->sym
->attr
.referenced
|| declare_simd
)
1800 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1801 if (t
!= error_mark_node
)
1803 tree node
= build_omp_clause (input_location
,
1804 OMP_CLAUSE_ALIGNED
);
1805 OMP_CLAUSE_DECL (node
) = t
;
1811 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
1814 gfc_init_se (&se
, NULL
);
1815 gfc_conv_expr (&se
, n
->expr
);
1816 gfc_add_block_to_block (block
, &se
.pre
);
1817 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
1818 gfc_add_block_to_block (block
, &se
.post
);
1820 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
1822 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1826 case OMP_LIST_LINEAR
:
1828 gfc_expr
*last_step_expr
= NULL
;
1829 tree last_step
= NULL_TREE
;
1831 for (; n
!= NULL
; n
= n
->next
)
1835 last_step_expr
= n
->expr
;
1836 last_step
= NULL_TREE
;
1838 if (n
->sym
->attr
.referenced
|| declare_simd
)
1840 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1841 if (t
!= error_mark_node
)
1843 tree node
= build_omp_clause (input_location
,
1845 OMP_CLAUSE_DECL (node
) = t
;
1846 if (last_step_expr
&& last_step
== NULL_TREE
)
1850 = gfc_conv_constant_to_tree (last_step_expr
);
1853 gfc_init_se (&se
, NULL
);
1854 gfc_conv_expr (&se
, last_step_expr
);
1855 gfc_add_block_to_block (block
, &se
.pre
);
1856 last_step
= gfc_evaluate_now (se
.expr
, block
);
1857 gfc_add_block_to_block (block
, &se
.post
);
1860 OMP_CLAUSE_LINEAR_STEP (node
)
1861 = fold_convert (gfc_typenode_for_spec (&n
->sym
->ts
),
1863 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
1864 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
1865 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1871 case OMP_LIST_DEPEND
:
1872 for (; n
!= NULL
; n
= n
->next
)
1874 if (!n
->sym
->attr
.referenced
)
1877 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_DEPEND
);
1878 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
1880 tree decl
= gfc_get_symbol_decl (n
->sym
);
1881 if (gfc_omp_privatize_by_reference (decl
))
1882 decl
= build_fold_indirect_ref (decl
);
1883 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1885 decl
= gfc_conv_descriptor_data_get (decl
);
1886 decl
= fold_convert (build_pointer_type (char_type_node
),
1888 decl
= build_fold_indirect_ref (decl
);
1890 else if (DECL_P (decl
))
1891 TREE_ADDRESSABLE (decl
) = 1;
1892 OMP_CLAUSE_DECL (node
) = decl
;
1897 gfc_init_se (&se
, NULL
);
1898 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
1900 gfc_conv_expr_reference (&se
, n
->expr
);
1905 gfc_conv_expr_descriptor (&se
, n
->expr
);
1906 ptr
= gfc_conv_array_data (se
.expr
);
1908 gfc_add_block_to_block (block
, &se
.pre
);
1909 gfc_add_block_to_block (block
, &se
.post
);
1910 ptr
= fold_convert (build_pointer_type (char_type_node
),
1912 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
1914 switch (n
->u
.depend_op
)
1917 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
1919 case OMP_DEPEND_OUT
:
1920 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
1922 case OMP_DEPEND_INOUT
:
1923 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
1928 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1932 for (; n
!= NULL
; n
= n
->next
)
1934 if (!n
->sym
->attr
.referenced
)
1937 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1938 tree node2
= NULL_TREE
;
1939 tree node3
= NULL_TREE
;
1940 tree node4
= NULL_TREE
;
1941 tree decl
= gfc_get_symbol_decl (n
->sym
);
1943 TREE_ADDRESSABLE (decl
) = 1;
1944 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
1946 if (POINTER_TYPE_P (TREE_TYPE (decl
))
1947 && (gfc_omp_privatize_by_reference (decl
)
1948 || GFC_DECL_GET_SCALAR_POINTER (decl
)
1949 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1950 || GFC_DECL_CRAY_POINTEE (decl
)
1951 || GFC_DESCRIPTOR_TYPE_P
1952 (TREE_TYPE (TREE_TYPE (decl
)))))
1954 tree orig_decl
= decl
;
1955 node4
= build_omp_clause (input_location
,
1957 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
1958 OMP_CLAUSE_DECL (node4
) = decl
;
1959 OMP_CLAUSE_SIZE (node4
) = size_int (0);
1960 decl
= build_fold_indirect_ref (decl
);
1961 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1962 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1963 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1965 node3
= build_omp_clause (input_location
,
1967 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
1968 OMP_CLAUSE_DECL (node3
) = decl
;
1969 OMP_CLAUSE_SIZE (node3
) = size_int (0);
1970 decl
= build_fold_indirect_ref (decl
);
1973 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1975 tree type
= TREE_TYPE (decl
);
1976 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1977 ptr
= fold_convert (build_pointer_type (char_type_node
),
1979 ptr
= build_fold_indirect_ref (ptr
);
1980 OMP_CLAUSE_DECL (node
) = ptr
;
1981 node2
= build_omp_clause (input_location
,
1983 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
1984 OMP_CLAUSE_DECL (node2
) = decl
;
1985 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
1986 node3
= build_omp_clause (input_location
,
1988 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
1989 OMP_CLAUSE_DECL (node3
)
1990 = gfc_conv_descriptor_data_get (decl
);
1991 OMP_CLAUSE_SIZE (node3
) = size_int (0);
1993 /* We have to check for n->sym->attr.dimension because
1994 of scalar coarrays. */
1995 if (n
->sym
->attr
.pointer
&& n
->sym
->attr
.dimension
)
1997 stmtblock_t cond_block
;
1999 = gfc_create_var (gfc_array_index_type
, NULL
);
2000 tree tem
, then_b
, else_b
, zero
, cond
;
2002 gfc_init_block (&cond_block
);
2004 = gfc_full_array_size (&cond_block
, decl
,
2005 GFC_TYPE_ARRAY_RANK (type
));
2006 gfc_add_modify (&cond_block
, size
, tem
);
2007 then_b
= gfc_finish_block (&cond_block
);
2008 gfc_init_block (&cond_block
);
2009 zero
= build_int_cst (gfc_array_index_type
, 0);
2010 gfc_add_modify (&cond_block
, size
, zero
);
2011 else_b
= gfc_finish_block (&cond_block
);
2012 tem
= gfc_conv_descriptor_data_get (decl
);
2013 tem
= fold_convert (pvoid_type_node
, tem
);
2014 cond
= fold_build2_loc (input_location
, NE_EXPR
,
2016 tem
, null_pointer_node
);
2017 gfc_add_expr_to_block (block
,
2018 build3_loc (input_location
,
2023 OMP_CLAUSE_SIZE (node
) = size
;
2025 else if (n
->sym
->attr
.dimension
)
2026 OMP_CLAUSE_SIZE (node
)
2027 = gfc_full_array_size (block
, decl
,
2028 GFC_TYPE_ARRAY_RANK (type
));
2029 if (n
->sym
->attr
.dimension
)
2032 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2033 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2034 OMP_CLAUSE_SIZE (node
)
2035 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2036 OMP_CLAUSE_SIZE (node
), elemsz
);
2040 OMP_CLAUSE_DECL (node
) = decl
;
2045 gfc_init_se (&se
, NULL
);
2046 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2048 gfc_conv_expr_reference (&se
, n
->expr
);
2049 gfc_add_block_to_block (block
, &se
.pre
);
2051 OMP_CLAUSE_SIZE (node
)
2052 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2056 gfc_conv_expr_descriptor (&se
, n
->expr
);
2057 ptr
= gfc_conv_array_data (se
.expr
);
2058 tree type
= TREE_TYPE (se
.expr
);
2059 gfc_add_block_to_block (block
, &se
.pre
);
2060 OMP_CLAUSE_SIZE (node
)
2061 = gfc_full_array_size (block
, se
.expr
,
2062 GFC_TYPE_ARRAY_RANK (type
));
2064 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2065 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2066 OMP_CLAUSE_SIZE (node
)
2067 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2068 OMP_CLAUSE_SIZE (node
), elemsz
);
2070 gfc_add_block_to_block (block
, &se
.post
);
2071 ptr
= fold_convert (build_pointer_type (char_type_node
),
2073 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2075 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2076 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
2078 node4
= build_omp_clause (input_location
,
2080 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2081 OMP_CLAUSE_DECL (node4
) = decl
;
2082 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2083 decl
= build_fold_indirect_ref (decl
);
2085 ptr
= fold_convert (sizetype
, ptr
);
2086 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2088 tree type
= TREE_TYPE (decl
);
2089 ptr2
= gfc_conv_descriptor_data_get (decl
);
2090 node2
= build_omp_clause (input_location
,
2092 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2093 OMP_CLAUSE_DECL (node2
) = decl
;
2094 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2095 node3
= build_omp_clause (input_location
,
2097 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2098 OMP_CLAUSE_DECL (node3
)
2099 = gfc_conv_descriptor_data_get (decl
);
2103 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2104 ptr2
= build_fold_addr_expr (decl
);
2107 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2110 node3
= build_omp_clause (input_location
,
2112 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2113 OMP_CLAUSE_DECL (node3
) = decl
;
2115 ptr2
= fold_convert (sizetype
, ptr2
);
2116 OMP_CLAUSE_SIZE (node3
)
2117 = fold_build2 (MINUS_EXPR
, sizetype
, ptr
, ptr2
);
2119 switch (n
->u
.map_op
)
2122 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALLOC
);
2125 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TO
);
2128 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FROM
);
2130 case OMP_MAP_TOFROM
:
2131 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TOFROM
);
2133 case OMP_MAP_FORCE_ALLOC
:
2134 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_ALLOC
);
2136 case OMP_MAP_FORCE_DEALLOC
:
2137 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEALLOC
);
2139 case OMP_MAP_FORCE_TO
:
2140 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TO
);
2142 case OMP_MAP_FORCE_FROM
:
2143 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_FROM
);
2145 case OMP_MAP_FORCE_TOFROM
:
2146 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TOFROM
);
2148 case OMP_MAP_FORCE_PRESENT
:
2149 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_PRESENT
);
2151 case OMP_MAP_FORCE_DEVICEPTR
:
2152 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEVICEPTR
);
2157 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2159 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
2161 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
2163 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
2168 for (; n
!= NULL
; n
= n
->next
)
2170 if (!n
->sym
->attr
.referenced
)
2173 tree node
= build_omp_clause (input_location
,
2175 ? OMP_CLAUSE_TO
: OMP_CLAUSE_FROM
);
2176 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2178 tree decl
= gfc_get_symbol_decl (n
->sym
);
2179 if (gfc_omp_privatize_by_reference (decl
))
2180 decl
= build_fold_indirect_ref (decl
);
2181 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2183 tree type
= TREE_TYPE (decl
);
2184 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2185 ptr
= fold_convert (build_pointer_type (char_type_node
),
2187 ptr
= build_fold_indirect_ref (ptr
);
2188 OMP_CLAUSE_DECL (node
) = ptr
;
2189 OMP_CLAUSE_SIZE (node
)
2190 = gfc_full_array_size (block
, decl
,
2191 GFC_TYPE_ARRAY_RANK (type
));
2193 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2194 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2195 OMP_CLAUSE_SIZE (node
)
2196 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2197 OMP_CLAUSE_SIZE (node
), elemsz
);
2200 OMP_CLAUSE_DECL (node
) = decl
;
2205 gfc_init_se (&se
, NULL
);
2206 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2208 gfc_conv_expr_reference (&se
, n
->expr
);
2210 gfc_add_block_to_block (block
, &se
.pre
);
2211 OMP_CLAUSE_SIZE (node
)
2212 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2216 gfc_conv_expr_descriptor (&se
, n
->expr
);
2217 ptr
= gfc_conv_array_data (se
.expr
);
2218 tree type
= TREE_TYPE (se
.expr
);
2219 gfc_add_block_to_block (block
, &se
.pre
);
2220 OMP_CLAUSE_SIZE (node
)
2221 = gfc_full_array_size (block
, se
.expr
,
2222 GFC_TYPE_ARRAY_RANK (type
));
2224 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2225 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2226 OMP_CLAUSE_SIZE (node
)
2227 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2228 OMP_CLAUSE_SIZE (node
), elemsz
);
2230 gfc_add_block_to_block (block
, &se
.post
);
2231 ptr
= fold_convert (build_pointer_type (char_type_node
),
2233 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2235 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2243 if (clauses
->if_expr
)
2247 gfc_init_se (&se
, NULL
);
2248 gfc_conv_expr (&se
, clauses
->if_expr
);
2249 gfc_add_block_to_block (block
, &se
.pre
);
2250 if_var
= gfc_evaluate_now (se
.expr
, block
);
2251 gfc_add_block_to_block (block
, &se
.post
);
2253 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
2254 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2255 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2258 if (clauses
->final_expr
)
2262 gfc_init_se (&se
, NULL
);
2263 gfc_conv_expr (&se
, clauses
->final_expr
);
2264 gfc_add_block_to_block (block
, &se
.pre
);
2265 final_var
= gfc_evaluate_now (se
.expr
, block
);
2266 gfc_add_block_to_block (block
, &se
.post
);
2268 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FINAL
);
2269 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
2270 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2273 if (clauses
->num_threads
)
2277 gfc_init_se (&se
, NULL
);
2278 gfc_conv_expr (&se
, clauses
->num_threads
);
2279 gfc_add_block_to_block (block
, &se
.pre
);
2280 num_threads
= gfc_evaluate_now (se
.expr
, block
);
2281 gfc_add_block_to_block (block
, &se
.post
);
2283 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_THREADS
);
2284 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
2285 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2288 chunk_size
= NULL_TREE
;
2289 if (clauses
->chunk_size
)
2291 gfc_init_se (&se
, NULL
);
2292 gfc_conv_expr (&se
, clauses
->chunk_size
);
2293 gfc_add_block_to_block (block
, &se
.pre
);
2294 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2295 gfc_add_block_to_block (block
, &se
.post
);
2298 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
2300 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SCHEDULE
);
2301 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2302 switch (clauses
->sched_kind
)
2304 case OMP_SCHED_STATIC
:
2305 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
2307 case OMP_SCHED_DYNAMIC
:
2308 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
2310 case OMP_SCHED_GUIDED
:
2311 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
2313 case OMP_SCHED_RUNTIME
:
2314 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
2316 case OMP_SCHED_AUTO
:
2317 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
2322 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2325 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
2327 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULT
);
2328 switch (clauses
->default_sharing
)
2330 case OMP_DEFAULT_NONE
:
2331 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
2333 case OMP_DEFAULT_SHARED
:
2334 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
2336 case OMP_DEFAULT_PRIVATE
:
2337 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
2339 case OMP_DEFAULT_FIRSTPRIVATE
:
2340 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
2345 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2348 if (clauses
->nowait
)
2350 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOWAIT
);
2351 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2354 if (clauses
->ordered
)
2356 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ORDERED
);
2357 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2360 if (clauses
->untied
)
2362 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_UNTIED
);
2363 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2366 if (clauses
->mergeable
)
2368 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_MERGEABLE
);
2369 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2372 if (clauses
->collapse
)
2374 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_COLLAPSE
);
2375 OMP_CLAUSE_COLLAPSE_EXPR (c
)
2376 = build_int_cst (integer_type_node
, clauses
->collapse
);
2377 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2380 if (clauses
->inbranch
)
2382 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INBRANCH
);
2383 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2386 if (clauses
->notinbranch
)
2388 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOTINBRANCH
);
2389 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2392 switch (clauses
->cancel
)
2394 case OMP_CANCEL_UNKNOWN
:
2396 case OMP_CANCEL_PARALLEL
:
2397 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PARALLEL
);
2398 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2400 case OMP_CANCEL_SECTIONS
:
2401 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SECTIONS
);
2402 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2405 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FOR
);
2406 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2408 case OMP_CANCEL_TASKGROUP
:
2409 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TASKGROUP
);
2410 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2414 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
2416 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PROC_BIND
);
2417 switch (clauses
->proc_bind
)
2419 case OMP_PROC_BIND_MASTER
:
2420 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
2422 case OMP_PROC_BIND_SPREAD
:
2423 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
2425 case OMP_PROC_BIND_CLOSE
:
2426 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
2431 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2434 if (clauses
->safelen_expr
)
2438 gfc_init_se (&se
, NULL
);
2439 gfc_conv_expr (&se
, clauses
->safelen_expr
);
2440 gfc_add_block_to_block (block
, &se
.pre
);
2441 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
2442 gfc_add_block_to_block (block
, &se
.post
);
2444 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SAFELEN
);
2445 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
2446 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2449 if (clauses
->simdlen_expr
)
2451 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2452 OMP_CLAUSE_SIMDLEN_EXPR (c
)
2453 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
2454 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2457 if (clauses
->num_teams
)
2461 gfc_init_se (&se
, NULL
);
2462 gfc_conv_expr (&se
, clauses
->num_teams
);
2463 gfc_add_block_to_block (block
, &se
.pre
);
2464 num_teams
= gfc_evaluate_now (se
.expr
, block
);
2465 gfc_add_block_to_block (block
, &se
.post
);
2467 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TEAMS
);
2468 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
2469 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2472 if (clauses
->device
)
2476 gfc_init_se (&se
, NULL
);
2477 gfc_conv_expr (&se
, clauses
->device
);
2478 gfc_add_block_to_block (block
, &se
.pre
);
2479 device
= gfc_evaluate_now (se
.expr
, block
);
2480 gfc_add_block_to_block (block
, &se
.post
);
2482 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEVICE
);
2483 OMP_CLAUSE_DEVICE_ID (c
) = device
;
2484 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2487 if (clauses
->thread_limit
)
2491 gfc_init_se (&se
, NULL
);
2492 gfc_conv_expr (&se
, clauses
->thread_limit
);
2493 gfc_add_block_to_block (block
, &se
.pre
);
2494 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
2495 gfc_add_block_to_block (block
, &se
.post
);
2497 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREAD_LIMIT
);
2498 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
2499 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2502 chunk_size
= NULL_TREE
;
2503 if (clauses
->dist_chunk_size
)
2505 gfc_init_se (&se
, NULL
);
2506 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
2507 gfc_add_block_to_block (block
, &se
.pre
);
2508 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2509 gfc_add_block_to_block (block
, &se
.post
);
2512 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
2514 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DIST_SCHEDULE
);
2515 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2516 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2521 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ASYNC
);
2522 if (clauses
->async_expr
)
2523 OMP_CLAUSE_ASYNC_EXPR (c
)
2524 = gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
2526 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
2527 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2531 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ORDERED
);
2532 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2534 if (clauses
->independent
)
2536 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INDEPENDENT
);
2537 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2539 if (clauses
->wait_list
)
2543 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2545 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WAIT
);
2546 OMP_CLAUSE_DECL (c
) = gfc_convert_expr_to_tree (block
, el
->expr
);
2547 OMP_CLAUSE_CHAIN (c
) = omp_clauses
;
2551 if (clauses
->num_gangs_expr
)
2554 = gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
2555 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_GANGS
);
2556 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
2557 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2559 if (clauses
->num_workers_expr
)
2561 tree num_workers_var
2562 = gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
2563 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_WORKERS
);
2564 OMP_CLAUSE_NUM_WORKERS_EXPR (c
) = num_workers_var
;
2565 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2567 if (clauses
->vector_length_expr
)
2569 tree vector_length_var
2570 = gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
2571 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR_LENGTH
);
2572 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
) = vector_length_var
;
2573 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2575 if (clauses
->vector
)
2577 if (clauses
->vector_expr
)
2580 = gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
2581 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2582 OMP_CLAUSE_VECTOR_EXPR (c
) = vector_var
;
2583 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2587 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2588 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2591 if (clauses
->worker
)
2593 if (clauses
->worker_expr
)
2596 = gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
2597 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2598 OMP_CLAUSE_WORKER_EXPR (c
) = worker_var
;
2599 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2603 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2604 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2609 if (clauses
->gang_expr
)
2612 = gfc_convert_expr_to_tree (block
, clauses
->gang_expr
);
2613 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GANG
);
2614 OMP_CLAUSE_GANG_EXPR (c
) = gang_var
;
2615 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2619 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GANG
);
2620 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
  return nreverse (omp_clauses);
}

/* Like gfc_trans_code, but force creation of a BIND_EXPR around it.  */

static tree
gfc_trans_omp_code (gfc_code *code, bool force_empty)
{
  tree stmt;

  pushlevel ();
  stmt = gfc_trans_code (code);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt) || force_empty)
        {
          tree block = poplevel (1, 0);
          stmt = build3_v (BIND_EXPR, NULL, stmt, block);
        }
      else
        poplevel (0, 0);
    }

  return stmt;
}

/* Trans OpenACC directives. */
/* parallel, kernels, data and host_data. */

static tree
gfc_trans_oacc_construct (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, oacc_clauses;
  enum tree_code construct_code;

  switch (code->op)
    {
      case EXEC_OACC_PARALLEL:
        construct_code = OACC_PARALLEL;
        break;
      case EXEC_OACC_KERNELS:
        construct_code = OACC_KERNELS;
        break;
      case EXEC_OACC_DATA:
        construct_code = OACC_DATA;
        break;
      case EXEC_OACC_HOST_DATA:
        construct_code = OACC_HOST_DATA;
        break;
      default:
        gcc_unreachable ();
    }

  gfc_start_block (&block);
  oacc_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                        code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, construct_code, void_type_node, stmt,
                     oacc_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

/* update, enter_data, exit_data, cache. */

static tree
gfc_trans_oacc_executable_directive (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, oacc_clauses;
  enum tree_code construct_code;

  switch (code->op)
    {
      case EXEC_OACC_UPDATE:
        construct_code = OACC_UPDATE;
        break;
      case EXEC_OACC_ENTER_DATA:
        construct_code = OACC_ENTER_DATA;
        break;
      case EXEC_OACC_EXIT_DATA:
        construct_code = OACC_EXIT_DATA;
        break;
      case EXEC_OACC_CACHE:
        construct_code = OACC_CACHE;
        break;
      default:
        gcc_unreachable ();
    }

  gfc_start_block (&block);
  oacc_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                        code->loc);
  stmt = build1_loc (input_location, construct_code, void_type_node,
                     oacc_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
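
/* Translate the OpenACC wait directive into a call to the GOACC_wait
   builtin: the async expression (or -2 when the clause is absent) and the
   number of wait arguments are passed first, followed by the translated
   wait-list expressions.  */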
static tree
gfc_trans_oacc_wait_directive (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, t;
  vec<tree, va_gc> *args;
  int nparms = 0;
  gfc_expr_list *el;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;
  location_t loc = input_location;

  for (el = clauses->wait_list; el; el = el->next)
    nparms++;

  vec_alloc (args, nparms + 2);
  stmt = builtin_decl_explicit (BUILT_IN_GOACC_WAIT);

  gfc_start_block (&block);

  if (clauses->async_expr)
    t = gfc_convert_expr_to_tree (&block, clauses->async_expr);
  else
    t = build_int_cst (integer_type_node, -2);

  args->quick_push (t);
  args->quick_push (build_int_cst (integer_type_node, nparms));

  for (el = clauses->wait_list; el; el = el->next)
    args->quick_push (gfc_convert_expr_to_tree (&block, el->expr));

  stmt = build_call_expr_loc_vec (loc, stmt, args);
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}

static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
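
/* Translate !$OMP ATOMIC statements (READ, WRITE, UPDATE and CAPTURE,
   optionally with SEQ_CST) into OMP_ATOMIC, OMP_ATOMIC_READ and
   OMP_ATOMIC_CAPTURE_{OLD,NEW} nodes.  For example, the capture form

     !$omp atomic capture
       v = x
       x = x + 1
     !$omp end atomic

   reads the old value of X into V while atomically incrementing X.  */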
2765 gfc_trans_omp_atomic (gfc_code
*code
)
2767 gfc_code
*atomic_code
= code
;
2771 gfc_expr
*expr2
, *e
;
2774 tree lhsaddr
, type
, rhs
, x
;
2775 enum tree_code op
= ERROR_MARK
;
2776 enum tree_code aop
= OMP_ATOMIC
;
2777 bool var_on_left
= false;
2778 bool seq_cst
= (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SEQ_CST
) != 0;
2780 code
= code
->block
->next
;
2781 gcc_assert (code
->op
== EXEC_ASSIGN
);
2782 var
= code
->expr1
->symtree
->n
.sym
;
2784 gfc_init_se (&lse
, NULL
);
2785 gfc_init_se (&rse
, NULL
);
2786 gfc_init_se (&vse
, NULL
);
2787 gfc_start_block (&block
);
2789 expr2
= code
->expr2
;
2790 if (expr2
->expr_type
== EXPR_FUNCTION
2791 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2792 expr2
= expr2
->value
.function
.actual
->expr
;
2794 switch (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2796 case GFC_OMP_ATOMIC_READ
:
2797 gfc_conv_expr (&vse
, code
->expr1
);
2798 gfc_add_block_to_block (&block
, &vse
.pre
);
2800 gfc_conv_expr (&lse
, expr2
);
2801 gfc_add_block_to_block (&block
, &lse
.pre
);
2802 type
= TREE_TYPE (lse
.expr
);
2803 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
2805 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
2806 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
2807 x
= convert (TREE_TYPE (vse
.expr
), x
);
2808 gfc_add_modify (&block
, vse
.expr
, x
);
2810 gfc_add_block_to_block (&block
, &lse
.pre
);
2811 gfc_add_block_to_block (&block
, &rse
.pre
);
2813 return gfc_finish_block (&block
);
2814 case GFC_OMP_ATOMIC_CAPTURE
:
2815 aop
= OMP_ATOMIC_CAPTURE_NEW
;
2816 if (expr2
->expr_type
== EXPR_VARIABLE
)
2818 aop
= OMP_ATOMIC_CAPTURE_OLD
;
2819 gfc_conv_expr (&vse
, code
->expr1
);
2820 gfc_add_block_to_block (&block
, &vse
.pre
);
2822 gfc_conv_expr (&lse
, expr2
);
2823 gfc_add_block_to_block (&block
, &lse
.pre
);
2824 gfc_init_se (&lse
, NULL
);
2826 var
= code
->expr1
->symtree
->n
.sym
;
2827 expr2
= code
->expr2
;
2828 if (expr2
->expr_type
== EXPR_FUNCTION
2829 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2830 expr2
= expr2
->value
.function
.actual
->expr
;
2837 gfc_conv_expr (&lse
, code
->expr1
);
2838 gfc_add_block_to_block (&block
, &lse
.pre
);
2839 type
= TREE_TYPE (lse
.expr
);
2840 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
2842 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2843 == GFC_OMP_ATOMIC_WRITE
)
2844 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
2846 gfc_conv_expr (&rse
, expr2
);
2847 gfc_add_block_to_block (&block
, &rse
.pre
);
2849 else if (expr2
->expr_type
== EXPR_OP
)
2852 switch (expr2
->value
.op
.op
)
2854 case INTRINSIC_PLUS
:
2857 case INTRINSIC_TIMES
:
2860 case INTRINSIC_MINUS
:
2863 case INTRINSIC_DIVIDE
:
2864 if (expr2
->ts
.type
== BT_INTEGER
)
2865 op
= TRUNC_DIV_EXPR
;
2870 op
= TRUTH_ANDIF_EXPR
;
2873 op
= TRUTH_ORIF_EXPR
;
2878 case INTRINSIC_NEQV
:
2884 e
= expr2
->value
.op
.op1
;
2885 if (e
->expr_type
== EXPR_FUNCTION
2886 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2887 e
= e
->value
.function
.actual
->expr
;
2888 if (e
->expr_type
== EXPR_VARIABLE
2889 && e
->symtree
!= NULL
2890 && e
->symtree
->n
.sym
== var
)
2892 expr2
= expr2
->value
.op
.op2
;
2897 e
= expr2
->value
.op
.op2
;
2898 if (e
->expr_type
== EXPR_FUNCTION
2899 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2900 e
= e
->value
.function
.actual
->expr
;
2901 gcc_assert (e
->expr_type
== EXPR_VARIABLE
2902 && e
->symtree
!= NULL
2903 && e
->symtree
->n
.sym
== var
);
2904 expr2
= expr2
->value
.op
.op1
;
2905 var_on_left
= false;
2907 gfc_conv_expr (&rse
, expr2
);
2908 gfc_add_block_to_block (&block
, &rse
.pre
);
2912 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
2913 switch (expr2
->value
.function
.isym
->id
)
2933 e
= expr2
->value
.function
.actual
->expr
;
2934 gcc_assert (e
->expr_type
== EXPR_VARIABLE
2935 && e
->symtree
!= NULL
2936 && e
->symtree
->n
.sym
== var
);
2938 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
2939 gfc_add_block_to_block (&block
, &rse
.pre
);
2940 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
2942 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
2943 gfc_actual_arglist
*arg
;
2945 gfc_add_modify (&block
, accum
, rse
.expr
);
2946 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
2949 gfc_init_block (&rse
.pre
);
2950 gfc_conv_expr (&rse
, arg
->expr
);
2951 gfc_add_block_to_block (&block
, &rse
.pre
);
2952 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
2954 gfc_add_modify (&block
, accum
, x
);
2960 expr2
= expr2
->value
.function
.actual
->next
->expr
;
2963 lhsaddr
= save_expr (lhsaddr
);
2964 if (TREE_CODE (lhsaddr
) != SAVE_EXPR
2965 && (TREE_CODE (lhsaddr
) != ADDR_EXPR
2966 || TREE_CODE (TREE_OPERAND (lhsaddr
, 0)) != VAR_DECL
))
2968 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
2969 it even after unsharing function body. */
2970 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
2971 DECL_CONTEXT (var
) = current_function_decl
;
2972 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
,
2973 NULL_TREE
, NULL_TREE
);
2976 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
2978 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2979 == GFC_OMP_ATOMIC_WRITE
)
2980 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
2984 x
= convert (TREE_TYPE (rhs
),
2985 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
2987 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
2989 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
2992 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
2993 && TREE_CODE (type
) != COMPLEX_TYPE
)
2994 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
2995 TREE_TYPE (TREE_TYPE (rhs
)), x
);
2997 gfc_add_block_to_block (&block
, &lse
.pre
);
2998 gfc_add_block_to_block (&block
, &rse
.pre
);
3000 if (aop
== OMP_ATOMIC
)
3002 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
3003 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3004 gfc_add_expr_to_block (&block
, x
);
3008 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
3011 expr2
= code
->expr2
;
3012 if (expr2
->expr_type
== EXPR_FUNCTION
3013 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3014 expr2
= expr2
->value
.function
.actual
->expr
;
3016 gcc_assert (expr2
->expr_type
== EXPR_VARIABLE
);
3017 gfc_conv_expr (&vse
, code
->expr1
);
3018 gfc_add_block_to_block (&block
, &vse
.pre
);
3020 gfc_init_se (&lse
, NULL
);
3021 gfc_conv_expr (&lse
, expr2
);
3022 gfc_add_block_to_block (&block
, &lse
.pre
);
3024 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
3025 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3026 x
= convert (TREE_TYPE (vse
.expr
), x
);
3027 gfc_add_modify (&block
, vse
.expr
, x
);
3030 return gfc_finish_block (&block
);
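
/* Translate !$OMP BARRIER into a call to the GOMP_barrier builtin.  */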
static tree
gfc_trans_omp_barrier (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
  return build_call_expr_loc (input_location, decl, 0);
}
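
/* Translate !$OMP CANCEL into a call to the GOMP_cancel builtin: MASK
   encodes which construct is cancelled and the IF clause, when present,
   is evaluated into the second argument.  */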
static tree
gfc_trans_omp_cancel (gfc_code *code)
{
  int mask = 0;
  tree ifc = boolean_true_node;
  stmtblock_t block;
  switch (code->ext.omp_clauses->cancel)
    {
    case OMP_CANCEL_PARALLEL: mask = 1; break;
    case OMP_CANCEL_DO: mask = 2; break;
    case OMP_CANCEL_SECTIONS: mask = 4; break;
    case OMP_CANCEL_TASKGROUP: mask = 8; break;
    default: gcc_unreachable ();
    }
  gfc_start_block (&block);
  if (code->ext.omp_clauses->if_expr)
    {
      gfc_se se;
      tree if_var;
      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, code->ext.omp_clauses->if_expr);
      gfc_add_block_to_block (&block, &se.pre);
      if_var = gfc_evaluate_now (se.expr, &block);
      gfc_add_block_to_block (&block, &se.post);
      tree type = TREE_TYPE (if_var);
      ifc = fold_build2_loc (input_location, NE_EXPR,
                             boolean_type_node, if_var,
                             build_zero_cst (type));
    }
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
  tree c_bool_type = TREE_TYPE (TREE_TYPE (decl));
  ifc = fold_convert (c_bool_type, ifc);
  gfc_add_expr_to_block (&block,
                         build_call_expr_loc (input_location, decl, 2,
                                              build_int_cst (integer_type_node,
                                                             mask), ifc));
  return gfc_finish_block (&block);
}
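
/* Translate !$OMP CANCELLATION POINT into a call to the
   GOMP_cancellation_point builtin, using the same construct mask
   encoding as cancellation.  */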
static tree
gfc_trans_omp_cancellation_point (gfc_code *code)
{
  int mask = 0;
  switch (code->ext.omp_clauses->cancel)
    {
    case OMP_CANCEL_PARALLEL: mask = 1; break;
    case OMP_CANCEL_DO: mask = 2; break;
    case OMP_CANCEL_SECTIONS: mask = 4; break;
    case OMP_CANCEL_TASKGROUP: mask = 8; break;
    default: gcc_unreachable ();
    }
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT);
  return build_call_expr_loc (input_location, decl, 1,
                              build_int_cst (integer_type_node, mask));
}
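
/* Translate !$OMP CRITICAL [(name)] into an OMP_CRITICAL node wrapping
   the translated body.  */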
static tree
gfc_trans_omp_critical (gfc_code *code)
{
  tree name = NULL_TREE, stmt;
  if (code->ext.omp_name != NULL)
    name = get_identifier (code->ext.omp_name);
  stmt = gfc_trans_code (code->block->next);
  return build2_loc (input_location, OMP_CRITICAL, void_type_node, stmt, name);
}

typedef struct dovar_init_d {
  tree var;
  tree init;
} dovar_init;
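
/* Translate the loop constructs (!$omp do, !$omp simd, !$omp distribute
   and !$acc loop): build the corresponding OMP_FOR-family node with its
   OMP_FOR_INIT, OMP_FOR_COND and OMP_FOR_INCR vectors, one entry per
   collapsed loop.  For example, with

     !$omp do collapse(2)
     do i = 1, n
       do j = 1, m

   each of the three vectors gets two entries, one for I and one for J.  */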
3114 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
3115 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
3118 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
;
3119 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
3122 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
3123 int i
, collapse
= clauses
->collapse
;
3124 vec
<dovar_init
> inits
= vNULL
;
3131 code
= code
->block
->next
;
3132 gcc_assert (code
->op
== EXEC_DO
);
3134 init
= make_tree_vec (collapse
);
3135 cond
= make_tree_vec (collapse
);
3136 incr
= make_tree_vec (collapse
);
3140 gfc_start_block (&block
);
3144 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
3146 for (i
= 0; i
< collapse
; i
++)
3149 int dovar_found
= 0;
3154 gfc_omp_namelist
*n
= NULL
;
3155 if (op
!= EXEC_OMP_DISTRIBUTE
)
3156 for (n
= clauses
->lists
[(op
== EXEC_OMP_SIMD
&& collapse
== 1)
3157 ? OMP_LIST_LINEAR
: OMP_LIST_LASTPRIVATE
];
3158 n
!= NULL
; n
= n
->next
)
3159 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3163 else if (n
== NULL
&& op
!= EXEC_OMP_SIMD
)
3164 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
3165 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3171 /* Evaluate all the expressions in the iterator. */
3172 gfc_init_se (&se
, NULL
);
3173 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
3174 gfc_add_block_to_block (pblock
, &se
.pre
);
3176 type
= TREE_TYPE (dovar
);
3177 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
3179 gfc_init_se (&se
, NULL
);
3180 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
3181 gfc_add_block_to_block (pblock
, &se
.pre
);
3182 from
= gfc_evaluate_now (se
.expr
, pblock
);
3184 gfc_init_se (&se
, NULL
);
3185 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
3186 gfc_add_block_to_block (pblock
, &se
.pre
);
3187 to
= gfc_evaluate_now (se
.expr
, pblock
);
3189 gfc_init_se (&se
, NULL
);
3190 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
3191 gfc_add_block_to_block (pblock
, &se
.pre
);
3192 step
= gfc_evaluate_now (se
.expr
, pblock
);
3195 /* Special case simple loops. */
3196 if (TREE_CODE (dovar
) == VAR_DECL
)
3198 if (integer_onep (step
))
3200 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
3205 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
3211 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
3212 /* The condition should not be folded. */
3213 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
3214 ? LE_EXPR
: GE_EXPR
,
3215 boolean_type_node
, dovar
, to
);
3216 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3218 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3221 TREE_VEC_ELT (incr
, i
));
3225 /* STEP is not 1 or -1. Use:
3226 for (count = 0; count < (to + step - from) / step; count++)
3228 dovar = from + count * step;
3232 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
3233 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
3234 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
3236 tmp
= gfc_evaluate_now (tmp
, pblock
);
3237 count
= gfc_create_var (type
, "count");
3238 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
3239 build_int_cst (type
, 0));
3240 /* The condition should not be folded. */
3241 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
3244 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3246 build_int_cst (type
, 1));
3247 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3248 MODIFY_EXPR
, type
, count
,
3249 TREE_VEC_ELT (incr
, i
));
3251 /* Initialize DOVAR. */
3252 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
3253 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
3254 dovar_init e
= {dovar
, tmp
};
3255 inits
.safe_push (e
);
3258 if (dovar_found
== 2
3259 && op
== EXEC_OMP_SIMD
3263 for (tmp
= omp_clauses
; tmp
; tmp
= OMP_CLAUSE_CHAIN (tmp
))
3264 if (OMP_CLAUSE_CODE (tmp
) == OMP_CLAUSE_LINEAR
3265 && OMP_CLAUSE_DECL (tmp
) == dovar
)
3267 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3273 if (op
== EXEC_OMP_SIMD
)
3277 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3278 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
3279 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3282 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3287 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3288 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
3289 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3291 if (dovar_found
== 2)
3298 /* If dovar is lastprivate, but different counter is used,
3299 dovar += step needs to be added to
3300 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
3301 will have the value on entry of the last loop, rather
3302 than value after iterator increment. */
3303 tmp
= gfc_evaluate_now (step
, pblock
);
3304 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, dovar
,
3306 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
3308 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3309 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3310 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3312 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
3315 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
3316 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3318 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
3322 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
3324 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3325 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3326 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3328 tree l
= build_omp_clause (input_location
,
3329 OMP_CLAUSE_LASTPRIVATE
);
3330 OMP_CLAUSE_DECL (l
) = dovar_decl
;
3331 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
3332 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
3334 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
3338 gcc_assert (simple
|| c
!= NULL
);
3342 if (op
!= EXEC_OMP_SIMD
)
3343 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3344 else if (collapse
== 1)
3346 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3347 OMP_CLAUSE_LINEAR_STEP (tmp
) = build_int_cst (type
, 1);
3348 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3349 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
3352 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3353 OMP_CLAUSE_DECL (tmp
) = count
;
3354 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3357 if (i
+ 1 < collapse
)
3358 code
= code
->block
->next
;
3361 if (pblock
!= &block
)
3364 gfc_start_block (&block
);
3367 gfc_start_block (&body
);
3369 FOR_EACH_VEC_ELT (inits
, ix
, di
)
3370 gfc_add_modify (&body
, di
->var
, di
->init
);
3373 /* Cycle statement is implemented with a goto. Exit statement must not be
3374 present for this loop. */
3375 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3377 /* Put these labels where they can be found later. */
3379 code
->cycle_label
= cycle_label
;
3380 code
->exit_label
= NULL_TREE
;
3382 /* Main loop body. */
3383 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
3384 gfc_add_expr_to_block (&body
, tmp
);
3386 /* Label for cycle statements (if needed). */
3387 if (TREE_USED (cycle_label
))
3389 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3390 gfc_add_expr_to_block (&body
, tmp
);
3393 /* End of loop body. */
3396 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
3397 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
3398 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
3399 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
3400 default: gcc_unreachable ();
3403 TREE_TYPE (stmt
) = void_type_node
;
3404 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
3405 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
3406 OMP_FOR_INIT (stmt
) = init
;
3407 OMP_FOR_COND (stmt
) = cond
;
3408 OMP_FOR_INCR (stmt
) = incr
;
3409 gfc_add_expr_to_block (&block
, stmt
);
3411 return gfc_finish_block (&block
);
3414 /* parallel loop and kernels loop. */
3416 gfc_trans_oacc_combined_directive (gfc_code
*code
)
3418 stmtblock_t block
, *pblock
= NULL
;
3419 gfc_omp_clauses construct_clauses
, loop_clauses
;
3420 tree stmt
, oacc_clauses
= NULL_TREE
;
3421 enum tree_code construct_code
;
3425 case EXEC_OACC_PARALLEL_LOOP
:
3426 construct_code
= OACC_PARALLEL
;
3428 case EXEC_OACC_KERNELS_LOOP
:
3429 construct_code
= OACC_KERNELS
;
3435 gfc_start_block (&block
);
3437 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
3438 if (code
->ext
.omp_clauses
!= NULL
)
3440 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
3441 sizeof (construct_clauses
));
3442 loop_clauses
.collapse
= construct_clauses
.collapse
;
3443 loop_clauses
.gang
= construct_clauses
.gang
;
3444 loop_clauses
.vector
= construct_clauses
.vector
;
3445 loop_clauses
.worker
= construct_clauses
.worker
;
3446 loop_clauses
.seq
= construct_clauses
.seq
;
3447 loop_clauses
.independent
= construct_clauses
.independent
;
3448 construct_clauses
.collapse
= 0;
3449 construct_clauses
.gang
= false;
3450 construct_clauses
.vector
= false;
3451 construct_clauses
.worker
= false;
3452 construct_clauses
.seq
= false;
3453 construct_clauses
.independent
= false;
3454 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
3457 if (!loop_clauses
.seq
)
3461 stmt
= gfc_trans_omp_do (code
, EXEC_OACC_LOOP
, pblock
, &loop_clauses
, NULL
);
3462 if (TREE_CODE (stmt
) != BIND_EXPR
)
3463 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3466 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
3468 if (code
->op
== EXEC_OACC_KERNELS_LOOP
)
3469 OACC_KERNELS_COMBINED (stmt
) = 1;
3471 OACC_PARALLEL_COMBINED (stmt
) = 1;
3472 gfc_add_expr_to_block (&block
, stmt
);
3473 return gfc_finish_block (&block
);
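
/* Translate !$OMP FLUSH into a __sync_synchronize () call.  */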
static tree
gfc_trans_omp_flush (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
  return build_call_expr_loc (input_location, decl, 0);
}
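
/* Translate !$OMP MASTER into an OMP_MASTER node; an empty body is
   returned unchanged.  */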
static tree
gfc_trans_omp_master (gfc_code *code)
{
  tree stmt = gfc_trans_code (code->block->next);
  if (IS_EMPTY_STMT (stmt))
    return stmt;
  return build1_v (OMP_MASTER, stmt);
}

static tree
gfc_trans_omp_ordered (gfc_code *code)
{
  return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
}
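
/* Translate !$OMP PARALLEL into an OMP_PARALLEL node carrying its
   clauses and translated body.  */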
static tree
gfc_trans_omp_parallel (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
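
/* Combined OpenMP constructs are translated by splitting their clauses
   onto the constituent constructs.  GFC_OMP_SPLIT_* index the
   per-construct clause sets; GFC_OMP_MASK_* name the constituents that
   are present in a given combined construct.  */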
enum
{
  GFC_OMP_SPLIT_SIMD,
  GFC_OMP_SPLIT_DO,
  GFC_OMP_SPLIT_PARALLEL,
  GFC_OMP_SPLIT_DISTRIBUTE,
  GFC_OMP_SPLIT_TEAMS,
  GFC_OMP_SPLIT_TARGET,
  GFC_OMP_SPLIT_NUM
};

enum
{
  GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
  GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
  GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
  GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
  GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
  GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET)
};
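
/* Split the clauses of the combined construct CODE into the
   per-construct entries of CLAUSESA: clauses unique to one construct go
   to that construct, while clauses such as PRIVATE, FIRSTPRIVATE,
   LASTPRIVATE and REDUCTION are placed on, or duplicated across, the
   constituent constructs that accept them.  */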
3536 gfc_split_omp_clauses (gfc_code
*code
,
3537 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
3539 int mask
= 0, innermost
= 0;
3540 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
3543 case EXEC_OMP_DISTRIBUTE
:
3544 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3546 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
3547 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3548 innermost
= GFC_OMP_SPLIT_DO
;
3550 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
3551 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
3552 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3553 innermost
= GFC_OMP_SPLIT_SIMD
;
3555 case EXEC_OMP_DISTRIBUTE_SIMD
:
3556 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3557 innermost
= GFC_OMP_SPLIT_SIMD
;
3560 innermost
= GFC_OMP_SPLIT_DO
;
3562 case EXEC_OMP_DO_SIMD
:
3563 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3564 innermost
= GFC_OMP_SPLIT_SIMD
;
3566 case EXEC_OMP_PARALLEL
:
3567 innermost
= GFC_OMP_SPLIT_PARALLEL
;
3569 case EXEC_OMP_PARALLEL_DO
:
3570 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3571 innermost
= GFC_OMP_SPLIT_DO
;
3573 case EXEC_OMP_PARALLEL_DO_SIMD
:
3574 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3575 innermost
= GFC_OMP_SPLIT_SIMD
;
3578 innermost
= GFC_OMP_SPLIT_SIMD
;
3580 case EXEC_OMP_TARGET
:
3581 innermost
= GFC_OMP_SPLIT_TARGET
;
3583 case EXEC_OMP_TARGET_TEAMS
:
3584 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
3585 innermost
= GFC_OMP_SPLIT_TEAMS
;
3587 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
3588 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
3589 | GFC_OMP_MASK_DISTRIBUTE
;
3590 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3592 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
3593 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3594 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3595 innermost
= GFC_OMP_SPLIT_DO
;
3597 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
3598 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3599 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3600 innermost
= GFC_OMP_SPLIT_SIMD
;
3602 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
3603 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
3604 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3605 innermost
= GFC_OMP_SPLIT_SIMD
;
3607 case EXEC_OMP_TEAMS
:
3608 innermost
= GFC_OMP_SPLIT_TEAMS
;
3610 case EXEC_OMP_TEAMS_DISTRIBUTE
:
3611 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
3612 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3614 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
3615 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3616 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3617 innermost
= GFC_OMP_SPLIT_DO
;
3619 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
3620 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3621 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3622 innermost
= GFC_OMP_SPLIT_SIMD
;
3624 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
3625 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3626 innermost
= GFC_OMP_SPLIT_SIMD
;
3633 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
3636 if (code
->ext
.omp_clauses
!= NULL
)
3638 if (mask
& GFC_OMP_MASK_TARGET
)
3640 /* First the clauses that are unique to some constructs. */
3641 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
3642 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
3643 clausesa
[GFC_OMP_SPLIT_TARGET
].device
3644 = code
->ext
.omp_clauses
->device
;
3646 if (mask
& GFC_OMP_MASK_TEAMS
)
3648 /* First the clauses that are unique to some constructs. */
3649 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
3650 = code
->ext
.omp_clauses
->num_teams
;
3651 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
3652 = code
->ext
.omp_clauses
->thread_limit
;
3653 /* Shared and default clauses are allowed on parallel and teams. */
3654 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
3655 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
3656 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
3657 = code
->ext
.omp_clauses
->default_sharing
;
3659 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
3661 /* First the clauses that are unique to some constructs. */
3662 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
3663 = code
->ext
.omp_clauses
->dist_sched_kind
;
3664 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
3665 = code
->ext
.omp_clauses
->dist_chunk_size
;
3666 /* Duplicate collapse. */
3667 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
3668 = code
->ext
.omp_clauses
->collapse
;
3670 if (mask
& GFC_OMP_MASK_PARALLEL
)
3672 /* First the clauses that are unique to some constructs. */
3673 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
3674 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
3675 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
3676 = code
->ext
.omp_clauses
->num_threads
;
3677 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
3678 = code
->ext
.omp_clauses
->proc_bind
;
3679 /* Shared and default clauses are allowed on parallel and teams. */
3680 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
3681 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
3682 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
3683 = code
->ext
.omp_clauses
->default_sharing
;
3685 if (mask
& GFC_OMP_MASK_DO
)
3687 /* First the clauses that are unique to some constructs. */
3688 clausesa
[GFC_OMP_SPLIT_DO
].ordered
3689 = code
->ext
.omp_clauses
->ordered
;
3690 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
3691 = code
->ext
.omp_clauses
->sched_kind
;
3692 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
3693 = code
->ext
.omp_clauses
->chunk_size
;
3694 clausesa
[GFC_OMP_SPLIT_DO
].nowait
3695 = code
->ext
.omp_clauses
->nowait
;
3696 /* Duplicate collapse. */
3697 clausesa
[GFC_OMP_SPLIT_DO
].collapse
3698 = code
->ext
.omp_clauses
->collapse
;
3700 if (mask
& GFC_OMP_MASK_SIMD
)
3702 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
3703 = code
->ext
.omp_clauses
->safelen_expr
;
3704 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LINEAR
]
3705 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
3706 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
3707 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
3708 /* Duplicate collapse. */
3709 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
3710 = code
->ext
.omp_clauses
->collapse
;
3712 /* Private clause is supported on all constructs but target,
3713 it is enough to put it on the innermost one. For
3714 !$ omp do put it on parallel though,
3715 as that's what we did for OpenMP 3.1. */
3716 clausesa
[innermost
== GFC_OMP_SPLIT_DO
3717 ? (int) GFC_OMP_SPLIT_PARALLEL
3718 : innermost
].lists
[OMP_LIST_PRIVATE
]
3719 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
3720 /* Firstprivate clause is supported on all constructs but
3721 target and simd. Put it on the outermost of those and
3722 duplicate on parallel. */
3723 if (mask
& GFC_OMP_MASK_TEAMS
)
3724 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
3725 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3726 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
3727 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
3728 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3729 if (mask
& GFC_OMP_MASK_PARALLEL
)
3730 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
3731 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3732 else if (mask
& GFC_OMP_MASK_DO
)
3733 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
3734 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3735 /* Lastprivate is allowed on do and simd. In
3736 parallel do{, simd} we actually want to put it on
3737 parallel rather than do. */
3738 if (mask
& GFC_OMP_MASK_PARALLEL
)
3739 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
3740 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3741 else if (mask
& GFC_OMP_MASK_DO
)
3742 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
3743 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3744 if (mask
& GFC_OMP_MASK_SIMD
)
3745 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
3746 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3747 /* Reduction is allowed on simd, do, parallel and teams.
3748 Duplicate it on all of them, but omit on do if
3749 parallel is present. */
3750 if (mask
& GFC_OMP_MASK_TEAMS
)
3751 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_REDUCTION
]
3752 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3753 if (mask
& GFC_OMP_MASK_PARALLEL
)
3754 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_REDUCTION
]
3755 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3756 else if (mask
& GFC_OMP_MASK_DO
)
3757 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_REDUCTION
]
3758 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3759 if (mask
& GFC_OMP_MASK_SIMD
)
3760 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_REDUCTION
]
3761 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3762 /* FIXME: This is currently being discussed. */
3763 if (mask
& GFC_OMP_MASK_PARALLEL
)
3764 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
3765 = code
->ext
.omp_clauses
->if_expr
;
3767 clausesa
[GFC_OMP_SPLIT_TARGET
].if_expr
3768 = code
->ext
.omp_clauses
->if_expr
;
3770 if ((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
3771 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
3772 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
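
/* Translate the DO SIMD part of (combined) constructs: the SIMD loop is
   built by gfc_trans_omp_do and then wrapped in an OMP_FOR node that
   carries the DO clauses.  */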
3776 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
3777 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
3780 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3781 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
3784 gfc_start_block (&block
);
3786 gfc_init_block (&block
);
3788 if (clausesa
== NULL
)
3790 clausesa
= clausesa_buf
;
3791 gfc_split_omp_clauses (code
, clausesa
);
3795 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
3796 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
3797 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
3800 if (TREE_CODE (body
) != BIND_EXPR
)
3801 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
3805 else if (TREE_CODE (body
) != BIND_EXPR
)
3806 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
3809 stmt
= make_node (OMP_FOR
);
3810 TREE_TYPE (stmt
) = void_type_node
;
3811 OMP_FOR_BODY (stmt
) = body
;
3812 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
3816 gfc_add_expr_to_block (&block
, stmt
);
3817 return gfc_finish_block (&block
);
3821 gfc_trans_omp_parallel_do (gfc_code
*code
, stmtblock_t
*pblock
,
3822 gfc_omp_clauses
*clausesa
)
3824 stmtblock_t block
, *new_pblock
= pblock
;
3825 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3826 tree stmt
, omp_clauses
= NULL_TREE
;
3829 gfc_start_block (&block
);
3831 gfc_init_block (&block
);
3833 if (clausesa
== NULL
)
3835 clausesa
= clausesa_buf
;
3836 gfc_split_omp_clauses (code
, clausesa
);
3839 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
3843 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
3844 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
3845 new_pblock
= &block
;
3849 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DO
, new_pblock
,
3850 &clausesa
[GFC_OMP_SPLIT_DO
], omp_clauses
);
3853 if (TREE_CODE (stmt
) != BIND_EXPR
)
3854 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3858 else if (TREE_CODE (stmt
) != BIND_EXPR
)
3859 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
3860 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3862 OMP_PARALLEL_COMBINED (stmt
) = 1;
3863 gfc_add_expr_to_block (&block
, stmt
);
3864 return gfc_finish_block (&block
);
3868 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
3869 gfc_omp_clauses
*clausesa
)
3872 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3873 tree stmt
, omp_clauses
= NULL_TREE
;
3876 gfc_start_block (&block
);
3878 gfc_init_block (&block
);
3880 if (clausesa
== NULL
)
3882 clausesa
= clausesa_buf
;
3883 gfc_split_omp_clauses (code
, clausesa
);
3887 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
3891 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
3894 if (TREE_CODE (stmt
) != BIND_EXPR
)
3895 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3899 else if (TREE_CODE (stmt
) != BIND_EXPR
)
3900 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
3903 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3905 OMP_PARALLEL_COMBINED (stmt
) = 1;
3907 gfc_add_expr_to_block (&block
, stmt
);
3908 return gfc_finish_block (&block
);
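
/* Translate !$OMP PARALLEL SECTIONS: an OMP_SECTIONS body is built with
   an implicit NOWAIT and nested inside an OMP_PARALLEL node marked as
   combined.  */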
static tree
gfc_trans_omp_parallel_sections (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses section_clauses;
  tree stmt, omp_clauses;

  memset (&section_clauses, 0, sizeof (section_clauses));
  section_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel ();
  stmt = gfc_trans_omp_sections (code, &section_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  else
    poplevel (0, 0);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_parallel_workshare (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses workshare_clauses;
  tree stmt, omp_clauses;

  memset (&workshare_clauses, 0, sizeof (workshare_clauses));
  workshare_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel ();
  stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  else
    poplevel (0, 0);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
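
/* Translate !$OMP SECTIONS: each section body becomes an OMP_SECTION;
   the last section is chained in even when empty so that LASTPRIVATE is
   handled correctly.  */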
static tree
gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
{
  stmtblock_t block, body;
  tree omp_clauses, stmt;
  bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;

  gfc_start_block (&block);

  omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);

  gfc_init_block (&body);
  for (code = code->block; code; code = code->block)
    {
      /* Last section is special because of lastprivate, so even if it
         is empty, chain it in.  */
      stmt = gfc_trans_omp_code (code->next,
                                 has_lastprivate && code->block == NULL);
      if (! IS_EMPTY_STMT (stmt))
        {
          stmt = build1_v (OMP_SECTION, stmt);
          gfc_add_expr_to_block (&body, stmt);
        }
    }
  stmt = gfc_finish_block (&body);

  stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}
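
/* Translate !$OMP SINGLE into an OMP_SINGLE node with its clauses and
   body.  */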
static tree
gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
  tree stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
                     omp_clauses);
  return stmt;
}

static tree
gfc_trans_omp_task (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_taskgroup (gfc_code *code)
{
  tree stmt = gfc_trans_code (code->block->next);
  return build1_loc (input_location, OMP_TASKGROUP, void_type_node, stmt);
}

static tree
gfc_trans_omp_taskwait (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
  return build_call_expr_loc (input_location, decl, 0);
}

static tree
gfc_trans_omp_taskyield (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
  return build_call_expr_loc (input_location, decl, 0);
}

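/* Translate the distribute part of a combined construct.  The distribute
   clauses are split out, the nested construct (parallel do, parallel do simd
   or simd) is translated first, and the result is wrapped in an
   OMP_DISTRIBUTE node carrying the split distribute clauses.  */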
static tree
gfc_trans_omp_distribute (gfc_code *code, gfc_omp_clauses *clausesa)
{
  stmtblock_t block;
  gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
  tree stmt, omp_clauses = NULL_TREE;

  gfc_start_block (&block);
  if (clausesa == NULL)
    {
      clausesa = clausesa_buf;
      gfc_split_omp_clauses (code, clausesa);
    }
  if (gfc_option.gfc_flag_openmp)
    omp_clauses
      = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_DISTRIBUTE],
                               code->loc);
  switch (code->op)
    {
    case EXEC_OMP_DISTRIBUTE:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
    case EXEC_OMP_TEAMS_DISTRIBUTE:
      /* This is handled in gfc_trans_omp_do.  */
      gcc_unreachable ();
      break;
    case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
    case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
      stmt = gfc_trans_omp_parallel_do (code, &block, clausesa);
      if (TREE_CODE (stmt) != BIND_EXPR)
        stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
      else
        poplevel (0, 0);
      break;
    case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
    case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
      stmt = gfc_trans_omp_parallel_do_simd (code, &block, clausesa);
      if (TREE_CODE (stmt) != BIND_EXPR)
        stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
      else
        poplevel (0, 0);
      break;
    case EXEC_OMP_DISTRIBUTE_SIMD:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
    case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
      stmt = gfc_trans_omp_do (code, EXEC_OMP_SIMD, &block,
                               &clausesa[GFC_OMP_SPLIT_SIMD], NULL_TREE);
      if (TREE_CODE (stmt) != BIND_EXPR)
        stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
      else
        poplevel (0, 0);
      break;
    default:
      gcc_unreachable ();
    }
  if (gfc_option.gfc_flag_openmp)
    {
      tree distribute = make_node (OMP_DISTRIBUTE);
      TREE_TYPE (distribute) = void_type_node;
      OMP_FOR_BODY (distribute) = stmt;
      OMP_FOR_CLAUSES (distribute) = omp_clauses;
      stmt = distribute;
    }
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

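/* Translate the teams part of a (possibly combined) teams construct: the
   teams clauses are split out and the nested distribute/parallel/simd part,
   if any, becomes the body of an OMP_TEAMS node.  */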
static tree
gfc_trans_omp_teams (gfc_code *code, gfc_omp_clauses *clausesa)
{
  stmtblock_t block;
  gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
  tree stmt, omp_clauses = NULL_TREE;

  gfc_start_block (&block);
  if (clausesa == NULL)
    {
      clausesa = clausesa_buf;
      gfc_split_omp_clauses (code, clausesa);
    }
  if (gfc_option.gfc_flag_openmp)
    omp_clauses
      = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TEAMS],
                               code->loc);
  switch (code->op)
    {
    case EXEC_OMP_TARGET_TEAMS:
    case EXEC_OMP_TEAMS:
      stmt = gfc_trans_omp_code (code->block->next, true);
      break;
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
    case EXEC_OMP_TEAMS_DISTRIBUTE:
      stmt = gfc_trans_omp_do (code, EXEC_OMP_DISTRIBUTE, NULL,
                               &clausesa[GFC_OMP_SPLIT_DISTRIBUTE],
                               NULL);
      break;
    default:
      stmt = gfc_trans_omp_distribute (code, clausesa);
      break;
    }
  stmt = build2_loc (input_location, OMP_TEAMS, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

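/* Translate !$omp target and the combined target teams variants: the target
   clauses are split out, the body (or the nested teams construct) is
   translated, and the result is wrapped in an OMP_TARGET node.  */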
static tree
gfc_trans_omp_target (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM];
  tree stmt, omp_clauses = NULL_TREE;

  gfc_start_block (&block);
  gfc_split_omp_clauses (code, clausesa);
  if (gfc_option.gfc_flag_openmp)
    omp_clauses
      = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TARGET],
                               code->loc);
  if (code->op == EXEC_OMP_TARGET)
    stmt = gfc_trans_omp_code (code->block->next, true);
  else
    stmt = gfc_trans_omp_teams (code, clausesa);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
  if (gfc_option.gfc_flag_openmp)
    stmt = build2_loc (input_location, OMP_TARGET, void_type_node, stmt,
                       omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_target_data (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_TARGET_DATA, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_target_update (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = build1_loc (input_location, OMP_TARGET_UPDATE, void_type_node,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

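/* Translate !$omp workshare.  For illustration only, in

       !$omp workshare
       a(:) = b(:) + 1        ! workshared by the scalarizer
       s = 0.0                ! scalar work, executed by a single thread
       !$omp end workshare

   the array assignment is emitted as workshared loops, while runs of
   statements that cannot be workshared (such as the scalar assignment) are
   clustered into an OMP_SINGLE region by the loop below.  */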
static tree
gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree res, tmp, stmt;
  stmtblock_t block, *pblock = NULL;
  stmtblock_t singleblock;
  int saved_ompws_flags;
  bool singleblock_in_progress = false;
  /* True if previous gfc_code in workshare construct is not workshared.  */
  bool prev_singleunit;

  code = code->block->next;

  pushlevel ();

  gfc_start_block (&block);
  pblock = &block;

  ompws_flags = OMPWS_WORKSHARE_FLAG;
  prev_singleunit = false;

  /* Translate statements one by one to trees until we reach
     the end of the workshare construct.  Adjacent gfc_codes that
     are a single unit of work are clustered and encapsulated in a
     single OMP_SINGLE construct.  */
  for (; code; code = code->next)
    {
      if (code->here != 0)
        {
          res = gfc_trans_label_here (code);
          gfc_add_expr_to_block (pblock, res);
        }

      /* No dependence analysis, use for clauses with wait.
         If this is the last gfc_code, use default omp_clauses.  */
      if (code->next == NULL && clauses->nowait)
        ompws_flags |= OMPWS_NOWAIT;

      /* By default, every gfc_code is a single unit of work.  */
      ompws_flags |= OMPWS_CURR_SINGLEUNIT;
      ompws_flags &= ~OMPWS_SCALARIZER_WS;

      switch (code->op)
        {
        case EXEC_NOP:
          res = NULL_TREE;
          break;

        case EXEC_ASSIGN:
          res = gfc_trans_assign (code);
          break;

        case EXEC_POINTER_ASSIGN:
          res = gfc_trans_pointer_assign (code);
          break;

        case EXEC_INIT_ASSIGN:
          res = gfc_trans_init_assign (code);
          break;

        case EXEC_FORALL:
          res = gfc_trans_forall (code);
          break;

        case EXEC_WHERE:
          res = gfc_trans_where (code);
          break;

        case EXEC_OMP_ATOMIC:
          res = gfc_trans_omp_directive (code);
          break;

        case EXEC_OMP_PARALLEL:
        case EXEC_OMP_PARALLEL_DO:
        case EXEC_OMP_PARALLEL_SECTIONS:
        case EXEC_OMP_PARALLEL_WORKSHARE:
        case EXEC_OMP_CRITICAL:
          saved_ompws_flags = ompws_flags;
          ompws_flags = 0;
          res = gfc_trans_omp_directive (code);
          ompws_flags = saved_ompws_flags;
          break;

        default:
          gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
        }

      gfc_set_backend_locus (&code->loc);

      if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
        {
          if (prev_singleunit)
            {
              if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
                /* Add current gfc_code to single block.  */
                gfc_add_expr_to_block (&singleblock, res);
              else
                {
                  /* Finish single block and add it to pblock.  */
                  tmp = gfc_finish_block (&singleblock);
                  tmp = build2_loc (input_location, OMP_SINGLE,
                                    void_type_node, tmp, NULL_TREE);
                  gfc_add_expr_to_block (pblock, tmp);
                  /* Add current gfc_code to pblock.  */
                  gfc_add_expr_to_block (pblock, res);
                  singleblock_in_progress = false;
                }
            }
          else
            {
              if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
                {
                  /* Start single block.  */
                  gfc_init_block (&singleblock);
                  gfc_add_expr_to_block (&singleblock, res);
                  singleblock_in_progress = true;
                }
              else
                /* Add the new statement to the block.  */
                gfc_add_expr_to_block (pblock, res);
            }
          prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
        }
    }

  /* Finish remaining SINGLE block, if we were in the middle of one.  */
  if (singleblock_in_progress)
    {
      /* Finish single block and add it to pblock.  */
      tmp = gfc_finish_block (&singleblock);
      tmp = build2_loc (input_location, OMP_SINGLE, void_type_node, tmp,
                        clauses->nowait
                        ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
                        : NULL_TREE);
      gfc_add_expr_to_block (pblock, tmp);
    }

  stmt = gfc_finish_block (pblock);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt))
        {
          tree bindblock = poplevel (1, 0);
          stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
        }
      else
        poplevel (0, 0);
    }
  else
    poplevel (0, 0);

  if (IS_EMPTY_STMT (stmt) && !clauses->nowait)
    stmt = gfc_trans_omp_barrier ();

  ompws_flags = 0;
  return stmt;
}

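/* Translate the clauses of an OpenACC declare directive attached to a
   namespace into an OACC_DECLARE node.  */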
tree
gfc_trans_oacc_declare (stmtblock_t *block, gfc_namespace *ns)
{
  tree oacc_clauses;
  oacc_clauses = gfc_trans_omp_clauses (block, ns->oacc_declare_clauses,
                                        ns->oacc_declare_clauses->loc);
  return build1_loc (ns->oacc_declare_clauses->loc.lb->location,
                     OACC_DECLARE, void_type_node, oacc_clauses);
}

tree
gfc_trans_oacc_directive (gfc_code *code)
{
  switch (code->op)
    {
    case EXEC_OACC_PARALLEL_LOOP:
    case EXEC_OACC_KERNELS_LOOP:
      return gfc_trans_oacc_combined_directive (code);
    case EXEC_OACC_PARALLEL:
    case EXEC_OACC_KERNELS:
    case EXEC_OACC_DATA:
    case EXEC_OACC_HOST_DATA:
      return gfc_trans_oacc_construct (code);
    case EXEC_OACC_LOOP:
      return gfc_trans_omp_do (code, code->op, NULL, code->ext.omp_clauses,
                               NULL);
    case EXEC_OACC_UPDATE:
    case EXEC_OACC_CACHE:
    case EXEC_OACC_ENTER_DATA:
    case EXEC_OACC_EXIT_DATA:
      return gfc_trans_oacc_executable_directive (code);
    case EXEC_OACC_WAIT:
      return gfc_trans_oacc_wait_directive (code);
    default:
      gcc_unreachable ();
    }
}

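/* Central dispatcher for OpenMP directives: map the executable statement
   code of the directive to the specific translation routine above.  */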
tree
gfc_trans_omp_directive (gfc_code *code)
{
  switch (code->op)
    {
    case EXEC_OMP_ATOMIC:
      return gfc_trans_omp_atomic (code);
    case EXEC_OMP_BARRIER:
      return gfc_trans_omp_barrier ();
    case EXEC_OMP_CANCEL:
      return gfc_trans_omp_cancel (code);
    case EXEC_OMP_CANCELLATION_POINT:
      return gfc_trans_omp_cancellation_point (code);
    case EXEC_OMP_CRITICAL:
      return gfc_trans_omp_critical (code);
    case EXEC_OMP_DISTRIBUTE:
    case EXEC_OMP_DO:
    case EXEC_OMP_SIMD:
      return gfc_trans_omp_do (code, code->op, NULL, code->ext.omp_clauses,
                               NULL);
    case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
    case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
    case EXEC_OMP_DISTRIBUTE_SIMD:
      return gfc_trans_omp_distribute (code, NULL);
    case EXEC_OMP_DO_SIMD:
      return gfc_trans_omp_do_simd (code, NULL, NULL, NULL_TREE);
    case EXEC_OMP_FLUSH:
      return gfc_trans_omp_flush ();
    case EXEC_OMP_MASTER:
      return gfc_trans_omp_master (code);
    case EXEC_OMP_ORDERED:
      return gfc_trans_omp_ordered (code);
    case EXEC_OMP_PARALLEL:
      return gfc_trans_omp_parallel (code);
    case EXEC_OMP_PARALLEL_DO:
      return gfc_trans_omp_parallel_do (code, NULL, NULL);
    case EXEC_OMP_PARALLEL_DO_SIMD:
      return gfc_trans_omp_parallel_do_simd (code, NULL, NULL);
    case EXEC_OMP_PARALLEL_SECTIONS:
      return gfc_trans_omp_parallel_sections (code);
    case EXEC_OMP_PARALLEL_WORKSHARE:
      return gfc_trans_omp_parallel_workshare (code);
    case EXEC_OMP_SECTIONS:
      return gfc_trans_omp_sections (code, code->ext.omp_clauses);
    case EXEC_OMP_SINGLE:
      return gfc_trans_omp_single (code, code->ext.omp_clauses);
    case EXEC_OMP_TARGET:
    case EXEC_OMP_TARGET_TEAMS:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
      return gfc_trans_omp_target (code);
    case EXEC_OMP_TARGET_DATA:
      return gfc_trans_omp_target_data (code);
    case EXEC_OMP_TARGET_UPDATE:
      return gfc_trans_omp_target_update (code);
    case EXEC_OMP_TASK:
      return gfc_trans_omp_task (code);
    case EXEC_OMP_TASKGROUP:
      return gfc_trans_omp_taskgroup (code);
    case EXEC_OMP_TASKWAIT:
      return gfc_trans_omp_taskwait ();
    case EXEC_OMP_TASKYIELD:
      return gfc_trans_omp_taskyield ();
    case EXEC_OMP_TEAMS:
    case EXEC_OMP_TEAMS_DISTRIBUTE:
    case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
    case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
    case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
      return gfc_trans_omp_teams (code, NULL);
    case EXEC_OMP_WORKSHARE:
      return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
    default:
      gcc_unreachable ();
    }
}

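/* Attach an "omp declare simd" attribute, with the translated clauses as its
   argument, to the backend decl of the containing procedure for each
   !$omp declare simd directive recorded in the namespace.  */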
void
gfc_trans_omp_declare_simd (gfc_namespace *ns)
{
  if (ns->entries)
    return;

  gfc_omp_declare_simd *ods;
  for (ods = ns->omp_declare_simd; ods; ods = ods->next)
    {
      tree c = gfc_trans_omp_clauses (NULL, ods->clauses, ods->where, true);
      tree fndecl = ns->proc_name->backend_decl;
      if (c != NULL_TREE)
        c = tree_cons (NULL_TREE, c, NULL_TREE);
      c = build_tree_list (get_identifier ("omp declare simd"), c);
      TREE_CHAIN (c) = DECL_ATTRIBUTES (fndecl);
      DECL_ATTRIBUTES (fndecl) = c;
    }
}