/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple-expr.h"
#include "gimplify.h"	/* For create_tmp_var_raw.  */
#include "stringpool.h"
#include "gfortran.h"
#include "diagnostic-core.h"	/* For internal_error.  */
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
gfc_omp_privatize_by_reference (const_tree decl)
{
  tree type = TREE_TYPE (decl);

  if (TREE_CODE (type) == REFERENCE_TYPE
      && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
    return true;

  if (TREE_CODE (type) == POINTER_TYPE)
    {
      /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
	 that have POINTER_TYPE type and aren't scalar pointers, scalar
	 allocatables, Cray pointees or C pointers are supposed to be
	 privatized by reference.  */
      if (GFC_DECL_GET_SCALAR_POINTER (decl)
	  || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
	  || GFC_DECL_CRAY_POINTEE (decl)
	  || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
	return false;

      if (!DECL_ARTIFICIAL (decl)
	  && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
	return true;

      /* Some arrays are expanded as DECL_ARTIFICIAL pointers
	 by the frontend.  */
      if (DECL_LANG_SPECIFIC (decl)
	  && GFC_DECL_SAVED_DESCRIPTOR (decl))
	return true;
    }

  return false;
}
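/* Illustrative only: a Fortran dummy argument such as

     subroutine sub (x)
       integer :: x
       !$omp parallel private(x)
       !$omp end parallel
     end subroutine sub

   is passed by reference, so it is the pointed-to object rather than the
   PARM_DECL itself that the privatization above applies to.  */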
/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
gfc_omp_predetermined_sharing (tree decl)
{
  /* Associate names preserve the association established during ASSOCIATE.
     As they are implemented either as pointers to the selector or array
     descriptor and shouldn't really change in the ASSOCIATE region,
     this decl can be either shared or firstprivate.  If it is a pointer,
     use firstprivate, as it is cheaper that way, otherwise make it shared.  */
  if (GFC_DECL_ASSOCIATE_VAR_P (decl))
    {
      if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
	return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
      else
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  if (DECL_ARTIFICIAL (decl)
      && ! GFC_DECL_RESULT (decl)
      && ! (DECL_LANG_SPECIFIC (decl)
	    && GFC_DECL_SAVED_DESCRIPTOR (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Cray pointees shouldn't be listed in any clauses and should be
     gimplified to dereference of the corresponding Cray pointer.
     Make them all private, so that they are emitted in the debug
     information.  */
  if (GFC_DECL_CRAY_POINTEE (decl))
    return OMP_CLAUSE_DEFAULT_PRIVATE;

  /* Assumed-size arrays are predetermined shared.  */
  if (TREE_CODE (decl) == PARM_DECL
      && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
      && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
				GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
	 == NULL)
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Dummy procedures aren't considered variables by OpenMP, thus are
     disallowed in OpenMP clauses.  They are represented as PARM_DECLs
     in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
     to avoid complaining about their uses with default(none).  */
  if (TREE_CODE (decl) == PARM_DECL
      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;

  /* COMMON and EQUIVALENCE decls are shared.  They
     are only referenced through DECL_VALUE_EXPR of the variables
     contained in them.  If those are privatized, they will not be
     gimplified to the COMMON or EQUIVALENCE decls.  */
  if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* These are either array or derived parameters, or vtables.
     In the former cases, the OpenMP standard doesn't consider them to be
     variables at all (they can't be redefined), but they can nevertheless appear
     in parallel/task regions and for default(none) purposes treat them as shared.
     For vtables likely the same handling is desirable.  */
  if (TREE_CODE (decl) == VAR_DECL
      && TREE_READONLY (decl)
      && TREE_STATIC (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
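/* Illustrative only: in

     subroutine sub (a)
       integer :: a(*)
       !$omp parallel default(none)
       !$omp end parallel
     end subroutine sub

   the assumed-size dummy A is predetermined shared, so no data-sharing
   clause is required for it even under DEFAULT(NONE).  */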
/* Return decl that should be used when reporting DEFAULT(NONE)
   diagnostics.  */

tree
gfc_omp_report_decl (tree decl)
{
  if (DECL_ARTIFICIAL (decl)
      && DECL_LANG_SPECIFIC (decl)
      && GFC_DECL_SAVED_DESCRIPTOR (decl))
    return GFC_DECL_SAVED_DESCRIPTOR (decl);

  return decl;
}
/* Return true if TYPE has any allocatable components.  */

static bool
gfc_has_alloc_comps (tree type, tree decl)
{
  tree field, ftype;

  if (POINTER_TYPE_P (type))
    {
      if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
	type = TREE_TYPE (type);
      else if (GFC_DECL_GET_SCALAR_POINTER (decl))
	return false;
    }

  while (GFC_DESCRIPTOR_TYPE_P (type) || GFC_ARRAY_TYPE_P (type))
    type = gfc_get_element_type (type);

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      ftype = TREE_TYPE (field);
      if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	return true;
      if (GFC_DESCRIPTOR_TYPE_P (ftype)
	  && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	return true;
      if (gfc_has_alloc_comps (ftype, field))
	return true;
    }
  return false;
}
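/* Illustrative only: a derived type such as

     type t
       integer, allocatable :: a(:)
     end type t

   has an allocatable component, so privatizing a variable of type T needs
   the deep-copy and deep-free helpers below rather than a plain bitwise
   copy.  */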
/* Return true if DECL in private clause needs
   OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause.  */

bool
gfc_omp_private_outer_ref (tree decl)
{
  tree type = TREE_TYPE (decl);

  if (GFC_DESCRIPTOR_TYPE_P (type)
      && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
    return true;

  if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
    return true;

  if (gfc_omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  if (gfc_has_alloc_comps (type, decl))
    return true;

  return false;
}
/* Callback for gfc_omp_unshare_expr.  */

static tree
gfc_omp_unshare_expr_r (tree *tp, int *walk_subtrees, void *)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Stop at types, decls, constants like copy_tree_r.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant
      || code == BLOCK)
    *walk_subtrees = 0;
  else if (handled_component_p (t)
	   || TREE_CODE (t) == MEM_REF)
    {
      *tp = unshare_expr (t);
      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* Unshare in expr anything that the FE which normally doesn't
   care much about tree sharing (because during gimplification
   everything is unshared) could cause problems with tree sharing
   at omp-low.c time.  */

static tree
gfc_omp_unshare_expr (tree expr)
{
  walk_tree (&expr, gfc_omp_unshare_expr_r, NULL, NULL);
  return expr;
}
enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,
  WALK_ALLOC_COMPS_DEFAULT_CTOR,
  WALK_ALLOC_COMPS_COPY_CTOR
};
/* Handle allocatable components in OpenMP clauses.  */

static tree
gfc_walk_alloc_comps (tree decl, tree dest, tree var,
		      enum walk_alloc_comps kind)
{
  stmtblock_t block, tmpblock;
  tree type = TREE_TYPE (decl), then_b, tem, field;
  gfc_init_block (&block);

  if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
    {
      if (GFC_DESCRIPTOR_TYPE_P (type))
	{
	  gfc_init_block (&tmpblock);
	  tem = gfc_full_array_size (&tmpblock, decl,
				     GFC_TYPE_ARRAY_RANK (type));
	  then_b = gfc_finish_block (&tmpblock);
	  gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (then_b));
	  tem = gfc_omp_unshare_expr (tem);
	  tem = fold_build2_loc (input_location, MINUS_EXPR,
				 gfc_array_index_type, tem,
				 gfc_index_one_node);
	}
      else
	{
	  if (!TYPE_DOMAIN (type)
	      || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
	      || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
	      || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
	    {
	      tem = fold_build2 (EXACT_DIV_EXPR, sizetype,
				 TYPE_SIZE_UNIT (type),
				 TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      tem = size_binop (MINUS_EXPR, tem, size_one_node);
	    }
	  else
	    tem = array_type_nelts (type);
	  tem = fold_convert (gfc_array_index_type, tem);
	}

      tree nelems = gfc_evaluate_now (tem, &block);
      tree index = gfc_create_var (gfc_array_index_type, "S");

      gfc_init_block (&tmpblock);
      tem = gfc_conv_array_data (decl);
      tree declvar = build_fold_indirect_ref_loc (input_location, tem);
      tree declvref = gfc_build_array_ref (declvar, index, NULL);
      tree destvar, destvref = NULL_TREE;
      if (dest)
	{
	  tem = gfc_conv_array_data (dest);
	  destvar = build_fold_indirect_ref_loc (input_location, tem);
	  destvref = gfc_build_array_ref (destvar, index, NULL);
	}
      gfc_add_expr_to_block (&tmpblock,
			     gfc_walk_alloc_comps (declvref, destvref,
						   var, kind));

      gfc_loopinfo loop;
      gfc_init_loopinfo (&loop);
      loop.dimen = 1;
      loop.from[0] = gfc_index_zero_node;
      loop.loopvar[0] = index;
      loop.to[0] = nelems;
      gfc_trans_scalarizing_loops (&loop, &tmpblock);
      gfc_add_block_to_block (&block, &loop.pre);
      return gfc_finish_block (&block);
    }
  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var))
    {
      decl = build_fold_indirect_ref_loc (input_location, decl);
      if (dest)
	dest = build_fold_indirect_ref_loc (input_location, dest);
      type = TREE_TYPE (decl);
    }

  gcc_assert (TREE_CODE (type) == RECORD_TYPE);
  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree ftype = TREE_TYPE (field);
      tree declf, destf = NULL_TREE;
      bool has_alloc_comps = gfc_has_alloc_comps (ftype, field);
      if ((!GFC_DESCRIPTOR_TYPE_P (ftype)
	   || GFC_TYPE_ARRAY_AKIND (ftype) != GFC_ARRAY_ALLOCATABLE)
	  && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field)
	  && !has_alloc_comps)
	continue;
      declf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
			       decl, field, NULL_TREE);
      if (dest)
	destf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
				 dest, field, NULL_TREE);

      tem = NULL_TREE;
      switch (kind)
	{
	case WALK_ALLOC_COMPS_DTOR:
	  break;
	case WALK_ALLOC_COMPS_DEFAULT_CTOR:
	  if (GFC_DESCRIPTOR_TYPE_P (ftype)
	      && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	    {
	      gfc_add_modify (&block, unshare_expr (destf),
			      unshare_expr (declf));
	      tem = gfc_duplicate_allocatable_nocopy
					(destf, declf, ftype,
					 GFC_TYPE_ARRAY_RANK (ftype));
	    }
	  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	    tem = gfc_duplicate_allocatable_nocopy (destf, declf, ftype, 0);
	  break;
	case WALK_ALLOC_COMPS_COPY_CTOR:
	  if (GFC_DESCRIPTOR_TYPE_P (ftype)
	      && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	    tem = gfc_duplicate_allocatable (destf, declf, ftype,
					     GFC_TYPE_ARRAY_RANK (ftype));
	  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	    tem = gfc_duplicate_allocatable (destf, declf, ftype, 0);
	  break;
	}
      if (tem)
	gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
      if (has_alloc_comps)
	{
	  gfc_init_block (&tmpblock);
	  gfc_add_expr_to_block (&tmpblock,
				 gfc_walk_alloc_comps (declf, destf,
						       field, kind));
	  then_b = gfc_finish_block (&tmpblock);
	  if (GFC_DESCRIPTOR_TYPE_P (ftype)
	      && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	    tem = gfc_conv_descriptor_data_get (unshare_expr (declf));
	  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	    tem = unshare_expr (declf);
	  else
	    tem = NULL_TREE;
	  if (tem)
	    {
	      tem = fold_convert (pvoid_type_node, tem);
	      tem = fold_build2_loc (input_location, NE_EXPR,
				     boolean_type_node, tem,
				     null_pointer_node);
	      then_b = build3_loc (input_location, COND_EXPR, void_type_node,
				   tem, then_b,
				   build_empty_stmt (input_location));
	    }
	  gfc_add_expr_to_block (&block, then_b);
	}
      if (kind == WALK_ALLOC_COMPS_DTOR)
	{
	  if (GFC_DESCRIPTOR_TYPE_P (ftype)
	      && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
	    {
	      tem = gfc_trans_dealloc_allocated (unshare_expr (declf),
						 false, NULL);
	      gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
	    }
	  else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
	    {
	      tem = gfc_call_free (unshare_expr (declf));
	      gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
	    }
	}
    }

  return gfc_finish_block (&block);
}
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
{
  tree type = TREE_TYPE (decl), size, ptr, cond, then_b, else_b;
  stmtblock_t block, cond_block;

  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
	      || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE
	      || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR
	      || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_REDUCTION);

  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
    {
      if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
	{
	  gcc_assert (outer);
	  gfc_start_block (&block);
	  tree tem = gfc_walk_alloc_comps (outer, decl,
					   OMP_CLAUSE_DECL (clause),
					   WALK_ALLOC_COMPS_DEFAULT_CTOR);
	  gfc_add_expr_to_block (&block, tem);
	  return gfc_finish_block (&block);
	}
      return NULL_TREE;
    }

  gcc_assert (outer != NULL_TREE);

  /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
     "not currently allocated" allocation status if outer
     array is "not currently allocated", otherwise should be allocated.  */
  gfc_start_block (&block);

  gfc_init_block (&cond_block);

  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      gfc_add_modify (&cond_block, decl, outer);
      tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (decl, rank);
      size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			      size,
			      gfc_conv_descriptor_lbound_get (decl, rank));
      size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			      size, gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
	size = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type, size,
				gfc_conv_descriptor_stride_get (decl, rank));
      tree esize = fold_convert (gfc_array_index_type,
				 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      size, esize);
      size = unshare_expr (size);
      size = gfc_evaluate_now (fold_convert (size_type_node, size),
			       &cond_block);
    }
  else
    size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
  ptr = gfc_create_var (pvoid_type_node, NULL);
  gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl), ptr);
  else
    gfc_add_modify (&cond_block, unshare_expr (decl),
		    fold_convert (TREE_TYPE (decl), ptr));
  if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
    {
      tree tem = gfc_walk_alloc_comps (outer, decl,
				       OMP_CLAUSE_DECL (clause),
				       WALK_ALLOC_COMPS_DEFAULT_CTOR);
      gfc_add_expr_to_block (&cond_block, tem);
    }
  then_b = gfc_finish_block (&cond_block);

  /* Reduction clause requires allocated ALLOCATABLE.  */
  if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_REDUCTION)
    {
      gfc_init_block (&cond_block);
      if (GFC_DESCRIPTOR_TYPE_P (type))
	gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl),
				      null_pointer_node);
      else
	gfc_add_modify (&cond_block, unshare_expr (decl),
			build_zero_cst (TREE_TYPE (decl)));
      else_b = gfc_finish_block (&cond_block);

      tree tem = fold_convert (pvoid_type_node,
			       GFC_DESCRIPTOR_TYPE_P (type)
			       ? gfc_conv_descriptor_data_get (outer) : outer);
      tem = unshare_expr (tem);
      cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			      tem, null_pointer_node);
      gfc_add_expr_to_block (&block,
			     build3_loc (input_location, COND_EXPR,
					 void_type_node, cond, then_b,
					 else_b));
    }
  else
    gfc_add_expr_to_block (&block, then_b);

  return gfc_finish_block (&block);
}
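/* Illustrative only: for

     integer, allocatable :: a(:)
     !$omp parallel private(a)

   each thread's private A must mirror the allocation status of the outer
   A on entry, which is what the conditional built above implements.  */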
/* Build and return code for a copy constructor from SRC to DEST.  */

tree
gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), ptr, size, call;
  tree cond, then_b, else_b;
  stmtblock_t block, cond_block;

  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE
	      || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);

  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
    {
      if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
	{
	  gfc_start_block (&block);
	  gfc_add_modify (&block, dest, src);
	  tree tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
					   WALK_ALLOC_COMPS_COPY_CTOR);
	  gfc_add_expr_to_block (&block, tem);
	  return gfc_finish_block (&block);
	}
      else
	return build2_v (MODIFY_EXPR, dest, src);
    }

  /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
     and copied from SRC.  */
  gfc_start_block (&block);

  gfc_init_block (&cond_block);

  gfc_add_modify (&cond_block, dest, src);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (dest, rank);
      size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			      size,
			      gfc_conv_descriptor_lbound_get (dest, rank));
      size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			      size, gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
	size = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type, size,
				gfc_conv_descriptor_stride_get (dest, rank));
      tree esize = fold_convert (gfc_array_index_type,
				 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      size, esize);
      size = unshare_expr (size);
      size = gfc_evaluate_now (fold_convert (size_type_node, size),
			       &cond_block);
    }
  else
    size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
  ptr = gfc_create_var (pvoid_type_node, NULL);
  gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest), ptr);
  else
    gfc_add_modify (&cond_block, unshare_expr (dest),
		    fold_convert (TREE_TYPE (dest), ptr));

  tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
		? gfc_conv_descriptor_data_get (src) : src;
  srcptr = unshare_expr (srcptr);
  srcptr = fold_convert (pvoid_type_node, srcptr);
  call = build_call_expr_loc (input_location,
			      builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
			      srcptr, size);
  gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
  if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
    {
      tree tem = gfc_walk_alloc_comps (src, dest,
				       OMP_CLAUSE_DECL (clause),
				       WALK_ALLOC_COMPS_COPY_CTOR);
      gfc_add_expr_to_block (&cond_block, tem);
    }
  then_b = gfc_finish_block (&cond_block);

  gfc_init_block (&cond_block);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest),
				  null_pointer_node);
  else
    gfc_add_modify (&cond_block, unshare_expr (dest),
		    build_zero_cst (TREE_TYPE (dest)));
  else_b = gfc_finish_block (&cond_block);

  cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			  unshare_expr (srcptr), null_pointer_node);
  gfc_add_expr_to_block (&block,
			 build3_loc (input_location, COND_EXPR,
				     void_type_node, cond, then_b, else_b));

  return gfc_finish_block (&block);
}
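/* Illustrative only: for

     integer, allocatable :: a(:)
     !$omp parallel firstprivate(a)

   the private copy is malloc'ed and memcpy'ed from the original when the
   original is allocated, and left unallocated otherwise.  */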
/* Similarly, except use an intrinsic or pointer assignment operator
   instead.  */

tree
gfc_omp_clause_assign_op (tree clause, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), ptr, size, call, nonalloc;
  tree cond, then_b, else_b;
  stmtblock_t block, cond_block, cond_block2, inner_block;

  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
    {
      if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
	{
	  gfc_start_block (&block);
	  /* First dealloc any allocatable components in DEST.  */
	  tree tem = gfc_walk_alloc_comps (dest, NULL_TREE,
					   OMP_CLAUSE_DECL (clause),
					   WALK_ALLOC_COMPS_DTOR);
	  gfc_add_expr_to_block (&block, tem);
	  /* Then copy over toplevel data.  */
	  gfc_add_modify (&block, dest, src);
	  /* Finally allocate any allocatable components and copy.  */
	  tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
				      WALK_ALLOC_COMPS_COPY_CTOR);
	  gfc_add_expr_to_block (&block, tem);
	  return gfc_finish_block (&block);
	}
      else
	return build2_v (MODIFY_EXPR, dest, src);
    }

  gfc_start_block (&block);

  if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
    {
      then_b = gfc_walk_alloc_comps (dest, NULL_TREE, OMP_CLAUSE_DECL (clause),
				     WALK_ALLOC_COMPS_DTOR);
      tree tem = fold_convert (pvoid_type_node,
			       GFC_DESCRIPTOR_TYPE_P (type)
			       ? gfc_conv_descriptor_data_get (dest) : dest);
      tem = unshare_expr (tem);
      cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			      tem, null_pointer_node);
      tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
			then_b, build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block, tem);
    }

  gfc_init_block (&cond_block);

  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (src, rank);
      size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			      size,
			      gfc_conv_descriptor_lbound_get (src, rank));
      size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			      size, gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
	size = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type, size,
				gfc_conv_descriptor_stride_get (src, rank));
      tree esize = fold_convert (gfc_array_index_type,
				 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      size, esize);
      size = unshare_expr (size);
      size = gfc_evaluate_now (fold_convert (size_type_node, size),
			       &cond_block);
    }
  else
    size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
  ptr = gfc_create_var (pvoid_type_node, NULL);

  tree destptr = GFC_DESCRIPTOR_TYPE_P (type)
		 ? gfc_conv_descriptor_data_get (dest) : dest;
  destptr = unshare_expr (destptr);
  destptr = fold_convert (pvoid_type_node, destptr);
  gfc_add_modify (&cond_block, ptr, destptr);

  nonalloc = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
			      destptr, null_pointer_node);
  cond = nonalloc;
  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      int i;
      for (i = 0; i < GFC_TYPE_ARRAY_RANK (type); i++)
	{
	  tree rank = gfc_rank_cst[i];
	  tree tem = gfc_conv_descriptor_ubound_get (src, rank);
	  tem = fold_build2_loc (input_location, MINUS_EXPR,
				 gfc_array_index_type, tem,
				 gfc_conv_descriptor_lbound_get (src, rank));
	  tem = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type, tem,
				 gfc_conv_descriptor_lbound_get (dest, rank));
	  tem = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				 tem, gfc_conv_descriptor_ubound_get (dest,
								      rank));
	  cond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
				  boolean_type_node, cond, tem);
	}
    }

  gfc_init_block (&cond_block2);

  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      gfc_init_block (&inner_block);
      gfc_allocate_using_malloc (&inner_block, ptr, size, NULL_TREE);
      then_b = gfc_finish_block (&inner_block);

      gfc_init_block (&inner_block);
      gfc_add_modify (&inner_block, ptr,
		      gfc_call_realloc (&inner_block, ptr, size));
      else_b = gfc_finish_block (&inner_block);

      gfc_add_expr_to_block (&cond_block2,
			     build3_loc (input_location, COND_EXPR,
					 void_type_node,
					 unshare_expr (nonalloc),
					 then_b, else_b));
      gfc_add_modify (&cond_block2, dest, src);
      gfc_conv_descriptor_data_set (&cond_block2, unshare_expr (dest), ptr);
    }
  else
    {
      gfc_allocate_using_malloc (&cond_block2, ptr, size, NULL_TREE);
      gfc_add_modify (&cond_block2, unshare_expr (dest),
		      fold_convert (type, ptr));
    }
  then_b = gfc_finish_block (&cond_block2);
  else_b = build_empty_stmt (input_location);

  gfc_add_expr_to_block (&cond_block,
			 build3_loc (input_location, COND_EXPR,
				     void_type_node, unshare_expr (cond),
				     then_b, else_b));

  tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
		? gfc_conv_descriptor_data_get (src) : src;
  srcptr = unshare_expr (srcptr);
  srcptr = fold_convert (pvoid_type_node, srcptr);
  call = build_call_expr_loc (input_location,
			      builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
			      srcptr, size);
  gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
  if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
    {
      tree tem = gfc_walk_alloc_comps (src, dest,
				       OMP_CLAUSE_DECL (clause),
				       WALK_ALLOC_COMPS_COPY_CTOR);
      gfc_add_expr_to_block (&cond_block, tem);
    }
  then_b = gfc_finish_block (&cond_block);

  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_COPYIN)
    {
      gfc_init_block (&cond_block);
      if (GFC_DESCRIPTOR_TYPE_P (type))
	gfc_add_expr_to_block (&cond_block,
			       gfc_trans_dealloc_allocated (unshare_expr (dest),
							    false, NULL));
      else
	{
	  destptr = gfc_evaluate_now (destptr, &cond_block);
	  gfc_add_expr_to_block (&cond_block, gfc_call_free (destptr));
	  gfc_add_modify (&cond_block, unshare_expr (dest),
			  build_zero_cst (TREE_TYPE (dest)));
	}
      else_b = gfc_finish_block (&cond_block);

      cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			      unshare_expr (srcptr), null_pointer_node);
      gfc_add_expr_to_block (&block,
			     build3_loc (input_location, COND_EXPR,
					 void_type_node, cond,
					 then_b, else_b));
    }
  else
    gfc_add_expr_to_block (&block, then_b);

  return gfc_finish_block (&block);
}
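/* Illustrative only: for

     integer, allocatable :: a(:)
     !$omp parallel do lastprivate(a)

   the copy-out performed above assigns the last iteration's private A back
   to the original, reallocating the destination if the shapes differ.  */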
static void
gfc_omp_linear_clause_add_loop (stmtblock_t *block, tree dest, tree src,
				tree add, tree nelems)
{
  stmtblock_t tmpblock;
  tree desta, srca, index = gfc_create_var (gfc_array_index_type, "S");
  nelems = gfc_evaluate_now (nelems, block);

  gfc_init_block (&tmpblock);
  if (TREE_CODE (TREE_TYPE (dest)) == ARRAY_TYPE)
    {
      desta = gfc_build_array_ref (dest, index, NULL);
      srca = gfc_build_array_ref (src, index, NULL);
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest)));
      tree idx = fold_build2 (MULT_EXPR, sizetype,
			      fold_convert (sizetype, index),
			      TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest))));
      desta = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
						    TREE_TYPE (dest), dest,
						    idx));
      srca = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
						   TREE_TYPE (src), src,
						   idx));
    }
  gfc_add_modify (&tmpblock, desta,
		  fold_build2 (PLUS_EXPR, TREE_TYPE (desta),
			       srca, add));

  gfc_loopinfo loop;
  gfc_init_loopinfo (&loop);
  loop.dimen = 1;
  loop.from[0] = gfc_index_zero_node;
  loop.loopvar[0] = index;
  loop.to[0] = nelems;
  gfc_trans_scalarizing_loops (&loop, &tmpblock);
  gfc_add_block_to_block (block, &loop.pre);
}
/* Build and return code for a constructor of DEST that initializes
   it to SRC plus ADD (ADD is scalar integer).  */

tree
gfc_omp_clause_linear_ctor (tree clause, tree dest, tree src, tree add)
{
  tree type = TREE_TYPE (dest), ptr, size, nelems = NULL_TREE;
  stmtblock_t block;

  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);

  gfc_start_block (&block);
  add = gfc_evaluate_now (add, &block);

  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      if (!TYPE_DOMAIN (type)
	  || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
	  || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
	  || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
	{
	  nelems = fold_build2 (EXACT_DIV_EXPR, sizetype,
				TYPE_SIZE_UNIT (type),
				TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  nelems = size_binop (MINUS_EXPR, nelems, size_one_node);
	}
      else
	nelems = array_type_nelts (type);
      nelems = fold_convert (gfc_array_index_type, nelems);

      gfc_omp_linear_clause_add_loop (&block, dest, src, add, nelems);
      return gfc_finish_block (&block);
    }

  /* Allocatable arrays in LINEAR clauses need to be allocated
     and copied from SRC.  */
  gfc_add_modify (&block, dest, src);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (dest, rank);
      size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			      size,
			      gfc_conv_descriptor_lbound_get (dest, rank));
      size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			      size, gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
	size = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type, size,
				gfc_conv_descriptor_stride_get (dest, rank));
      tree esize = fold_convert (gfc_array_index_type,
				 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      nelems = gfc_evaluate_now (unshare_expr (size), &block);
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      nelems, unshare_expr (esize));
      size = gfc_evaluate_now (fold_convert (size_type_node, size),
			       &block);
      nelems = fold_build2_loc (input_location, MINUS_EXPR,
				gfc_array_index_type, nelems,
				gfc_index_one_node);
    }
  else
    size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
  ptr = gfc_create_var (pvoid_type_node, NULL);
  gfc_allocate_using_malloc (&block, ptr, size, NULL_TREE);
  if (GFC_DESCRIPTOR_TYPE_P (type))
    {
      gfc_conv_descriptor_data_set (&block, unshare_expr (dest), ptr);
      tree etype = gfc_get_element_type (type);
      ptr = fold_convert (build_pointer_type (etype), ptr);
      tree srcptr = gfc_conv_descriptor_data_get (unshare_expr (src));
      srcptr = fold_convert (build_pointer_type (etype), srcptr);
      gfc_omp_linear_clause_add_loop (&block, ptr, srcptr, add, nelems);
    }
  else
    {
      gfc_add_modify (&block, unshare_expr (dest),
		      fold_convert (TREE_TYPE (dest), ptr));
      ptr = fold_convert (TREE_TYPE (dest), ptr);
      tree dstm = build_fold_indirect_ref (ptr);
      tree srcm = build_fold_indirect_ref (unshare_expr (src));
      gfc_add_modify (&block, dstm,
		      fold_build2 (PLUS_EXPR, TREE_TYPE (add), srcm, add));
    }
  return gfc_finish_block (&block);
}
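/* Illustrative only: for  !$omp simd linear(x:step)  each privatized X
   starts at the original X plus the accumulated step passed in ADD; for
   array or allocatable list items the element-wise loop above is used.  */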
/* Build and return code destructing DECL.  Return NULL if nothing
   to be done.  */

tree
gfc_omp_clause_dtor (tree clause, tree decl)
{
  tree type = TREE_TYPE (decl), tem;

  if ((! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
      && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
    {
      if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
	return gfc_walk_alloc_comps (decl, NULL_TREE,
				     OMP_CLAUSE_DECL (clause),
				     WALK_ALLOC_COMPS_DTOR);
      return NULL_TREE;
    }

  if (GFC_DESCRIPTOR_TYPE_P (type))
    /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
       to be deallocated if they were allocated.  */
    tem = gfc_trans_dealloc_allocated (decl, false, NULL);
  else
    tem = gfc_call_free (decl);
  tem = gfc_omp_unshare_expr (tem);

  if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
    {
      stmtblock_t block;
      tree then_b;

      gfc_init_block (&block);
      gfc_add_expr_to_block (&block,
			     gfc_walk_alloc_comps (decl, NULL_TREE,
						   OMP_CLAUSE_DECL (clause),
						   WALK_ALLOC_COMPS_DTOR));
      gfc_add_expr_to_block (&block, tem);
      then_b = gfc_finish_block (&block);

      tem = fold_convert (pvoid_type_node,
			  GFC_DESCRIPTOR_TYPE_P (type)
			  ? gfc_conv_descriptor_data_get (decl) : decl);
      tem = unshare_expr (tem);
      tree cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				   tem, null_pointer_node);
      tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
			then_b, build_empty_stmt (input_location));
    }
  return tem;
}
void
gfc_omp_finish_clause (tree c, gimple_seq *pre_p)
{
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
    return;

  tree decl = OMP_CLAUSE_DECL (c);
  tree c2 = NULL_TREE, c3 = NULL_TREE, c4 = NULL_TREE;
  if (POINTER_TYPE_P (TREE_TYPE (decl)))
    {
      if (!gfc_omp_privatize_by_reference (decl)
	  && !GFC_DECL_GET_SCALAR_POINTER (decl)
	  && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
	  && !GFC_DECL_CRAY_POINTEE (decl)
	  && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
	return;
      tree orig_decl = decl;
      c4 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
      OMP_CLAUSE_MAP_KIND (c4) = OMP_CLAUSE_MAP_POINTER;
      OMP_CLAUSE_DECL (c4) = decl;
      OMP_CLAUSE_SIZE (c4) = size_int (0);
      decl = build_fold_indirect_ref (decl);
      OMP_CLAUSE_DECL (c) = decl;
      OMP_CLAUSE_SIZE (c) = NULL_TREE;
      if (TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE
	  && (GFC_DECL_GET_SCALAR_POINTER (orig_decl)
	      || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl)))
	{
	  c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_MAP_KIND (c3) = OMP_CLAUSE_MAP_POINTER;
	  OMP_CLAUSE_DECL (c3) = unshare_expr (decl);
	  OMP_CLAUSE_SIZE (c3) = size_int (0);
	  decl = build_fold_indirect_ref (decl);
	  OMP_CLAUSE_DECL (c) = decl;
	}
    }
  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
    {
      stmtblock_t block;
      gfc_start_block (&block);
      tree type = TREE_TYPE (decl);
      tree ptr = gfc_conv_descriptor_data_get (decl);
      ptr = fold_convert (build_pointer_type (char_type_node), ptr);
      ptr = build_fold_indirect_ref (ptr);
      OMP_CLAUSE_DECL (c) = ptr;
      c2 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_MAP_KIND (c2) = OMP_CLAUSE_MAP_TO_PSET;
      OMP_CLAUSE_DECL (c2) = decl;
      OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (type);
      c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
      OMP_CLAUSE_MAP_KIND (c3) = OMP_CLAUSE_MAP_POINTER;
      OMP_CLAUSE_DECL (c3) = gfc_conv_descriptor_data_get (decl);
      OMP_CLAUSE_SIZE (c3) = size_int (0);
      tree size = create_tmp_var (gfc_array_index_type, NULL);
      tree elemsz = TYPE_SIZE_UNIT (gfc_get_element_type (type));
      elemsz = fold_convert (gfc_array_index_type, elemsz);
      if (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER
	  || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT)
	{
	  stmtblock_t cond_block;
	  tree tem, then_b, else_b, zero, cond;

	  gfc_init_block (&cond_block);
	  tem = gfc_full_array_size (&cond_block, decl,
				     GFC_TYPE_ARRAY_RANK (type));
	  gfc_add_modify (&cond_block, size, tem);
	  gfc_add_modify (&cond_block, size,
			  fold_build2 (MULT_EXPR, gfc_array_index_type,
				       size, elemsz));
	  then_b = gfc_finish_block (&cond_block);
	  gfc_init_block (&cond_block);
	  zero = build_int_cst (gfc_array_index_type, 0);
	  gfc_add_modify (&cond_block, size, zero);
	  else_b = gfc_finish_block (&cond_block);
	  tem = gfc_conv_descriptor_data_get (decl);
	  tem = fold_convert (pvoid_type_node, tem);
	  cond = fold_build2_loc (input_location, NE_EXPR,
				  boolean_type_node, tem, null_pointer_node);
	  gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
						     void_type_node, cond,
						     then_b, else_b));
	}
      else
	{
	  gfc_add_modify (&block, size,
			  gfc_full_array_size (&block, decl,
					       GFC_TYPE_ARRAY_RANK (type)));
	  gfc_add_modify (&block, size,
			  fold_build2 (MULT_EXPR, gfc_array_index_type,
				       size, elemsz));
	}
      OMP_CLAUSE_SIZE (c) = size;
      tree stmt = gfc_finish_block (&block);
      gimplify_and_add (stmt, pre_p);
    }
  tree last = c;
  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
    OMP_CLAUSE_SIZE (c)
      = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
		      : TYPE_SIZE_UNIT (TREE_TYPE (decl));
  if (c2)
    {
      OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (last);
      OMP_CLAUSE_CHAIN (last) = c2;
      last = c2;
    }
  if (c3)
    {
      OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (last);
      OMP_CLAUSE_CHAIN (last) = c3;
      last = c3;
    }
  if (c4)
    {
      OMP_CLAUSE_CHAIN (c4) = OMP_CLAUSE_CHAIN (last);
      OMP_CLAUSE_CHAIN (last) = c4;
      last = c4;
    }
}
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
gfc_omp_disregard_value_expr (tree decl, bool shared)
{
  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
	  && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
	{
	  /* If variable in COMMON or EQUIVALENCE is privatized, return
	     true, as just that variable is supposed to be privatized,
	     not the whole COMMON or whole EQUIVALENCE.
	     For shared variables in COMMON or EQUIVALENCE, let them be
	     gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
	     from the same COMMON or EQUIVALENCE just one sharing of the
	     whole COMMON or EQUIVALENCE is enough.  */
	  return ! shared;
	}
    }

  if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
    return true;

  return false;
}
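/* Illustrative only: with

     common /c/ i, j
     !$omp parallel private(i) shared(j)

   privatizing I must not drag in the whole /c/ block, whereas the shared J
   can still be accessed through the COMMON decl via its DECL_VALUE_EXPR.  */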
/* Return true if DECL that is shared iff SHARED is true should
   be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
   flag set.  */

bool
gfc_omp_private_debug_clause (tree decl, bool shared)
{
  if (GFC_DECL_CRAY_POINTEE (decl))
    return true;

  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
	  && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
	return shared;
    }

  return false;
}
/* Register language specific type size variables as potentially OpenMP
   firstprivate variables.  */

void
gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
    {
      int r;

      gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
      for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
	{
	  omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
	  omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
	  omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
	}
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
    }
}
static inline tree
gfc_trans_add_clause (tree node, tree tail)
{
  OMP_CLAUSE_CHAIN (node) = tail;
  return node;
}
static tree
gfc_trans_omp_variable (gfc_symbol *sym, bool declare_simd)
{
  if (declare_simd)
    {
      int cnt = 0;
      gfc_symbol *proc_sym;
      gfc_formal_arglist *f;

      gcc_assert (sym->attr.dummy);
      proc_sym = sym->ns->proc_name;
      if (proc_sym->attr.entry_master)
	++cnt;
      if (gfc_return_by_reference (proc_sym))
	{
	  ++cnt;
	  if (proc_sym->ts.type == BT_CHARACTER)
	    ++cnt;
	}
      for (f = gfc_sym_get_dummy_args (proc_sym); f; f = f->next)
	if (f->sym == sym)
	  break;
	else if (f->sym)
	  ++cnt;
      gcc_assert (f);
      return build_int_cst (integer_type_node, cnt);
    }

  tree t = gfc_get_symbol_decl (sym);
  tree parent_decl;
  int parent_flag;
  bool return_value;
  bool alternate_entry;
  bool entry_master;

  return_value = sym->attr.function && sym->result == sym;
  alternate_entry = sym->attr.function && sym->attr.entry
		    && sym->result == sym;
  entry_master = sym->attr.result
		 && sym->ns->proc_name->attr.entry_master
		 && !gfc_return_by_reference (sym->ns->proc_name);
  parent_decl = current_function_decl
		? DECL_CONTEXT (current_function_decl) : NULL_TREE;

  if ((t == parent_decl && return_value)
      || (sym->ns && sym->ns->proc_name
	  && sym->ns->proc_name->backend_decl == parent_decl
	  && (alternate_entry || entry_master)))
    parent_flag = 1;
  else
    parent_flag = 0;

  /* Special case for assigning the return value of a function.
     Self recursive functions must have an explicit return value.  */
  if (return_value && (t == current_function_decl || parent_flag))
    t = gfc_get_fake_result_decl (sym, parent_flag);

  /* Similarly for alternate entry points.  */
  else if (alternate_entry
	   && (sym->ns->proc_name->backend_decl == current_function_decl
	       || parent_flag))
    {
      gfc_entry_list *el = NULL;

      for (el = sym->ns->entries; el; el = el->next)
	if (sym == el->sym)
	  {
	    t = gfc_get_fake_result_decl (sym, parent_flag);
	    break;
	  }
    }

  else if (entry_master
	   && (sym->ns->proc_name->backend_decl == current_function_decl
	       || parent_flag))
    t = gfc_get_fake_result_decl (sym, parent_flag);

  return t;
}
static tree
gfc_trans_omp_variable_list (enum omp_clause_code code,
			     gfc_omp_namelist *namelist, tree list,
			     bool declare_simd)
{
  for (; namelist != NULL; namelist = namelist->next)
    if (namelist->sym->attr.referenced || declare_simd)
      {
	tree t = gfc_trans_omp_variable (namelist->sym, declare_simd);
	if (t != error_mark_node)
	  {
	    tree node = build_omp_clause (input_location, code);
	    OMP_CLAUSE_DECL (node) = t;
	    list = gfc_trans_add_clause (node, list);
	  }
      }
  return list;
}
struct omp_udr_find_orig_data
{
  gfc_omp_udr *omp_udr;
  bool omp_orig_seen;
};

static int
omp_udr_find_orig (gfc_expr **e, int *walk_subtrees ATTRIBUTE_UNUSED,
		   void *data)
{
  struct omp_udr_find_orig_data *cd = (struct omp_udr_find_orig_data *) data;
  if ((*e)->expr_type == EXPR_VARIABLE
      && (*e)->symtree->n.sym == cd->omp_udr->omp_orig)
    cd->omp_orig_seen = true;

  return 0;
}
static void
gfc_trans_omp_array_reduction_or_udr (tree c, gfc_omp_namelist *n, locus where)
{
  gfc_symbol *sym = n->sym;
  gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
  gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
  gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
  gfc_symbol omp_var_copy[4];
  gfc_expr *e1, *e2, *e3, *e4;
  gfc_ref *ref;
  tree decl, backend_decl, stmt, type, outer_decl;
  locus old_loc = gfc_current_locus;
  const char *iname;
  bool t;
  gfc_omp_udr *udr = n->udr ? n->udr->udr : NULL;

  decl = OMP_CLAUSE_DECL (c);
  gfc_current_locus = where;
  type = TREE_TYPE (decl);
  outer_decl = create_tmp_var_raw (type, NULL);
  if (TREE_CODE (decl) == PARM_DECL
      && TREE_CODE (type) == REFERENCE_TYPE
      && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type)) == GFC_ARRAY_ALLOCATABLE)
    {
      decl = build_fold_indirect_ref (decl);
      type = TREE_TYPE (type);
    }

  /* Create a fake symbol for init value.  */
  memset (&init_val_sym, 0, sizeof (init_val_sym));
  init_val_sym.ns = sym->ns;
  init_val_sym.name = sym->name;
  init_val_sym.ts = sym->ts;
  init_val_sym.attr.referenced = 1;
  init_val_sym.declared_at = where;
  init_val_sym.attr.flavor = FL_VARIABLE;
  if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
    backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
  else if (udr->initializer_ns)
    backend_decl = NULL;
  else
    switch (sym->ts.type)
      {
      case BT_LOGICAL:
      case BT_INTEGER:
      case BT_REAL:
      case BT_COMPLEX:
	backend_decl = build_zero_cst (gfc_sym_type (&init_val_sym));
	break;
      default:
	backend_decl = NULL_TREE;
	break;
      }
  init_val_sym.backend_decl = backend_decl;

  /* Create a fake symbol for the outer array reference.  */
  outer_sym = *sym;
  outer_sym.as = gfc_copy_array_spec (sym->as);
  outer_sym.attr.dummy = 0;
  outer_sym.attr.result = 0;
  outer_sym.attr.flavor = FL_VARIABLE;
  outer_sym.backend_decl = outer_decl;
  if (decl != OMP_CLAUSE_DECL (c))
    outer_sym.backend_decl = build_fold_indirect_ref (outer_decl);

  /* Create fake symtrees for it.  */
  symtree1 = gfc_new_symtree (&root1, sym->name);
  symtree1->n.sym = sym;
  gcc_assert (symtree1 == root1);

  symtree2 = gfc_new_symtree (&root2, sym->name);
  symtree2->n.sym = &init_val_sym;
  gcc_assert (symtree2 == root2);

  symtree3 = gfc_new_symtree (&root3, sym->name);
  symtree3->n.sym = &outer_sym;
  gcc_assert (symtree3 == root3);

  memset (omp_var_copy, 0, sizeof omp_var_copy);
  if (udr)
    {
      omp_var_copy[0] = *udr->omp_out;
      omp_var_copy[1] = *udr->omp_in;
      *udr->omp_out = outer_sym;
      *udr->omp_in = *sym;
      if (udr->initializer_ns)
	{
	  omp_var_copy[2] = *udr->omp_priv;
	  omp_var_copy[3] = *udr->omp_orig;
	  *udr->omp_priv = *sym;
	  *udr->omp_orig = outer_sym;
	}
    }

  /* Create expressions.  */
  e1 = gfc_get_expr ();
  e1->expr_type = EXPR_VARIABLE;
  e1->where = where;
  e1->symtree = symtree1;
  e1->ts = sym->ts;
  if (sym->attr.dimension)
    {
      e1->ref = ref = gfc_get_ref ();
      ref->type = REF_ARRAY;
      ref->u.ar.where = where;
      ref->u.ar.as = sym->as;
      ref->u.ar.type = AR_FULL;
      ref->u.ar.dimen = 0;
    }
  t = gfc_resolve_expr (e1);
  gcc_assert (t);

  e2 = NULL;
  if (backend_decl != NULL_TREE)
    {
      e2 = gfc_get_expr ();
      e2->expr_type = EXPR_VARIABLE;
      e2->where = where;
      e2->symtree = symtree2;
      e2->ts = sym->ts;
      t = gfc_resolve_expr (e2);
      gcc_assert (t);
    }
  else if (udr->initializer_ns == NULL)
    {
      gcc_assert (sym->ts.type == BT_DERIVED);
      e2 = gfc_default_initializer (&sym->ts);
      gcc_assert (e2);
      t = gfc_resolve_expr (e2);
      gcc_assert (t);
    }
  else if (n->udr->initializer->op == EXEC_ASSIGN)
    {
      e2 = gfc_copy_expr (n->udr->initializer->expr2);
      t = gfc_resolve_expr (e2);
      gcc_assert (t);
    }
  if (udr && udr->initializer_ns)
    {
      struct omp_udr_find_orig_data cd;
      cd.omp_udr = udr;
      cd.omp_orig_seen = false;
      gfc_code_walker (&n->udr->initializer,
		       gfc_dummy_code_callback, omp_udr_find_orig, &cd);
      if (cd.omp_orig_seen)
	OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1;
    }

  e3 = gfc_copy_expr (e1);
  e3->symtree = symtree3;
  t = gfc_resolve_expr (e3);
  gcc_assert (t);

  iname = NULL;
  e4 = NULL;
  switch (OMP_CLAUSE_REDUCTION_CODE (c))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      e4 = gfc_add (e3, e1);
      break;
    case MULT_EXPR:
      e4 = gfc_multiply (e3, e1);
      break;
    case TRUTH_ANDIF_EXPR:
      e4 = gfc_and (e3, e1);
      break;
    case TRUTH_ORIF_EXPR:
      e4 = gfc_or (e3, e1);
      break;
    case EQ_EXPR:
      e4 = gfc_eqv (e3, e1);
      break;
    case NE_EXPR:
      e4 = gfc_neqv (e3, e1);
      break;
    case MIN_EXPR:
      iname = "min";
      break;
    case MAX_EXPR:
      iname = "max";
      break;
    case BIT_AND_EXPR:
      iname = "iand";
      break;
    case BIT_IOR_EXPR:
      iname = "ior";
      break;
    case BIT_XOR_EXPR:
      iname = "ieor";
      break;
    case ERROR_MARK:
      if (n->udr->combiner->op == EXEC_ASSIGN)
	{
	  gfc_free_expr (e3);
	  e3 = gfc_copy_expr (n->udr->combiner->expr1);
	  e4 = gfc_copy_expr (n->udr->combiner->expr2);
	  t = gfc_resolve_expr (e3);
	  gcc_assert (t);
	  t = gfc_resolve_expr (e4);
	  gcc_assert (t);
	}
      break;
    default:
      gcc_unreachable ();
    }
  if (iname != NULL)
    {
      memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
      intrinsic_sym.ns = sym->ns;
      intrinsic_sym.name = iname;
      intrinsic_sym.ts = sym->ts;
      intrinsic_sym.attr.referenced = 1;
      intrinsic_sym.attr.intrinsic = 1;
      intrinsic_sym.attr.function = 1;
      intrinsic_sym.result = &intrinsic_sym;
      intrinsic_sym.declared_at = where;

      symtree4 = gfc_new_symtree (&root4, iname);
      symtree4->n.sym = &intrinsic_sym;
      gcc_assert (symtree4 == root4);

      e4 = gfc_get_expr ();
      e4->expr_type = EXPR_FUNCTION;
      e4->where = where;
      e4->symtree = symtree4;
      e4->value.function.actual = gfc_get_actual_arglist ();
      e4->value.function.actual->expr = e3;
      e4->value.function.actual->next = gfc_get_actual_arglist ();
      e4->value.function.actual->next->expr = e1;
    }
  if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
    {
      /* e1 and e3 have been stored as arguments of e4, avoid sharing.  */
      e1 = gfc_copy_expr (e1);
      e3 = gfc_copy_expr (e3);
      t = gfc_resolve_expr (e4);
      gcc_assert (t);
    }

  /* Create the init statement list.  */
  pushlevel ();
  if (e2)
    stmt = gfc_trans_assignment (e1, e2, false, false);
  else
    stmt = gfc_trans_call (n->udr->initializer, false,
			   NULL_TREE, NULL_TREE, false);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  else
    poplevel (0, 0);
  OMP_CLAUSE_REDUCTION_INIT (c) = stmt;

  /* Create the merge statement list.  */
  pushlevel ();
  if (e4)
    stmt = gfc_trans_assignment (e3, e4, false, true);
  else
    stmt = gfc_trans_call (n->udr->combiner, false,
			   NULL_TREE, NULL_TREE, false);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  else
    poplevel (0, 0);
  OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;

  /* And stick the placeholder VAR_DECL into the clause as well.  */
  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_decl;

  gfc_current_locus = old_loc;

  gfc_free_expr (e1);
  if (e2)
    gfc_free_expr (e2);
  gfc_free_expr (e3);
  if (e4)
    gfc_free_expr (e4);
  free (symtree1);
  free (symtree2);
  free (symtree3);
  free (symtree4);
  if (outer_sym.as)
    gfc_free_array_spec (outer_sym.as);

  if (udr)
    {
      *udr->omp_out = omp_var_copy[0];
      *udr->omp_in = omp_var_copy[1];
      if (udr->initializer_ns)
	{
	  *udr->omp_priv = omp_var_copy[2];
	  *udr->omp_orig = omp_var_copy[3];
	}
    }
}
*namelist
, tree list
,
1647 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1648 if (namelist
->sym
->attr
.referenced
)
1650 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
1651 if (t
!= error_mark_node
)
1653 tree node
= build_omp_clause (where
.lb
->location
,
1654 OMP_CLAUSE_REDUCTION
);
1655 OMP_CLAUSE_DECL (node
) = t
;
1656 switch (namelist
->u
.reduction_op
)
1658 case OMP_REDUCTION_PLUS
:
1659 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
1661 case OMP_REDUCTION_MINUS
:
1662 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
1664 case OMP_REDUCTION_TIMES
:
1665 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
1667 case OMP_REDUCTION_AND
:
1668 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
1670 case OMP_REDUCTION_OR
:
1671 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
1673 case OMP_REDUCTION_EQV
:
1674 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
1676 case OMP_REDUCTION_NEQV
:
1677 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
1679 case OMP_REDUCTION_MAX
:
1680 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
1682 case OMP_REDUCTION_MIN
:
1683 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
1685 case OMP_REDUCTION_IAND
:
1686 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
1688 case OMP_REDUCTION_IOR
:
1689 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
1691 case OMP_REDUCTION_IEOR
:
1692 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
1694 case OMP_REDUCTION_USER
:
1695 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
1700 if (namelist
->sym
->attr
.dimension
1701 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
1702 || namelist
->sym
->attr
.allocatable
)
1703 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
1704 list
= gfc_trans_add_clause (node
, list
);
static tree
gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
		       locus where, bool declare_simd = false)
{
  tree omp_clauses = NULL_TREE, chunk_size, c;
  int list;
  enum omp_clause_code clause_code;
  gfc_se se;

  if (clauses == NULL)
    return NULL_TREE;

  for (list = 0; list < OMP_LIST_NUM; list++)
    {
      gfc_omp_namelist *n = clauses->lists[list];

      if (n == NULL)
	continue;
      switch (list)
	{
	case OMP_LIST_REDUCTION:
	  omp_clauses = gfc_trans_omp_reduction_list (n, omp_clauses, where);
	  break;
	case OMP_LIST_PRIVATE:
	  clause_code = OMP_CLAUSE_PRIVATE;
	  goto add_clause;
	case OMP_LIST_SHARED:
	  clause_code = OMP_CLAUSE_SHARED;
	  goto add_clause;
	case OMP_LIST_FIRSTPRIVATE:
	  clause_code = OMP_CLAUSE_FIRSTPRIVATE;
	  goto add_clause;
	case OMP_LIST_LASTPRIVATE:
	  clause_code = OMP_CLAUSE_LASTPRIVATE;
	  goto add_clause;
	case OMP_LIST_COPYIN:
	  clause_code = OMP_CLAUSE_COPYIN;
	  goto add_clause;
	case OMP_LIST_COPYPRIVATE:
	  clause_code = OMP_CLAUSE_COPYPRIVATE;
	  goto add_clause;
	case OMP_LIST_UNIFORM:
	  clause_code = OMP_CLAUSE_UNIFORM;
	  /* FALLTHROUGH */
	add_clause:
	  omp_clauses
	    = gfc_trans_omp_variable_list (clause_code, n, omp_clauses,
					   declare_simd);
	  break;
	case OMP_LIST_ALIGNED:
	  for (; n != NULL; n = n->next)
	    if (n->sym->attr.referenced || declare_simd)
	      {
		tree t = gfc_trans_omp_variable (n->sym, declare_simd);
		if (t != error_mark_node)
		  {
		    tree node = build_omp_clause (input_location,
						  OMP_CLAUSE_ALIGNED);
		    OMP_CLAUSE_DECL (node) = t;
		    if (n->expr)
		      {
			tree alignment_var;

			if (block == NULL)
			  alignment_var = gfc_conv_constant_to_tree (n->expr);
			else
			  {
			    gfc_init_se (&se, NULL);
			    gfc_conv_expr (&se, n->expr);
			    gfc_add_block_to_block (block, &se.pre);
			    alignment_var = gfc_evaluate_now (se.expr, block);
			    gfc_add_block_to_block (block, &se.post);
			  }
			OMP_CLAUSE_ALIGNED_ALIGNMENT (node) = alignment_var;
		      }
		    omp_clauses = gfc_trans_add_clause (node, omp_clauses);
		  }
	      }
	  break;
	case OMP_LIST_LINEAR:
	  {
	    gfc_expr *last_step_expr = NULL;
	    tree last_step = NULL_TREE;

	    for (; n != NULL; n = n->next)
	      {
		if (n->expr)
		  {
		    last_step_expr = n->expr;
		    last_step = NULL_TREE;
		  }
		if (n->sym->attr.referenced || declare_simd)
		  {
		    tree t = gfc_trans_omp_variable (n->sym, declare_simd);
		    if (t != error_mark_node)
		      {
			tree node = build_omp_clause (input_location,
						      OMP_CLAUSE_LINEAR);
			OMP_CLAUSE_DECL (node) = t;
			if (last_step_expr && last_step == NULL_TREE)
			  {
			    if (block == NULL)
			      last_step
				= gfc_conv_constant_to_tree (last_step_expr);
			    else
			      {
				gfc_init_se (&se, NULL);
				gfc_conv_expr (&se, last_step_expr);
				gfc_add_block_to_block (block, &se.pre);
				last_step = gfc_evaluate_now (se.expr, block);
				gfc_add_block_to_block (block, &se.post);
			      }
			  }
			OMP_CLAUSE_LINEAR_STEP (node)
			  = fold_convert (gfc_typenode_for_spec (&n->sym->ts),
					  last_step);
			if (n->sym->attr.dimension || n->sym->attr.allocatable)
			  OMP_CLAUSE_LINEAR_ARRAY (node) = 1;
			omp_clauses = gfc_trans_add_clause (node, omp_clauses);
		      }
		  }
	      }
	  }
	  break;
	case OMP_LIST_DEPEND:
	  for (; n != NULL; n = n->next)
	    {
	      if (!n->sym->attr.referenced)
		continue;

	      tree node = build_omp_clause (input_location, OMP_CLAUSE_DEPEND);
	      if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
		{
		  tree decl = gfc_get_symbol_decl (n->sym);
		  if (gfc_omp_privatize_by_reference (decl))
		    decl = build_fold_indirect_ref (decl);
		  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
		    {
		      decl = gfc_conv_descriptor_data_get (decl);
		      decl = fold_convert (build_pointer_type (char_type_node),
					   decl);
		      decl = build_fold_indirect_ref (decl);
		    }
		  else if (DECL_P (decl))
		    TREE_ADDRESSABLE (decl) = 1;
		  OMP_CLAUSE_DECL (node) = decl;
		}
	      else
		{
		  tree ptr;
		  gfc_init_se (&se, NULL);
		  if (n->expr->ref->u.ar.type == AR_ELEMENT)
		    {
		      gfc_conv_expr_reference (&se, n->expr);
		      ptr = se.expr;
		    }
		  else
		    {
		      gfc_conv_expr_descriptor (&se, n->expr);
		      ptr = gfc_conv_array_data (se.expr);
		    }
		  gfc_add_block_to_block (block, &se.pre);
		  gfc_add_block_to_block (block, &se.post);
		  ptr = fold_convert (build_pointer_type (char_type_node),
				      ptr);
		  OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
		}
	      switch (n->u.depend_op)
		{
		case OMP_DEPEND_IN:
		  OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_IN;
		  break;
		case OMP_DEPEND_OUT:
		  OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_OUT;
		  break;
		case OMP_DEPEND_INOUT:
		  OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_INOUT;
		  break;
		default:
		  gcc_unreachable ();
		}
	      omp_clauses = gfc_trans_add_clause (node, omp_clauses);
	    }
	  break;
	case OMP_LIST_MAP:
	  for (; n != NULL; n = n->next)
	    {
	      if (!n->sym->attr.referenced)
		continue;

	      tree node = build_omp_clause (input_location, OMP_CLAUSE_MAP);
	      tree node2 = NULL_TREE;
	      tree node3 = NULL_TREE;
	      tree node4 = NULL_TREE;
	      tree decl = gfc_get_symbol_decl (n->sym);
	      if (DECL_P (decl))
		TREE_ADDRESSABLE (decl) = 1;
	      if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
		{
		  if (POINTER_TYPE_P (TREE_TYPE (decl))
		      && (gfc_omp_privatize_by_reference (decl)
			  || GFC_DECL_GET_SCALAR_POINTER (decl)
			  || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
			  || GFC_DECL_CRAY_POINTEE (decl)
			  || GFC_DESCRIPTOR_TYPE_P
					(TREE_TYPE (TREE_TYPE (decl)))))
		    {
		      tree orig_decl = decl;
		      node4 = build_omp_clause (input_location,
						OMP_CLAUSE_MAP);
		      OMP_CLAUSE_MAP_KIND (node4) = OMP_CLAUSE_MAP_POINTER;
		      OMP_CLAUSE_DECL (node4) = decl;
		      OMP_CLAUSE_SIZE (node4) = size_int (0);
		      decl = build_fold_indirect_ref (decl);
		      if (TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE
			  && (GFC_DECL_GET_SCALAR_POINTER (orig_decl)
			      || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl)))
			{
			  node3 = build_omp_clause (input_location,
						    OMP_CLAUSE_MAP);
			  OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
			  OMP_CLAUSE_DECL (node3) = decl;
			  OMP_CLAUSE_SIZE (node3) = size_int (0);
			  decl = build_fold_indirect_ref (decl);
			}
		    }
		  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
		    {
		      tree type = TREE_TYPE (decl);
		      tree ptr = gfc_conv_descriptor_data_get (decl);
		      ptr = fold_convert (build_pointer_type (char_type_node),
					  ptr);
		      ptr = build_fold_indirect_ref (ptr);
		      OMP_CLAUSE_DECL (node) = ptr;
		      node2 = build_omp_clause (input_location,
						OMP_CLAUSE_MAP);
		      OMP_CLAUSE_MAP_KIND (node2) = OMP_CLAUSE_MAP_TO_PSET;
		      OMP_CLAUSE_DECL (node2) = decl;
		      OMP_CLAUSE_SIZE (node2) = TYPE_SIZE_UNIT (type);
		      node3 = build_omp_clause (input_location,
						OMP_CLAUSE_MAP);
		      OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
		      OMP_CLAUSE_DECL (node3)
			= gfc_conv_descriptor_data_get (decl);
		      OMP_CLAUSE_SIZE (node3) = size_int (0);
		      if (n->sym->attr.pointer)
			{
			  stmtblock_t cond_block;
			  tree size
			    = gfc_create_var (gfc_array_index_type, NULL);
			  tree tem, then_b, else_b, zero, cond;

			  gfc_init_block (&cond_block);
			  tem
			    = gfc_full_array_size (&cond_block, decl,
						   GFC_TYPE_ARRAY_RANK (type));
			  gfc_add_modify (&cond_block, size, tem);
			  then_b = gfc_finish_block (&cond_block);
			  gfc_init_block (&cond_block);
			  zero = build_int_cst (gfc_array_index_type, 0);
			  gfc_add_modify (&cond_block, size, zero);
			  else_b = gfc_finish_block (&cond_block);
			  tem = gfc_conv_descriptor_data_get (decl);
			  tem = fold_convert (pvoid_type_node, tem);
			  cond = fold_build2_loc (input_location, NE_EXPR,
						  boolean_type_node,
						  tem, null_pointer_node);
			  gfc_add_expr_to_block (block,
						 build3_loc (input_location,
							     COND_EXPR,
							     void_type_node,
							     cond, then_b,
							     else_b));
			  OMP_CLAUSE_SIZE (node) = size;
			}
		      else
			OMP_CLAUSE_SIZE (node)
			  = gfc_full_array_size (block, decl,
						 GFC_TYPE_ARRAY_RANK (type));
		      tree elemsz
			= TYPE_SIZE_UNIT (gfc_get_element_type (type));
		      elemsz = fold_convert (gfc_array_index_type, elemsz);
		      OMP_CLAUSE_SIZE (node)
			= fold_build2 (MULT_EXPR, gfc_array_index_type,
				       OMP_CLAUSE_SIZE (node), elemsz);
		    }
		  else
		    OMP_CLAUSE_DECL (node) = decl;
		}
	      else
		{
		  tree ptr, ptr2;
		  gfc_init_se (&se, NULL);
		  if (n->expr->ref->u.ar.type == AR_ELEMENT)
		    {
		      gfc_conv_expr_reference (&se, n->expr);
		      gfc_add_block_to_block (block, &se.pre);
		      ptr = se.expr;
		      OMP_CLAUSE_SIZE (node)
			= TYPE_SIZE_UNIT (TREE_TYPE (ptr));
		    }
		  else
		    {
		      gfc_conv_expr_descriptor (&se, n->expr);
		      ptr = gfc_conv_array_data (se.expr);
		      tree type = TREE_TYPE (se.expr);
		      gfc_add_block_to_block (block, &se.pre);
		      OMP_CLAUSE_SIZE (node)
			= gfc_full_array_size (block, se.expr,
					       GFC_TYPE_ARRAY_RANK (type));
		      tree elemsz
			= TYPE_SIZE_UNIT (gfc_get_element_type (type));
		      elemsz = fold_convert (gfc_array_index_type, elemsz);
		      OMP_CLAUSE_SIZE (node)
			= fold_build2 (MULT_EXPR, gfc_array_index_type,
				       OMP_CLAUSE_SIZE (node), elemsz);
		    }
		  gfc_add_block_to_block (block, &se.post);
		  ptr = fold_convert (build_pointer_type (char_type_node),
				      ptr);
		  OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);

		  if (POINTER_TYPE_P (TREE_TYPE (decl))
		      && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
		    {
		      node4 = build_omp_clause (input_location,
						OMP_CLAUSE_MAP);
		      OMP_CLAUSE_MAP_KIND (node4) = OMP_CLAUSE_MAP_POINTER;
		      OMP_CLAUSE_DECL (node4) = decl;
		      OMP_CLAUSE_SIZE (node4) = size_int (0);
		      decl = build_fold_indirect_ref (decl);
		    }
		  ptr = fold_convert (sizetype, ptr);
		  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
		    {
		      tree type = TREE_TYPE (decl);
		      ptr2 = gfc_conv_descriptor_data_get (decl);
		      node2 = build_omp_clause (input_location,
						OMP_CLAUSE_MAP);
		      OMP_CLAUSE_MAP_KIND (node2) = OMP_CLAUSE_MAP_TO_PSET;
		      OMP_CLAUSE_DECL (node2) = decl;
		      OMP_CLAUSE_SIZE (node2) = TYPE_SIZE_UNIT (type);
		      node3 = build_omp_clause (input_location,
						OMP_CLAUSE_MAP);
		      OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
		      OMP_CLAUSE_DECL (node3)
			= gfc_conv_descriptor_data_get (decl);
		    }
		  else
		    {
		      if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
			ptr2 = build_fold_addr_expr (decl);
		      else
			{
			  gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl)));
			  ptr2 = decl;
			}
		      node3 = build_omp_clause (input_location,
						OMP_CLAUSE_MAP);
		      OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
		      OMP_CLAUSE_DECL (node3) = decl;
		    }
		  ptr2 = fold_convert (sizetype, ptr2);
		  OMP_CLAUSE_SIZE (node3)
		    = fold_build2 (MINUS_EXPR, sizetype, ptr, ptr2);
		}
	      switch (n->u.map_op)
		{
		case OMP_MAP_ALLOC:
		  OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_ALLOC;
		  break;
		case OMP_MAP_TO:
		  OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_TO;
		  break;
		case OMP_MAP_FROM:
		  OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_FROM;
) = OMP_CLAUSE_MAP_FROM
;
2087 case OMP_MAP_TOFROM
:
2088 OMP_CLAUSE_MAP_KIND (node
) = OMP_CLAUSE_MAP_TOFROM
;
2093 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2095 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
2097 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
2099 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
2104 for (; n
!= NULL
; n
= n
->next
)
2106 if (!n
->sym
->attr
.referenced
)
2109 tree node
= build_omp_clause (input_location
,
2111 ? OMP_CLAUSE_TO
: OMP_CLAUSE_FROM
);
2112 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2114 tree decl
= gfc_get_symbol_decl (n
->sym
);
2115 if (gfc_omp_privatize_by_reference (decl
))
2116 decl
= build_fold_indirect_ref (decl
);
2117 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2119 tree type
= TREE_TYPE (decl
);
2120 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2121 ptr
= fold_convert (build_pointer_type (char_type_node
),
2123 ptr
= build_fold_indirect_ref (ptr
);
2124 OMP_CLAUSE_DECL (node
) = ptr
;
2125 OMP_CLAUSE_SIZE (node
)
2126 = gfc_full_array_size (block
, decl
,
2127 GFC_TYPE_ARRAY_RANK (type
));
2129 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2130 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2131 OMP_CLAUSE_SIZE (node
)
2132 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2133 OMP_CLAUSE_SIZE (node
), elemsz
);
2136 OMP_CLAUSE_DECL (node
) = decl
;
2141 gfc_init_se (&se
, NULL
);
2142 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2144 gfc_conv_expr_reference (&se
, n
->expr
);
2146 gfc_add_block_to_block (block
, &se
.pre
);
2147 OMP_CLAUSE_SIZE (node
)
2148 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2152 gfc_conv_expr_descriptor (&se
, n
->expr
);
2153 ptr
= gfc_conv_array_data (se
.expr
);
2154 tree type
= TREE_TYPE (se
.expr
);
2155 gfc_add_block_to_block (block
, &se
.pre
);
2156 OMP_CLAUSE_SIZE (node
)
2157 = gfc_full_array_size (block
, se
.expr
,
2158 GFC_TYPE_ARRAY_RANK (type
));
2160 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2161 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2162 OMP_CLAUSE_SIZE (node
)
2163 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2164 OMP_CLAUSE_SIZE (node
), elemsz
);
2166 gfc_add_block_to_block (block
, &se
.post
);
2167 ptr
= fold_convert (build_pointer_type (char_type_node
),
2169 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2171 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
  if (clauses->if_expr)
    {
      tree if_var;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->if_expr);
      gfc_add_block_to_block (block, &se.pre);
      if_var = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
      OMP_CLAUSE_IF_EXPR (c) = if_var;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->final_expr)
    {
      tree final_var;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->final_expr);
      gfc_add_block_to_block (block, &se.pre);
      final_var = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_FINAL);
      OMP_CLAUSE_FINAL_EXPR (c) = final_var;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->num_threads)
    {
      tree num_threads;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->num_threads);
      gfc_add_block_to_block (block, &se.pre);
      num_threads = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
      OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  chunk_size = NULL_TREE;
  if (clauses->chunk_size)
    {
      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->chunk_size);
      gfc_add_block_to_block (block, &se.pre);
      chunk_size = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);
    }

  if (clauses->sched_kind != OMP_SCHED_NONE)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
      OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
      switch (clauses->sched_kind)
        {
        case OMP_SCHED_STATIC:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
          break;
        case OMP_SCHED_DYNAMIC:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
          break;
        case OMP_SCHED_GUIDED:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
          break;
        case OMP_SCHED_RUNTIME:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
          break;
        case OMP_SCHED_AUTO:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
          break;
        default:
          gcc_unreachable ();
        }
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
      switch (clauses->default_sharing)
        {
        case OMP_DEFAULT_NONE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
          break;
        case OMP_DEFAULT_SHARED:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
          break;
        case OMP_DEFAULT_PRIVATE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
          break;
        case OMP_DEFAULT_FIRSTPRIVATE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          break;
        default:
          gcc_unreachable ();
        }
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->nowait)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->ordered)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->untied)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->mergeable)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_MERGEABLE);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->collapse)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
      OMP_CLAUSE_COLLAPSE_EXPR (c)
        = build_int_cst (integer_type_node, clauses->collapse);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->inbranch)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_INBRANCH);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->notinbranch)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOTINBRANCH);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  switch (clauses->cancel)
    {
    case OMP_CANCEL_UNKNOWN:
      break;
    case OMP_CANCEL_PARALLEL:
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_PARALLEL);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
      break;
    case OMP_CANCEL_SECTIONS:
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_SECTIONS);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
      break;
    case OMP_CANCEL_DO:
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_FOR);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
      break;
    case OMP_CANCEL_TASKGROUP:
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_TASKGROUP);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
      break;
    }

  if (clauses->proc_bind != OMP_PROC_BIND_UNKNOWN)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_PROC_BIND);
      switch (clauses->proc_bind)
        {
        case OMP_PROC_BIND_MASTER:
          OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_MASTER;
          break;
        case OMP_PROC_BIND_SPREAD:
          OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_SPREAD;
          break;
        case OMP_PROC_BIND_CLOSE:
          OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_CLOSE;
          break;
        default:
          gcc_unreachable ();
        }
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->safelen_expr)
    {
      tree safelen_var;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->safelen_expr);
      gfc_add_block_to_block (block, &se.pre);
      safelen_var = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_SAFELEN);
      OMP_CLAUSE_SAFELEN_EXPR (c) = safelen_var;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->simdlen_expr)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_SIMDLEN);
      OMP_CLAUSE_SIMDLEN_EXPR (c)
        = gfc_conv_constant_to_tree (clauses->simdlen_expr);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->num_teams)
    {
      tree num_teams;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->num_teams);
      gfc_add_block_to_block (block, &se.pre);
      num_teams = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_TEAMS);
      OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->device)
    {
      tree device;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->device);
      gfc_add_block_to_block (block, &se.pre);
      device = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEVICE);
      OMP_CLAUSE_DEVICE_ID (c) = device;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->thread_limit)
    {
      tree thread_limit;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->thread_limit);
      gfc_add_block_to_block (block, &se.pre);
      thread_limit = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_THREAD_LIMIT);
      OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  chunk_size = NULL_TREE;
  if (clauses->dist_chunk_size)
    {
      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->dist_chunk_size);
      gfc_add_block_to_block (block, &se.pre);
      chunk_size = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);
    }

  if (clauses->dist_sched_kind != OMP_SCHED_NONE)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_DIST_SCHEDULE);
      OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  return nreverse (omp_clauses);
}
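
/* For example, expression-valued clauses such as those in

       !$omp parallel if (n > 0) num_threads (nthr * 2)

   are evaluated once into temporaries in BLOCK before the region is
   built, and the temporaries become OMP_CLAUSE_IF_EXPR and
   OMP_CLAUSE_NUM_THREADS_EXPR.  The clause chain is assembled in
   reverse by gfc_trans_add_clause, hence the final nreverse to restore
   source order.  (Variable names above are purely illustrative.)  */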
/* Like gfc_trans_code, but force creation of a BIND_EXPR around it.  */

static tree
gfc_trans_omp_code (gfc_code *code, bool force_empty)
{
  tree stmt;

  pushlevel ();
  stmt = gfc_trans_code (code);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt) || force_empty)
        {
          tree block = poplevel (1, 0);
          stmt = build3_v (BIND_EXPR, NULL, stmt, block);
        }
      else
        poplevel (0, 0);
    }
  else
    poplevel (0, 0);

  return stmt;
}

static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
2487 gfc_trans_omp_atomic (gfc_code
*code
)
2489 gfc_code
*atomic_code
= code
;
2493 gfc_expr
*expr2
, *e
;
2496 tree lhsaddr
, type
, rhs
, x
;
2497 enum tree_code op
= ERROR_MARK
;
2498 enum tree_code aop
= OMP_ATOMIC
;
2499 bool var_on_left
= false;
2500 bool seq_cst
= (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SEQ_CST
) != 0;
2502 code
= code
->block
->next
;
2503 gcc_assert (code
->op
== EXEC_ASSIGN
);
2504 var
= code
->expr1
->symtree
->n
.sym
;
2506 gfc_init_se (&lse
, NULL
);
2507 gfc_init_se (&rse
, NULL
);
2508 gfc_init_se (&vse
, NULL
);
2509 gfc_start_block (&block
);
2511 expr2
= code
->expr2
;
2512 if (expr2
->expr_type
== EXPR_FUNCTION
2513 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2514 expr2
= expr2
->value
.function
.actual
->expr
;
2516 switch (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2518 case GFC_OMP_ATOMIC_READ
:
2519 gfc_conv_expr (&vse
, code
->expr1
);
2520 gfc_add_block_to_block (&block
, &vse
.pre
);
2522 gfc_conv_expr (&lse
, expr2
);
2523 gfc_add_block_to_block (&block
, &lse
.pre
);
2524 type
= TREE_TYPE (lse
.expr
);
2525 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
2527 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
2528 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
2529 x
= convert (TREE_TYPE (vse
.expr
), x
);
2530 gfc_add_modify (&block
, vse
.expr
, x
);
2532 gfc_add_block_to_block (&block
, &lse
.pre
);
2533 gfc_add_block_to_block (&block
, &rse
.pre
);
2535 return gfc_finish_block (&block
);
2536 case GFC_OMP_ATOMIC_CAPTURE
:
2537 aop
= OMP_ATOMIC_CAPTURE_NEW
;
2538 if (expr2
->expr_type
== EXPR_VARIABLE
)
2540 aop
= OMP_ATOMIC_CAPTURE_OLD
;
2541 gfc_conv_expr (&vse
, code
->expr1
);
2542 gfc_add_block_to_block (&block
, &vse
.pre
);
2544 gfc_conv_expr (&lse
, expr2
);
2545 gfc_add_block_to_block (&block
, &lse
.pre
);
2546 gfc_init_se (&lse
, NULL
);
2548 var
= code
->expr1
->symtree
->n
.sym
;
2549 expr2
= code
->expr2
;
2550 if (expr2
->expr_type
== EXPR_FUNCTION
2551 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2552 expr2
= expr2
->value
.function
.actual
->expr
;
2559 gfc_conv_expr (&lse
, code
->expr1
);
2560 gfc_add_block_to_block (&block
, &lse
.pre
);
2561 type
= TREE_TYPE (lse
.expr
);
2562 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
2564 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2565 == GFC_OMP_ATOMIC_WRITE
)
2566 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
2568 gfc_conv_expr (&rse
, expr2
);
2569 gfc_add_block_to_block (&block
, &rse
.pre
);
2571 else if (expr2
->expr_type
== EXPR_OP
)
2574 switch (expr2
->value
.op
.op
)
2576 case INTRINSIC_PLUS
:
2579 case INTRINSIC_TIMES
:
2582 case INTRINSIC_MINUS
:
2585 case INTRINSIC_DIVIDE
:
2586 if (expr2
->ts
.type
== BT_INTEGER
)
2587 op
= TRUNC_DIV_EXPR
;
2592 op
= TRUTH_ANDIF_EXPR
;
2595 op
= TRUTH_ORIF_EXPR
;
2600 case INTRINSIC_NEQV
:
2606 e
= expr2
->value
.op
.op1
;
2607 if (e
->expr_type
== EXPR_FUNCTION
2608 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2609 e
= e
->value
.function
.actual
->expr
;
2610 if (e
->expr_type
== EXPR_VARIABLE
2611 && e
->symtree
!= NULL
2612 && e
->symtree
->n
.sym
== var
)
2614 expr2
= expr2
->value
.op
.op2
;
2619 e
= expr2
->value
.op
.op2
;
2620 if (e
->expr_type
== EXPR_FUNCTION
2621 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2622 e
= e
->value
.function
.actual
->expr
;
2623 gcc_assert (e
->expr_type
== EXPR_VARIABLE
2624 && e
->symtree
!= NULL
2625 && e
->symtree
->n
.sym
== var
);
2626 expr2
= expr2
->value
.op
.op1
;
2627 var_on_left
= false;
2629 gfc_conv_expr (&rse
, expr2
);
2630 gfc_add_block_to_block (&block
, &rse
.pre
);
2634 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
2635 switch (expr2
->value
.function
.isym
->id
)
2655 e
= expr2
->value
.function
.actual
->expr
;
2656 gcc_assert (e
->expr_type
== EXPR_VARIABLE
2657 && e
->symtree
!= NULL
2658 && e
->symtree
->n
.sym
== var
);
2660 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
2661 gfc_add_block_to_block (&block
, &rse
.pre
);
2662 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
2664 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
2665 gfc_actual_arglist
*arg
;
2667 gfc_add_modify (&block
, accum
, rse
.expr
);
2668 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
2671 gfc_init_block (&rse
.pre
);
2672 gfc_conv_expr (&rse
, arg
->expr
);
2673 gfc_add_block_to_block (&block
, &rse
.pre
);
2674 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
2676 gfc_add_modify (&block
, accum
, x
);
2682 expr2
= expr2
->value
.function
.actual
->next
->expr
;
2685 lhsaddr
= save_expr (lhsaddr
);
2686 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
2688 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2689 == GFC_OMP_ATOMIC_WRITE
)
2690 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
2694 x
= convert (TREE_TYPE (rhs
),
2695 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
2697 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
2699 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
2702 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
2703 && TREE_CODE (type
) != COMPLEX_TYPE
)
2704 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
2705 TREE_TYPE (TREE_TYPE (rhs
)), x
);
2707 gfc_add_block_to_block (&block
, &lse
.pre
);
2708 gfc_add_block_to_block (&block
, &rse
.pre
);
2710 if (aop
== OMP_ATOMIC
)
2712 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
2713 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
2714 gfc_add_expr_to_block (&block
, x
);
2718 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
2721 expr2
= code
->expr2
;
2722 if (expr2
->expr_type
== EXPR_FUNCTION
2723 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2724 expr2
= expr2
->value
.function
.actual
->expr
;
2726 gcc_assert (expr2
->expr_type
== EXPR_VARIABLE
);
2727 gfc_conv_expr (&vse
, code
->expr1
);
2728 gfc_add_block_to_block (&block
, &vse
.pre
);
2730 gfc_init_se (&lse
, NULL
);
2731 gfc_conv_expr (&lse
, expr2
);
2732 gfc_add_block_to_block (&block
, &lse
.pre
);
2734 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
2735 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
2736 x
= convert (TREE_TYPE (vse
.expr
), x
);
2737 gfc_add_modify (&block
, vse
.expr
, x
);
2740 return gfc_finish_block (&block
);
}

static tree
gfc_trans_omp_barrier (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
  return build_call_expr_loc (input_location, decl, 0);
}
static tree
gfc_trans_omp_cancel (gfc_code *code)
{
  int mask = 0;
  tree ifc = boolean_true_node;
  stmtblock_t block;
  switch (code->ext.omp_clauses->cancel)
    {
    case OMP_CANCEL_PARALLEL: mask = 1; break;
    case OMP_CANCEL_DO: mask = 2; break;
    case OMP_CANCEL_SECTIONS: mask = 4; break;
    case OMP_CANCEL_TASKGROUP: mask = 8; break;
    default: gcc_unreachable ();
    }
  gfc_start_block (&block);
  if (code->ext.omp_clauses->if_expr)
    {
      gfc_se se;
      tree if_var;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, code->ext.omp_clauses->if_expr);
      gfc_add_block_to_block (&block, &se.pre);
      if_var = gfc_evaluate_now (se.expr, &block);
      gfc_add_block_to_block (&block, &se.post);
      tree type = TREE_TYPE (if_var);
      ifc = fold_build2_loc (input_location, NE_EXPR,
                             boolean_type_node, if_var,
                             build_zero_cst (type));
    }
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
  tree c_bool_type = TREE_TYPE (TREE_TYPE (decl));
  ifc = fold_convert (c_bool_type, ifc);
  gfc_add_expr_to_block (&block,
                         build_call_expr_loc (input_location, decl, 2,
                                              build_int_cst (integer_type_node,
                                                             mask), ifc));
  return gfc_finish_block (&block);
}
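
/* For example, a cancellation such as

       !$omp cancel do if (err /= 0)

   is expected to end up as a runtime call of the form
   GOMP_cancel (2, err != 0), with the mask above selecting which
   enclosing construct (parallel, do, sections or taskgroup) is
   cancelled.  (The exact libgomp entry point is the builtin resolved
   above; the Fortran snippet is purely illustrative.)  */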
static tree
gfc_trans_omp_cancellation_point (gfc_code *code)
{
  int mask = 0;
  switch (code->ext.omp_clauses->cancel)
    {
    case OMP_CANCEL_PARALLEL: mask = 1; break;
    case OMP_CANCEL_DO: mask = 2; break;
    case OMP_CANCEL_SECTIONS: mask = 4; break;
    case OMP_CANCEL_TASKGROUP: mask = 8; break;
    default: gcc_unreachable ();
    }
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT);
  return build_call_expr_loc (input_location, decl, 1,
                              build_int_cst (integer_type_node, mask));
}
static tree
gfc_trans_omp_critical (gfc_code *code)
{
  tree name = NULL_TREE, stmt;
  if (code->ext.omp_name != NULL)
    name = get_identifier (code->ext.omp_name);
  stmt = gfc_trans_code (code->block->next);
  return build2_loc (input_location, OMP_CRITICAL, void_type_node, stmt, name);
}
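
/* For example,

       !$omp critical (lock)

   keeps the name as an IDENTIFIER_NODE on the OMP_CRITICAL node, so
   that differently named critical sections can later be given distinct
   locks, while an unnamed critical simply passes NULL_TREE.  (The name
   "lock" is an illustrative Fortran identifier.)  */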
2817 typedef struct dovar_init_d
{
2824 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
2825 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
2828 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
;
2829 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
2832 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
2833 int i
, collapse
= clauses
->collapse
;
2834 vec
<dovar_init
> inits
= vNULL
;
2841 code
= code
->block
->next
;
2842 gcc_assert (code
->op
== EXEC_DO
);
2844 init
= make_tree_vec (collapse
);
2845 cond
= make_tree_vec (collapse
);
2846 incr
= make_tree_vec (collapse
);
2850 gfc_start_block (&block
);
2854 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
2856 for (i
= 0; i
< collapse
; i
++)
2859 int dovar_found
= 0;
2864 gfc_omp_namelist
*n
= NULL
;
2865 if (op
!= EXEC_OMP_DISTRIBUTE
)
2866 for (n
= clauses
->lists
[(op
== EXEC_OMP_SIMD
&& collapse
== 1)
2867 ? OMP_LIST_LINEAR
: OMP_LIST_LASTPRIVATE
];
2868 n
!= NULL
; n
= n
->next
)
2869 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
2873 else if (n
== NULL
&& op
!= EXEC_OMP_SIMD
)
2874 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
2875 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
2881 /* Evaluate all the expressions in the iterator. */
2882 gfc_init_se (&se
, NULL
);
2883 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
2884 gfc_add_block_to_block (pblock
, &se
.pre
);
2886 type
= TREE_TYPE (dovar
);
2887 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
2889 gfc_init_se (&se
, NULL
);
2890 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
2891 gfc_add_block_to_block (pblock
, &se
.pre
);
2892 from
= gfc_evaluate_now (se
.expr
, pblock
);
2894 gfc_init_se (&se
, NULL
);
2895 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
2896 gfc_add_block_to_block (pblock
, &se
.pre
);
2897 to
= gfc_evaluate_now (se
.expr
, pblock
);
2899 gfc_init_se (&se
, NULL
);
2900 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
2901 gfc_add_block_to_block (pblock
, &se
.pre
);
2902 step
= gfc_evaluate_now (se
.expr
, pblock
);
2905 /* Special case simple loops. */
2906 if (TREE_CODE (dovar
) == VAR_DECL
)
2908 if (integer_onep (step
))
2910 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
2915 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
2921 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
2922 /* The condition should not be folded. */
2923 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
2924 ? LE_EXPR
: GE_EXPR
,
2925 boolean_type_node
, dovar
, to
);
2926 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
2928 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
2931 TREE_VEC_ELT (incr
, i
));
2935 /* STEP is not 1 or -1. Use:
2936 for (count = 0; count < (to + step - from) / step; count++)
2938 dovar = from + count * step;
2942 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
2943 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
2944 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
2946 tmp
= gfc_evaluate_now (tmp
, pblock
);
2947 count
= gfc_create_var (type
, "count");
2948 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
2949 build_int_cst (type
, 0));
2950 /* The condition should not be folded. */
2951 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
2954 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
2956 build_int_cst (type
, 1));
2957 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
2958 MODIFY_EXPR
, type
, count
,
2959 TREE_VEC_ELT (incr
, i
));
2961 /* Initialize DOVAR. */
2962 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
2963 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
2964 dovar_init e
= {dovar
, tmp
};
2965 inits
.safe_push (e
);
2970 if (op
== EXEC_OMP_SIMD
)
2974 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
2975 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
2978 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
2983 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
2984 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
2985 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
2987 if (dovar_found
== 2)
2994 /* If dovar is lastprivate, but different counter is used,
2995 dovar += step needs to be added to
2996 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
2997 will have the value on entry of the last loop, rather
2998 than value after iterator increment. */
2999 tmp
= gfc_evaluate_now (step
, pblock
);
3000 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, dovar
,
3002 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
3004 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3005 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3006 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3008 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
3011 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
3012 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3014 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
3018 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
3020 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3021 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3022 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3024 tree l
= build_omp_clause (input_location
,
3025 OMP_CLAUSE_LASTPRIVATE
);
3026 OMP_CLAUSE_DECL (l
) = dovar_decl
;
3027 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
3028 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
3030 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
3034 gcc_assert (simple
|| c
!= NULL
);
3038 if (op
!= EXEC_OMP_SIMD
)
3039 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3040 else if (collapse
== 1)
3042 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3043 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
3044 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3045 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
3048 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3049 OMP_CLAUSE_DECL (tmp
) = count
;
3050 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3053 if (i
+ 1 < collapse
)
3054 code
= code
->block
->next
;
3057 if (pblock
!= &block
)
3060 gfc_start_block (&block
);
3063 gfc_start_block (&body
);
3065 FOR_EACH_VEC_ELT (inits
, ix
, di
)
3066 gfc_add_modify (&body
, di
->var
, di
->init
);
3069 /* Cycle statement is implemented with a goto. Exit statement must not be
3070 present for this loop. */
3071 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3073 /* Put these labels where they can be found later. */
3075 code
->cycle_label
= cycle_label
;
3076 code
->exit_label
= NULL_TREE
;
3078 /* Main loop body. */
3079 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
3080 gfc_add_expr_to_block (&body
, tmp
);
3082 /* Label for cycle statements (if needed). */
3083 if (TREE_USED (cycle_label
))
3085 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3086 gfc_add_expr_to_block (&body
, tmp
);
3089 /* End of loop body. */
3092 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
3093 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
3094 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
3095 default: gcc_unreachable ();
3098 TREE_TYPE (stmt
) = void_type_node
;
3099 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
3100 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
3101 OMP_FOR_INIT (stmt
) = init
;
3102 OMP_FOR_COND (stmt
) = cond
;
3103 OMP_FOR_INCR (stmt
) = incr
;
3104 gfc_add_expr_to_block (&block
, stmt
);
3106 return gfc_finish_block (&block
);
}

static tree
gfc_trans_omp_flush (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
  return build_call_expr_loc (input_location, decl, 0);
}
static tree
gfc_trans_omp_master (gfc_code *code)
{
  tree stmt = gfc_trans_code (code->block->next);
  if (IS_EMPTY_STMT (stmt))
    return stmt;
  return build1_v (OMP_MASTER, stmt);
}
static tree
gfc_trans_omp_ordered (gfc_code *code)
{
  return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
}
static tree
gfc_trans_omp_parallel (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
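
/* For example, for

       !$omp parallel default(shared) private(x)

   the clauses are translated first (into BLOCK, so any clause
   expressions are evaluated outside the region), the body is wrapped in
   a BIND_EXPR by gfc_trans_omp_code, and the result becomes the body of
   a single OMP_PARALLEL node.  (The Fortran snippet is illustrative.)  */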
enum
{
  GFC_OMP_SPLIT_SIMD,
  GFC_OMP_SPLIT_DO,
  GFC_OMP_SPLIT_PARALLEL,
  GFC_OMP_SPLIT_DISTRIBUTE,
  GFC_OMP_SPLIT_TEAMS,
  GFC_OMP_SPLIT_TARGET,
  GFC_OMP_SPLIT_NUM
};

enum
{
  GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
  GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
  GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
  GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
  GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
  GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET)
};
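
/* These masks describe which constituent constructs a combined directive
   contains; e.g. for

       !$omp target teams distribute parallel do simd

   all six bits are set and the innermost leaf is GFC_OMP_SPLIT_SIMD, so
   the clause-splitting code below can hand each clause to the construct
   it belongs to.  */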
3169 gfc_split_omp_clauses (gfc_code
*code
,
3170 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
3172 int mask
= 0, innermost
= 0;
3173 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
3176 case EXEC_OMP_DISTRIBUTE
:
3177 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3179 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
3180 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3181 innermost
= GFC_OMP_SPLIT_DO
;
3183 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
3184 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
3185 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3186 innermost
= GFC_OMP_SPLIT_SIMD
;
3188 case EXEC_OMP_DISTRIBUTE_SIMD
:
3189 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3190 innermost
= GFC_OMP_SPLIT_SIMD
;
3193 innermost
= GFC_OMP_SPLIT_DO
;
3195 case EXEC_OMP_DO_SIMD
:
3196 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3197 innermost
= GFC_OMP_SPLIT_SIMD
;
3199 case EXEC_OMP_PARALLEL
:
3200 innermost
= GFC_OMP_SPLIT_PARALLEL
;
3202 case EXEC_OMP_PARALLEL_DO
:
3203 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3204 innermost
= GFC_OMP_SPLIT_DO
;
3206 case EXEC_OMP_PARALLEL_DO_SIMD
:
3207 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3208 innermost
= GFC_OMP_SPLIT_SIMD
;
3211 innermost
= GFC_OMP_SPLIT_SIMD
;
3213 case EXEC_OMP_TARGET
:
3214 innermost
= GFC_OMP_SPLIT_TARGET
;
3216 case EXEC_OMP_TARGET_TEAMS
:
3217 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
3218 innermost
= GFC_OMP_SPLIT_TEAMS
;
3220 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
3221 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
3222 | GFC_OMP_MASK_DISTRIBUTE
;
3223 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3225 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
3226 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3227 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3228 innermost
= GFC_OMP_SPLIT_DO
;
3230 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
3231 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3232 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3233 innermost
= GFC_OMP_SPLIT_SIMD
;
3235 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
3236 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
3237 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3238 innermost
= GFC_OMP_SPLIT_SIMD
;
3240 case EXEC_OMP_TEAMS
:
3241 innermost
= GFC_OMP_SPLIT_TEAMS
;
3243 case EXEC_OMP_TEAMS_DISTRIBUTE
:
3244 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
3245 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3247 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
3248 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3249 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3250 innermost
= GFC_OMP_SPLIT_DO
;
3252 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
3253 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3254 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3255 innermost
= GFC_OMP_SPLIT_SIMD
;
3257 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
3258 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3259 innermost
= GFC_OMP_SPLIT_SIMD
;
3266 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
3269 if (code
->ext
.omp_clauses
!= NULL
)
3271 if (mask
& GFC_OMP_MASK_TARGET
)
3273 /* First the clauses that are unique to some constructs. */
3274 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
3275 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
3276 clausesa
[GFC_OMP_SPLIT_TARGET
].device
3277 = code
->ext
.omp_clauses
->device
;
3279 if (mask
& GFC_OMP_MASK_TEAMS
)
3281 /* First the clauses that are unique to some constructs. */
3282 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
3283 = code
->ext
.omp_clauses
->num_teams
;
3284 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
3285 = code
->ext
.omp_clauses
->thread_limit
;
3286 /* Shared and default clauses are allowed on parallel and teams. */
3287 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
3288 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
3289 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
3290 = code
->ext
.omp_clauses
->default_sharing
;
3292 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
3294 /* First the clauses that are unique to some constructs. */
3295 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
3296 = code
->ext
.omp_clauses
->dist_sched_kind
;
3297 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
3298 = code
->ext
.omp_clauses
->dist_chunk_size
;
3299 /* Duplicate collapse. */
3300 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
3301 = code
->ext
.omp_clauses
->collapse
;
3303 if (mask
& GFC_OMP_MASK_PARALLEL
)
3305 /* First the clauses that are unique to some constructs. */
3306 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
3307 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
3308 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
3309 = code
->ext
.omp_clauses
->num_threads
;
3310 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
3311 = code
->ext
.omp_clauses
->proc_bind
;
3312 /* Shared and default clauses are allowed on parallel and teams. */
3313 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
3314 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
3315 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
3316 = code
->ext
.omp_clauses
->default_sharing
;
3318 if (mask
& GFC_OMP_MASK_DO
)
3320 /* First the clauses that are unique to some constructs. */
3321 clausesa
[GFC_OMP_SPLIT_DO
].ordered
3322 = code
->ext
.omp_clauses
->ordered
;
3323 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
3324 = code
->ext
.omp_clauses
->sched_kind
;
3325 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
3326 = code
->ext
.omp_clauses
->chunk_size
;
3327 clausesa
[GFC_OMP_SPLIT_DO
].nowait
3328 = code
->ext
.omp_clauses
->nowait
;
3329 /* Duplicate collapse. */
3330 clausesa
[GFC_OMP_SPLIT_DO
].collapse
3331 = code
->ext
.omp_clauses
->collapse
;
3333 if (mask
& GFC_OMP_MASK_SIMD
)
3335 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
3336 = code
->ext
.omp_clauses
->safelen_expr
;
3337 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LINEAR
]
3338 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
3339 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
3340 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
3341 /* Duplicate collapse. */
3342 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
3343 = code
->ext
.omp_clauses
->collapse
;
3345 /* Private clause is supported on all constructs but target,
3346 it is enough to put it on the innermost one. For
3347 !$ omp do put it on parallel though,
3348 as that's what we did for OpenMP 3.1. */
3349 clausesa
[innermost
== GFC_OMP_SPLIT_DO
3350 ? (int) GFC_OMP_SPLIT_PARALLEL
3351 : innermost
].lists
[OMP_LIST_PRIVATE
]
3352 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
3353 /* Firstprivate clause is supported on all constructs but
3354 target and simd. Put it on the outermost of those and
3355 duplicate on parallel. */
3356 if (mask
& GFC_OMP_MASK_TEAMS
)
3357 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
3358 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3359 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
3360 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
3361 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3362 if (mask
& GFC_OMP_MASK_PARALLEL
)
3363 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
3364 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3365 else if (mask
& GFC_OMP_MASK_DO
)
3366 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
3367 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3368 /* Lastprivate is allowed on do and simd. In
3369 parallel do{, simd} we actually want to put it on
3370 parallel rather than do. */
3371 if (mask
& GFC_OMP_MASK_PARALLEL
)
3372 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
3373 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3374 else if (mask
& GFC_OMP_MASK_DO
)
3375 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
3376 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3377 if (mask
& GFC_OMP_MASK_SIMD
)
3378 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
3379 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3380 /* Reduction is allowed on simd, do, parallel and teams.
3381 Duplicate it on all of them, but omit on do if
3382 parallel is present. */
3383 if (mask
& GFC_OMP_MASK_TEAMS
)
3384 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_REDUCTION
]
3385 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3386 if (mask
& GFC_OMP_MASK_PARALLEL
)
3387 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_REDUCTION
]
3388 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3389 else if (mask
& GFC_OMP_MASK_DO
)
3390 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_REDUCTION
]
3391 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3392 if (mask
& GFC_OMP_MASK_SIMD
)
3393 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_REDUCTION
]
3394 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3395 /* FIXME: This is currently being discussed. */
3396 if (mask
& GFC_OMP_MASK_PARALLEL
)
3397 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
3398 = code
->ext
.omp_clauses
->if_expr
;
3400 clausesa
[GFC_OMP_SPLIT_TARGET
].if_expr
3401 = code
->ext
.omp_clauses
->if_expr
;
3403 if ((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
3404 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
3405 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
3409 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
3410 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
3413 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3414 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
3417 gfc_start_block (&block
);
3419 gfc_init_block (&block
);
3421 if (clausesa
== NULL
)
3423 clausesa
= clausesa_buf
;
3424 gfc_split_omp_clauses (code
, clausesa
);
3426 if (gfc_option
.gfc_flag_openmp
)
3428 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
3429 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
3430 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
3433 if (TREE_CODE (body
) != BIND_EXPR
)
3434 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
3438 else if (TREE_CODE (body
) != BIND_EXPR
)
3439 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
3440 if (gfc_option
.gfc_flag_openmp
)
3442 stmt
= make_node (OMP_FOR
);
3443 TREE_TYPE (stmt
) = void_type_node
;
3444 OMP_FOR_BODY (stmt
) = body
;
3445 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
3449 gfc_add_expr_to_block (&block
, stmt
);
3450 return gfc_finish_block (&block
);
3454 gfc_trans_omp_parallel_do (gfc_code
*code
, stmtblock_t
*pblock
,
3455 gfc_omp_clauses
*clausesa
)
3457 stmtblock_t block
, *new_pblock
= pblock
;
3458 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3459 tree stmt
, omp_clauses
= NULL_TREE
;
3462 gfc_start_block (&block
);
3464 gfc_init_block (&block
);
3466 if (clausesa
== NULL
)
3468 clausesa
= clausesa_buf
;
3469 gfc_split_omp_clauses (code
, clausesa
);
3472 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
3476 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
3477 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
3478 new_pblock
= &block
;
3482 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DO
, new_pblock
,
3483 &clausesa
[GFC_OMP_SPLIT_DO
], omp_clauses
);
3486 if (TREE_CODE (stmt
) != BIND_EXPR
)
3487 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3491 else if (TREE_CODE (stmt
) != BIND_EXPR
)
3492 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
3493 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3495 OMP_PARALLEL_COMBINED (stmt
) = 1;
3496 gfc_add_expr_to_block (&block
, stmt
);
3497 return gfc_finish_block (&block
);
3501 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
3502 gfc_omp_clauses
*clausesa
)
3505 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3506 tree stmt
, omp_clauses
= NULL_TREE
;
3509 gfc_start_block (&block
);
3511 gfc_init_block (&block
);
3513 if (clausesa
== NULL
)
3515 clausesa
= clausesa_buf
;
3516 gfc_split_omp_clauses (code
, clausesa
);
3518 if (gfc_option
.gfc_flag_openmp
)
3520 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
3524 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
3527 if (TREE_CODE (stmt
) != BIND_EXPR
)
3528 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3532 else if (TREE_CODE (stmt
) != BIND_EXPR
)
3533 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
3534 if (gfc_option
.gfc_flag_openmp
)
3536 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3538 OMP_PARALLEL_COMBINED (stmt
) = 1;
3540 gfc_add_expr_to_block (&block
, stmt
);
3541 return gfc_finish_block (&block
);
}

static tree
gfc_trans_omp_parallel_sections (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses section_clauses;
  tree stmt, omp_clauses;

  memset (&section_clauses, 0, sizeof (section_clauses));
  section_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel ();
  stmt = gfc_trans_omp_sections (code, &section_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  else
    poplevel (0, 0);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
static tree
gfc_trans_omp_parallel_workshare (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses workshare_clauses;
  tree stmt, omp_clauses;

  memset (&workshare_clauses, 0, sizeof (workshare_clauses));
  workshare_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel ();
  stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  else
    poplevel (0, 0);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
static tree
gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
{
  stmtblock_t block, body;
  tree omp_clauses, stmt;
  bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;

  gfc_start_block (&block);

  omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);

  gfc_init_block (&body);
  for (code = code->block; code; code = code->block)
    {
      /* Last section is special because of lastprivate, so even if it
         is empty, chain it in.  */
      stmt = gfc_trans_omp_code (code->next,
                                 has_lastprivate && code->block == NULL);
      if (! IS_EMPTY_STMT (stmt))
        {
          stmt = build1_v (OMP_SECTION, stmt);
          gfc_add_expr_to_block (&body, stmt);
        }
    }
  stmt = gfc_finish_block (&body);

  stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}
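
/* For example,

       !$omp sections lastprivate(k)
       !$omp section
         k = 1
       !$omp section
         k = 2
       !$omp end sections

   emits one OMP_SECTION per section body; the last section is chained in
   even when empty so the lastprivate copy-out has somewhere to live.
   (The snippet and the variable k are purely illustrative.)  */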
static tree
gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
  tree stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
                     omp_clauses);
  return stmt;
}
static tree
gfc_trans_omp_task (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
static tree
gfc_trans_omp_taskgroup (gfc_code *code)
{
  tree stmt = gfc_trans_code (code->block->next);
  return build1_loc (input_location, OMP_TASKGROUP, void_type_node, stmt);
}
static tree
gfc_trans_omp_taskwait (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
  return build_call_expr_loc (input_location, decl, 0);
}
static tree
gfc_trans_omp_taskyield (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
  return build_call_expr_loc (input_location, decl, 0);
}
3677 gfc_trans_omp_distribute (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
3680 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3681 tree stmt
, omp_clauses
= NULL_TREE
;
3683 gfc_start_block (&block
);
3684 if (clausesa
== NULL
)
3686 clausesa
= clausesa_buf
;
3687 gfc_split_omp_clauses (code
, clausesa
);
3689 if (gfc_option
.gfc_flag_openmp
)
3691 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
3695 case EXEC_OMP_DISTRIBUTE
:
3696 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
3697 case EXEC_OMP_TEAMS_DISTRIBUTE
:
3698 /* This is handled in gfc_trans_omp_do. */
3701 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
3702 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
3703 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
3704 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
3705 if (TREE_CODE (stmt
) != BIND_EXPR
)
3706 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3710 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
3711 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
3712 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
3713 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
3714 if (TREE_CODE (stmt
) != BIND_EXPR
)
3715 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3719 case EXEC_OMP_DISTRIBUTE_SIMD
:
3720 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
3721 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
3722 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
3723 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
3724 if (TREE_CODE (stmt
) != BIND_EXPR
)
3725 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3732 if (gfc_option
.gfc_flag_openmp
)
3734 tree distribute
= make_node (OMP_DISTRIBUTE
);
3735 TREE_TYPE (distribute
) = void_type_node
;
3736 OMP_FOR_BODY (distribute
) = stmt
;
3737 OMP_FOR_CLAUSES (distribute
) = omp_clauses
;
3740 gfc_add_expr_to_block (&block
, stmt
);
3741 return gfc_finish_block (&block
);
3745 gfc_trans_omp_teams (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
3748 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3749 tree stmt
, omp_clauses
= NULL_TREE
;
3751 gfc_start_block (&block
);
3752 if (clausesa
== NULL
)
3754 clausesa
= clausesa_buf
;
3755 gfc_split_omp_clauses (code
, clausesa
);
3757 if (gfc_option
.gfc_flag_openmp
)
3759 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TEAMS
],
3763 case EXEC_OMP_TARGET_TEAMS
:
3764 case EXEC_OMP_TEAMS
:
3765 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3767 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
3768 case EXEC_OMP_TEAMS_DISTRIBUTE
:
3769 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DISTRIBUTE
, NULL
,
3770 &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
3774 stmt
= gfc_trans_omp_distribute (code
, clausesa
);
3777 stmt
= build2_loc (input_location
, OMP_TEAMS
, void_type_node
, stmt
,
3779 gfc_add_expr_to_block (&block
, stmt
);
3780 return gfc_finish_block (&block
);
3784 gfc_trans_omp_target (gfc_code
*code
)
3787 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
3788 tree stmt
, omp_clauses
= NULL_TREE
;
3790 gfc_start_block (&block
);
3791 gfc_split_omp_clauses (code
, clausesa
);
3792 if (gfc_option
.gfc_flag_openmp
)
3794 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TARGET
],
3796 if (code
->op
== EXEC_OMP_TARGET
)
3797 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3799 stmt
= gfc_trans_omp_teams (code
, clausesa
);
3800 if (TREE_CODE (stmt
) != BIND_EXPR
)
3801 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
3802 if (gfc_option
.gfc_flag_openmp
)
3803 stmt
= build2_loc (input_location
, OMP_TARGET
, void_type_node
, stmt
,
3805 gfc_add_expr_to_block (&block
, stmt
);
3806 return gfc_finish_block (&block
);
}

static tree
gfc_trans_omp_target_data (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_TARGET_DATA, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
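
/* For example,

       !$omp target data map(to: a) map(from: b)

   only creates the device data environment: the map clauses are
   translated like any other clauses and the translated body becomes the
   operand of an OMP_TARGET_DATA node, with none of the clause splitting
   done for combined target constructs.  (Arrays a and b are
   illustrative.)  */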
static tree
gfc_trans_omp_target_update (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = build1_loc (input_location, OMP_TARGET_UPDATE, void_type_node,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
3841 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
3843 tree res
, tmp
, stmt
;
3844 stmtblock_t block
, *pblock
= NULL
;
3845 stmtblock_t singleblock
;
3846 int saved_ompws_flags
;
3847 bool singleblock_in_progress
= false;
3848 /* True if previous gfc_code in workshare construct is not workshared. */
3849 bool prev_singleunit
;
3851 code
= code
->block
->next
;
3855 gfc_start_block (&block
);
3858 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
3859 prev_singleunit
= false;
3861 /* Translate statements one by one to trees until we reach
3862 the end of the workshare construct. Adjacent gfc_codes that
3863 are a single unit of work are clustered and encapsulated in a
3864 single OMP_SINGLE construct. */
3865 for (; code
; code
= code
->next
)
3867 if (code
->here
!= 0)
3869 res
= gfc_trans_label_here (code
);
3870 gfc_add_expr_to_block (pblock
, res
);
3873 /* No dependence analysis, use for clauses with wait.
3874 If this is the last gfc_code, use default omp_clauses. */
3875 if (code
->next
== NULL
&& clauses
->nowait
)
3876 ompws_flags
|= OMPWS_NOWAIT
;
3878 /* By default, every gfc_code is a single unit of work. */
3879 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
3880 ompws_flags
&= ~OMPWS_SCALARIZER_WS
;
3889 res
= gfc_trans_assign (code
);
3892 case EXEC_POINTER_ASSIGN
:
3893 res
= gfc_trans_pointer_assign (code
);
3896 case EXEC_INIT_ASSIGN
:
3897 res
= gfc_trans_init_assign (code
);
3901 res
= gfc_trans_forall (code
);
3905 res
= gfc_trans_where (code
);
3908 case EXEC_OMP_ATOMIC
:
3909 res
= gfc_trans_omp_directive (code
);
3912 case EXEC_OMP_PARALLEL
:
3913 case EXEC_OMP_PARALLEL_DO
:
3914 case EXEC_OMP_PARALLEL_SECTIONS
:
3915 case EXEC_OMP_PARALLEL_WORKSHARE
:
3916 case EXEC_OMP_CRITICAL
:
3917 saved_ompws_flags
= ompws_flags
;
3919 res
= gfc_trans_omp_directive (code
);
3920 ompws_flags
= saved_ompws_flags
;
3924 internal_error ("gfc_trans_omp_workshare(): Bad statement code");
3927 gfc_set_backend_locus (&code
->loc
);
3929 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
3931 if (prev_singleunit
)
3933 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
3934 /* Add current gfc_code to single block. */
3935 gfc_add_expr_to_block (&singleblock
, res
);
3938 /* Finish single block and add it to pblock. */
3939 tmp
= gfc_finish_block (&singleblock
);
3940 tmp
= build2_loc (input_location
, OMP_SINGLE
,
3941 void_type_node
, tmp
, NULL_TREE
);
3942 gfc_add_expr_to_block (pblock
, tmp
);
3943 /* Add current gfc_code to pblock. */
3944 gfc_add_expr_to_block (pblock
, res
);
3945 singleblock_in_progress
= false;
3950 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
3952 /* Start single block. */
3953 gfc_init_block (&singleblock
);
3954 gfc_add_expr_to_block (&singleblock
, res
);
3955 singleblock_in_progress
= true;
3958 /* Add the new statement to the block. */
3959 gfc_add_expr_to_block (pblock
, res
);
3961 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
3965 /* Finish remaining SINGLE block, if we were in the middle of one. */
3966 if (singleblock_in_progress
)
3968 /* Finish single block and add it to pblock. */
3969 tmp
= gfc_finish_block (&singleblock
);
3970 tmp
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, tmp
,
3972 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
3974 gfc_add_expr_to_block (pblock
, tmp
);
3977 stmt
= gfc_finish_block (pblock
);
3978 if (TREE_CODE (stmt
) != BIND_EXPR
)
3980 if (!IS_EMPTY_STMT (stmt
))
3982 tree bindblock
= poplevel (1, 0);
3983 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
3991 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
3992 stmt
= gfc_trans_omp_barrier ();
  return stmt;
}

tree
gfc_trans_omp_directive (gfc_code *code)
{
  switch (code->op)
    {
    case EXEC_OMP_ATOMIC:
      return gfc_trans_omp_atomic (code);
    case EXEC_OMP_BARRIER:
      return gfc_trans_omp_barrier ();
    case EXEC_OMP_CANCEL:
      return gfc_trans_omp_cancel (code);
    case EXEC_OMP_CANCELLATION_POINT:
      return gfc_trans_omp_cancellation_point (code);
    case EXEC_OMP_CRITICAL:
      return gfc_trans_omp_critical (code);
    case EXEC_OMP_DISTRIBUTE:
    case EXEC_OMP_DO:
    case EXEC_OMP_SIMD:
      return gfc_trans_omp_do (code, code->op, NULL, code->ext.omp_clauses,
                               NULL_TREE);
    case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
    case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
    case EXEC_OMP_DISTRIBUTE_SIMD:
      return gfc_trans_omp_distribute (code, NULL);
    case EXEC_OMP_DO_SIMD:
      return gfc_trans_omp_do_simd (code, NULL, NULL, NULL_TREE);
    case EXEC_OMP_FLUSH:
      return gfc_trans_omp_flush ();
    case EXEC_OMP_MASTER:
      return gfc_trans_omp_master (code);
    case EXEC_OMP_ORDERED:
      return gfc_trans_omp_ordered (code);
    case EXEC_OMP_PARALLEL:
      return gfc_trans_omp_parallel (code);
    case EXEC_OMP_PARALLEL_DO:
      return gfc_trans_omp_parallel_do (code, NULL, NULL);
    case EXEC_OMP_PARALLEL_DO_SIMD:
      return gfc_trans_omp_parallel_do_simd (code, NULL, NULL);
    case EXEC_OMP_PARALLEL_SECTIONS:
      return gfc_trans_omp_parallel_sections (code);
    case EXEC_OMP_PARALLEL_WORKSHARE:
      return gfc_trans_omp_parallel_workshare (code);
    case EXEC_OMP_SECTIONS:
      return gfc_trans_omp_sections (code, code->ext.omp_clauses);
    case EXEC_OMP_SINGLE:
      return gfc_trans_omp_single (code, code->ext.omp_clauses);
    case EXEC_OMP_TARGET:
    case EXEC_OMP_TARGET_TEAMS:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
    case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
      return gfc_trans_omp_target (code);
    case EXEC_OMP_TARGET_DATA:
      return gfc_trans_omp_target_data (code);
    case EXEC_OMP_TARGET_UPDATE:
      return gfc_trans_omp_target_update (code);
    case EXEC_OMP_TASK:
      return gfc_trans_omp_task (code);
    case EXEC_OMP_TASKGROUP:
      return gfc_trans_omp_taskgroup (code);
    case EXEC_OMP_TASKWAIT:
      return gfc_trans_omp_taskwait ();
    case EXEC_OMP_TASKYIELD:
      return gfc_trans_omp_taskyield ();
    case EXEC_OMP_TEAMS:
    case EXEC_OMP_TEAMS_DISTRIBUTE:
    case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
    case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
    case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
      return gfc_trans_omp_teams (code, NULL);
    case EXEC_OMP_WORKSHARE:
      return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
    default:
      gcc_unreachable ();
    }
}
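
/* Note that every combined target or teams form, e.g.

       !$omp target teams distribute parallel do

   is funnelled through gfc_trans_omp_target/gfc_trans_omp_teams above,
   which split the clause list per constituent construct before emitting
   the nested region trees.  (The directive shown is one illustrative
   combination.)  */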
void
gfc_trans_omp_declare_simd (gfc_namespace *ns)
{
  if (ns->entries)
    return;

  gfc_omp_declare_simd *ods;
  for (ods = ns->omp_declare_simd; ods; ods = ods->next)
    {
      tree c = gfc_trans_omp_clauses (NULL, ods->clauses, ods->where, true);
      tree fndecl = ns->proc_name->backend_decl;
      if (c != NULL_TREE)
        c = tree_cons (NULL_TREE, c, NULL_TREE);
      c = build_tree_list (get_identifier ("omp declare simd"), c);
      TREE_CHAIN (c) = DECL_ATTRIBUTES (fndecl);
      DECL_ATTRIBUTES (fndecl) = c;
    }
}
;