/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "tree-gimple.h"
#include "ggc.h"
#include "toplev.h"
#include "real.h"
#include "gfortran.h"
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "arith.h"
39 /* True if OpenMP should privatize what this DECL points to rather
40 than the DECL itself. */
43 gfc_omp_privatize_by_reference (const_tree decl
)
45 tree type
= TREE_TYPE (decl
);
47 if (TREE_CODE (type
) == REFERENCE_TYPE
48 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
51 if (TREE_CODE (type
) == POINTER_TYPE
)
53 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
54 that have POINTER_TYPE type and don't have GFC_POINTER_TYPE_P
55 set are supposed to be privatized by reference. */
56 if (GFC_POINTER_TYPE_P (type
))
59 if (!DECL_ARTIFICIAL (decl
))
62 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
64 if (DECL_LANG_SPECIFIC (decl
)
65 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
72 /* True if OpenMP sharing attribute of DECL is predetermined. */
74 enum omp_clause_default_kind
75 gfc_omp_predetermined_sharing (tree decl
)
77 if (DECL_ARTIFICIAL (decl
) && ! GFC_DECL_RESULT (decl
))
78 return OMP_CLAUSE_DEFAULT_SHARED
;
80 /* Cray pointees shouldn't be listed in any clauses and should be
81 gimplified to dereference of the corresponding Cray pointer.
82 Make them all private, so that they are emitted in the debug
84 if (GFC_DECL_CRAY_POINTEE (decl
))
85 return OMP_CLAUSE_DEFAULT_PRIVATE
;
87 /* Assumed-size arrays are predetermined to inherit sharing
88 attributes of the associated actual argument, which is shared
90 if (TREE_CODE (decl
) == PARM_DECL
91 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
92 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
93 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
94 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
96 return OMP_CLAUSE_DEFAULT_SHARED
;
98 /* COMMON and EQUIVALENCE decls are shared. They
99 are only referenced through DECL_VALUE_EXPR of the variables
100 contained in them. If those are privatized, they will not be
101 gimplified to the COMMON or EQUIVALENCE decls. */
102 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
103 return OMP_CLAUSE_DEFAULT_SHARED
;
105 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
106 return OMP_CLAUSE_DEFAULT_SHARED
;
108 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
112 /* Return true if DECL in private clause needs
113 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
115 gfc_omp_private_outer_ref (tree decl
)
117 tree type
= TREE_TYPE (decl
);
119 if (GFC_DESCRIPTOR_TYPE_P (type
)
120 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
126 /* Return code to initialize DECL with its default constructor, or
127 NULL if there's nothing to do. */
130 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
132 tree type
= TREE_TYPE (decl
), rank
, size
, esize
, ptr
, cond
, then_b
, else_b
;
133 stmtblock_t block
, cond_block
;
135 if (! GFC_DESCRIPTOR_TYPE_P (type
)
136 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
139 gcc_assert (outer
!= NULL
);
140 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_PRIVATE
141 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LASTPRIVATE
);
143 /* Allocatable arrays in PRIVATE clauses need to be set to
144 "not currently allocated" allocation status if outer
145 array is "not currently allocated", otherwise should be allocated. */
146 gfc_start_block (&block
);
148 gfc_init_block (&cond_block
);
150 gfc_add_modify_expr (&cond_block
, decl
, outer
);
151 rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
152 size
= gfc_conv_descriptor_ubound (decl
, rank
);
153 size
= fold_build2 (MINUS_EXPR
, gfc_array_index_type
, size
,
154 gfc_conv_descriptor_lbound (decl
, rank
));
155 size
= fold_build2 (PLUS_EXPR
, gfc_array_index_type
, size
,
157 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
158 size
= fold_build2 (MULT_EXPR
, gfc_array_index_type
, size
,
159 gfc_conv_descriptor_stride (decl
, rank
));
160 esize
= fold_convert (gfc_array_index_type
,
161 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
162 size
= fold_build2 (MULT_EXPR
, gfc_array_index_type
, size
, esize
);
163 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
), &cond_block
);
164 ptr
= gfc_allocate_array_with_status (&cond_block
,
165 build_int_cst (pvoid_type_node
, 0),
167 gfc_conv_descriptor_data_set_tuples (&cond_block
, decl
, ptr
);
168 then_b
= gfc_finish_block (&cond_block
);
170 gfc_init_block (&cond_block
);
171 gfc_conv_descriptor_data_set_tuples (&cond_block
, decl
, null_pointer_node
);
172 else_b
= gfc_finish_block (&cond_block
);
174 cond
= fold_build2 (NE_EXPR
, boolean_type_node
,
175 fold_convert (pvoid_type_node
,
176 gfc_conv_descriptor_data_get (outer
)),
178 gfc_add_expr_to_block (&block
, build3 (COND_EXPR
, void_type_node
,
179 cond
, then_b
, else_b
));
181 return gfc_finish_block (&block
);
184 /* Build and return code for a copy constructor from SRC to DEST. */
187 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
189 tree type
= TREE_TYPE (dest
), ptr
, size
, esize
, rank
, call
;
192 if (! GFC_DESCRIPTOR_TYPE_P (type
)
193 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
194 return build_gimple_modify_stmt (dest
, src
);
196 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
);
198 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
199 and copied from SRC. */
200 gfc_start_block (&block
);
202 gfc_add_modify_expr (&block
, dest
, src
);
203 rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
204 size
= gfc_conv_descriptor_ubound (dest
, rank
);
205 size
= fold_build2 (MINUS_EXPR
, gfc_array_index_type
, size
,
206 gfc_conv_descriptor_lbound (dest
, rank
));
207 size
= fold_build2 (PLUS_EXPR
, gfc_array_index_type
, size
,
209 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
210 size
= fold_build2 (MULT_EXPR
, gfc_array_index_type
, size
,
211 gfc_conv_descriptor_stride (dest
, rank
));
212 esize
= fold_convert (gfc_array_index_type
,
213 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
214 size
= fold_build2 (MULT_EXPR
, gfc_array_index_type
, size
, esize
);
215 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
), &block
);
216 ptr
= gfc_allocate_array_with_status (&block
,
217 build_int_cst (pvoid_type_node
, 0),
219 gfc_conv_descriptor_data_set_tuples (&block
, dest
, ptr
);
220 call
= build_call_expr (built_in_decls
[BUILT_IN_MEMCPY
], 3, ptr
,
221 fold_convert (pvoid_type_node
,
222 gfc_conv_descriptor_data_get (src
)),
224 gfc_add_expr_to_block (&block
, fold_convert (void_type_node
, call
));
226 return gfc_finish_block (&block
);
229 /* Similarly, except use an assignment operator instead. */
232 gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED
, tree dest
, tree src
)
234 tree type
= TREE_TYPE (dest
), rank
, size
, esize
, call
;
237 if (! GFC_DESCRIPTOR_TYPE_P (type
)
238 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
239 return build_gimple_modify_stmt (dest
, src
);
241 /* Handle copying allocatable arrays. */
242 gfc_start_block (&block
);
244 rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
245 size
= gfc_conv_descriptor_ubound (dest
, rank
);
246 size
= fold_build2 (MINUS_EXPR
, gfc_array_index_type
, size
,
247 gfc_conv_descriptor_lbound (dest
, rank
));
248 size
= fold_build2 (PLUS_EXPR
, gfc_array_index_type
, size
,
250 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
251 size
= fold_build2 (MULT_EXPR
, gfc_array_index_type
, size
,
252 gfc_conv_descriptor_stride (dest
, rank
));
253 esize
= fold_convert (gfc_array_index_type
,
254 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
255 size
= fold_build2 (MULT_EXPR
, gfc_array_index_type
, size
, esize
);
256 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
), &block
);
257 call
= build_call_expr (built_in_decls
[BUILT_IN_MEMCPY
], 3,
258 fold_convert (pvoid_type_node
,
259 gfc_conv_descriptor_data_get (dest
)),
260 fold_convert (pvoid_type_node
,
261 gfc_conv_descriptor_data_get (src
)),
263 gfc_add_expr_to_block (&block
, fold_convert (void_type_node
, call
));
265 return gfc_finish_block (&block
);
268 /* Build and return code destructing DECL. Return NULL if nothing
272 gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED
, tree decl
)
274 tree type
= TREE_TYPE (decl
);
276 if (! GFC_DESCRIPTOR_TYPE_P (type
)
277 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
280 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
281 to be deallocated if they were allocated. */
282 return gfc_trans_dealloc_allocated (decl
);
286 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
287 disregarded in OpenMP construct, because it is going to be
288 remapped during OpenMP lowering. SHARED is true if DECL
289 is going to be shared, false if it is going to be privatized. */
292 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
294 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
295 && DECL_HAS_VALUE_EXPR_P (decl
))
297 tree value
= DECL_VALUE_EXPR (decl
);
299 if (TREE_CODE (value
) == COMPONENT_REF
300 && TREE_CODE (TREE_OPERAND (value
, 0)) == VAR_DECL
301 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
303 /* If variable in COMMON or EQUIVALENCE is privatized, return
304 true, as just that variable is supposed to be privatized,
305 not the whole COMMON or whole EQUIVALENCE.
306 For shared variables in COMMON or EQUIVALENCE, let them be
307 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
308 from the same COMMON or EQUIVALENCE just one sharing of the
309 whole COMMON or EQUIVALENCE is enough. */
314 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
320 /* Return true if DECL that is shared iff SHARED is true should
321 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
325 gfc_omp_private_debug_clause (tree decl
, bool shared
)
327 if (GFC_DECL_CRAY_POINTEE (decl
))
330 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
331 && DECL_HAS_VALUE_EXPR_P (decl
))
333 tree value
= DECL_VALUE_EXPR (decl
);
335 if (TREE_CODE (value
) == COMPONENT_REF
336 && TREE_CODE (TREE_OPERAND (value
, 0)) == VAR_DECL
337 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
344 /* Register language specific type size variables as potentially OpenMP
345 firstprivate variables. */
348 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
350 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
354 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
355 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
357 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
358 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
359 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
361 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
362 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
368 gfc_trans_add_clause (tree node
, tree tail
)
370 OMP_CLAUSE_CHAIN (node
) = tail
;
375 gfc_trans_omp_variable (gfc_symbol
*sym
)
377 tree t
= gfc_get_symbol_decl (sym
);
381 bool alternate_entry
;
384 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
385 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
386 && sym
->result
== sym
;
387 entry_master
= sym
->attr
.result
388 && sym
->ns
->proc_name
->attr
.entry_master
389 && !gfc_return_by_reference (sym
->ns
->proc_name
);
390 parent_decl
= DECL_CONTEXT (current_function_decl
);
392 if ((t
== parent_decl
&& return_value
)
393 || (sym
->ns
&& sym
->ns
->proc_name
394 && sym
->ns
->proc_name
->backend_decl
== parent_decl
395 && (alternate_entry
|| entry_master
)))
400 /* Special case for assigning the return value of a function.
401 Self recursive functions must have an explicit return value. */
402 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
403 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
405 /* Similarly for alternate entry points. */
406 else if (alternate_entry
407 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
410 gfc_entry_list
*el
= NULL
;
412 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
415 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
420 else if (entry_master
421 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
423 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
429 gfc_trans_omp_variable_list (enum omp_clause_code code
, gfc_namelist
*namelist
,
432 for (; namelist
!= NULL
; namelist
= namelist
->next
)
433 if (namelist
->sym
->attr
.referenced
)
435 tree t
= gfc_trans_omp_variable (namelist
->sym
);
436 if (t
!= error_mark_node
)
438 tree node
= build_omp_clause (code
);
439 OMP_CLAUSE_DECL (node
) = t
;
440 list
= gfc_trans_add_clause (node
, list
);
447 gfc_trans_omp_array_reduction (tree c
, gfc_symbol
*sym
, locus where
)
449 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
450 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
451 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
452 gfc_expr
*e1
, *e2
, *e3
, *e4
;
454 tree decl
, backend_decl
, stmt
;
455 locus old_loc
= gfc_current_locus
;
459 decl
= OMP_CLAUSE_DECL (c
);
460 gfc_current_locus
= where
;
462 /* Create a fake symbol for init value. */
463 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
464 init_val_sym
.ns
= sym
->ns
;
465 init_val_sym
.name
= sym
->name
;
466 init_val_sym
.ts
= sym
->ts
;
467 init_val_sym
.attr
.referenced
= 1;
468 init_val_sym
.declared_at
= where
;
469 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
470 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
471 init_val_sym
.backend_decl
= backend_decl
;
473 /* Create a fake symbol for the outer array reference. */
475 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
476 outer_sym
.attr
.dummy
= 0;
477 outer_sym
.attr
.result
= 0;
478 outer_sym
.attr
.flavor
= FL_VARIABLE
;
479 outer_sym
.backend_decl
= create_tmp_var_raw (TREE_TYPE (decl
), NULL
);
481 /* Create fake symtrees for it. */
482 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
483 symtree1
->n
.sym
= sym
;
484 gcc_assert (symtree1
== root1
);
486 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
487 symtree2
->n
.sym
= &init_val_sym
;
488 gcc_assert (symtree2
== root2
);
490 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
491 symtree3
->n
.sym
= &outer_sym
;
492 gcc_assert (symtree3
== root3
);
494 /* Create expressions. */
495 e1
= gfc_get_expr ();
496 e1
->expr_type
= EXPR_VARIABLE
;
498 e1
->symtree
= symtree1
;
500 e1
->ref
= ref
= gfc_get_ref ();
501 ref
->u
.ar
.where
= where
;
502 ref
->u
.ar
.as
= sym
->as
;
503 ref
->u
.ar
.type
= AR_FULL
;
505 t
= gfc_resolve_expr (e1
);
506 gcc_assert (t
== SUCCESS
);
508 e2
= gfc_get_expr ();
509 e2
->expr_type
= EXPR_VARIABLE
;
511 e2
->symtree
= symtree2
;
513 t
= gfc_resolve_expr (e2
);
514 gcc_assert (t
== SUCCESS
);
516 e3
= gfc_copy_expr (e1
);
517 e3
->symtree
= symtree3
;
518 t
= gfc_resolve_expr (e3
);
519 gcc_assert (t
== SUCCESS
);
522 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
526 e4
= gfc_add (e3
, e1
);
529 e4
= gfc_multiply (e3
, e1
);
531 case TRUTH_ANDIF_EXPR
:
532 e4
= gfc_and (e3
, e1
);
534 case TRUTH_ORIF_EXPR
:
535 e4
= gfc_or (e3
, e1
);
538 e4
= gfc_eqv (e3
, e1
);
541 e4
= gfc_neqv (e3
, e1
);
563 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
564 intrinsic_sym
.ns
= sym
->ns
;
565 intrinsic_sym
.name
= iname
;
566 intrinsic_sym
.ts
= sym
->ts
;
567 intrinsic_sym
.attr
.referenced
= 1;
568 intrinsic_sym
.attr
.intrinsic
= 1;
569 intrinsic_sym
.attr
.function
= 1;
570 intrinsic_sym
.result
= &intrinsic_sym
;
571 intrinsic_sym
.declared_at
= where
;
573 symtree4
= gfc_new_symtree (&root4
, iname
);
574 symtree4
->n
.sym
= &intrinsic_sym
;
575 gcc_assert (symtree4
== root4
);
577 e4
= gfc_get_expr ();
578 e4
->expr_type
= EXPR_FUNCTION
;
580 e4
->symtree
= symtree4
;
581 e4
->value
.function
.isym
= gfc_find_function (iname
);
582 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
583 e4
->value
.function
.actual
->expr
= e3
;
584 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
585 e4
->value
.function
.actual
->next
->expr
= e1
;
587 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
588 e1
= gfc_copy_expr (e1
);
589 e3
= gfc_copy_expr (e3
);
590 t
= gfc_resolve_expr (e4
);
591 gcc_assert (t
== SUCCESS
);
593 /* Create the init statement list. */
595 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
))
596 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_ALLOCATABLE
)
598 /* If decl is an allocatable array, it needs to be allocated
599 with the same bounds as the outer var. */
600 tree type
= TREE_TYPE (decl
), rank
, size
, esize
, ptr
;
603 gfc_start_block (&block
);
605 gfc_add_modify_expr (&block
, decl
, outer_sym
.backend_decl
);
606 rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
607 size
= gfc_conv_descriptor_ubound (decl
, rank
);
608 size
= fold_build2 (MINUS_EXPR
, gfc_array_index_type
, size
,
609 gfc_conv_descriptor_lbound (decl
, rank
));
610 size
= fold_build2 (PLUS_EXPR
, gfc_array_index_type
, size
,
612 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
613 size
= fold_build2 (MULT_EXPR
, gfc_array_index_type
, size
,
614 gfc_conv_descriptor_stride (decl
, rank
));
615 esize
= fold_convert (gfc_array_index_type
,
616 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
617 size
= fold_build2 (MULT_EXPR
, gfc_array_index_type
, size
, esize
);
618 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
), &block
);
619 ptr
= gfc_allocate_array_with_status (&block
,
620 build_int_cst (pvoid_type_node
, 0),
622 gfc_conv_descriptor_data_set_tuples (&block
, decl
, ptr
);
623 gfc_add_expr_to_block (&block
, gfc_trans_assignment (e1
, e2
, false));
624 stmt
= gfc_finish_block (&block
);
627 stmt
= gfc_trans_assignment (e1
, e2
, false);
628 if (TREE_CODE (stmt
) != BIND_EXPR
)
629 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0, 0));
632 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
634 /* Create the merge statement list. */
636 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
))
637 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_ALLOCATABLE
)
639 /* If decl is an allocatable array, it needs to be deallocated
643 gfc_start_block (&block
);
644 gfc_add_expr_to_block (&block
, gfc_trans_assignment (e3
, e4
, false));
645 gfc_add_expr_to_block (&block
, gfc_trans_dealloc_allocated (decl
));
646 stmt
= gfc_finish_block (&block
);
649 stmt
= gfc_trans_assignment (e3
, e4
, false);
650 if (TREE_CODE (stmt
) != BIND_EXPR
)
651 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0, 0));
654 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
656 /* And stick the placeholder VAR_DECL into the clause as well. */
657 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_sym
.backend_decl
;
659 gfc_current_locus
= old_loc
;
670 gfc_free_array_spec (outer_sym
.as
);
674 gfc_trans_omp_reduction_list (gfc_namelist
*namelist
, tree list
,
675 enum tree_code reduction_code
, locus where
)
677 for (; namelist
!= NULL
; namelist
= namelist
->next
)
678 if (namelist
->sym
->attr
.referenced
)
680 tree t
= gfc_trans_omp_variable (namelist
->sym
);
681 if (t
!= error_mark_node
)
683 tree node
= build_omp_clause (OMP_CLAUSE_REDUCTION
);
684 OMP_CLAUSE_DECL (node
) = t
;
685 OMP_CLAUSE_REDUCTION_CODE (node
) = reduction_code
;
686 if (namelist
->sym
->attr
.dimension
)
687 gfc_trans_omp_array_reduction (node
, namelist
->sym
, where
);
688 list
= gfc_trans_add_clause (node
, list
);
695 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
698 tree omp_clauses
= NULL_TREE
, chunk_size
, c
, old_clauses
;
700 enum omp_clause_code clause_code
;
706 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
708 gfc_namelist
*n
= clauses
->lists
[list
];
712 if (list
>= OMP_LIST_REDUCTION_FIRST
713 && list
<= OMP_LIST_REDUCTION_LAST
)
715 enum tree_code reduction_code
;
719 reduction_code
= PLUS_EXPR
;
722 reduction_code
= MULT_EXPR
;
725 reduction_code
= MINUS_EXPR
;
728 reduction_code
= TRUTH_ANDIF_EXPR
;
731 reduction_code
= TRUTH_ORIF_EXPR
;
734 reduction_code
= EQ_EXPR
;
737 reduction_code
= NE_EXPR
;
740 reduction_code
= MAX_EXPR
;
743 reduction_code
= MIN_EXPR
;
746 reduction_code
= BIT_AND_EXPR
;
749 reduction_code
= BIT_IOR_EXPR
;
752 reduction_code
= BIT_XOR_EXPR
;
757 old_clauses
= omp_clauses
;
759 = gfc_trans_omp_reduction_list (n
, omp_clauses
, reduction_code
,
765 case OMP_LIST_PRIVATE
:
766 clause_code
= OMP_CLAUSE_PRIVATE
;
768 case OMP_LIST_SHARED
:
769 clause_code
= OMP_CLAUSE_SHARED
;
771 case OMP_LIST_FIRSTPRIVATE
:
772 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
774 case OMP_LIST_LASTPRIVATE
:
775 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
777 case OMP_LIST_COPYIN
:
778 clause_code
= OMP_CLAUSE_COPYIN
;
780 case OMP_LIST_COPYPRIVATE
:
781 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
785 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
);
792 if (clauses
->if_expr
)
796 gfc_init_se (&se
, NULL
);
797 gfc_conv_expr (&se
, clauses
->if_expr
);
798 gfc_add_block_to_block (block
, &se
.pre
);
799 if_var
= gfc_evaluate_now (se
.expr
, block
);
800 gfc_add_block_to_block (block
, &se
.post
);
802 c
= build_omp_clause (OMP_CLAUSE_IF
);
803 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
804 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
807 if (clauses
->num_threads
)
811 gfc_init_se (&se
, NULL
);
812 gfc_conv_expr (&se
, clauses
->num_threads
);
813 gfc_add_block_to_block (block
, &se
.pre
);
814 num_threads
= gfc_evaluate_now (se
.expr
, block
);
815 gfc_add_block_to_block (block
, &se
.post
);
817 c
= build_omp_clause (OMP_CLAUSE_NUM_THREADS
);
818 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
819 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
822 chunk_size
= NULL_TREE
;
823 if (clauses
->chunk_size
)
825 gfc_init_se (&se
, NULL
);
826 gfc_conv_expr (&se
, clauses
->chunk_size
);
827 gfc_add_block_to_block (block
, &se
.pre
);
828 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
829 gfc_add_block_to_block (block
, &se
.post
);
832 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
834 c
= build_omp_clause (OMP_CLAUSE_SCHEDULE
);
835 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
836 switch (clauses
->sched_kind
)
838 case OMP_SCHED_STATIC
:
839 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
841 case OMP_SCHED_DYNAMIC
:
842 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
844 case OMP_SCHED_GUIDED
:
845 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
847 case OMP_SCHED_RUNTIME
:
848 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
851 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
856 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
859 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
861 c
= build_omp_clause (OMP_CLAUSE_DEFAULT
);
862 switch (clauses
->default_sharing
)
864 case OMP_DEFAULT_NONE
:
865 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
867 case OMP_DEFAULT_SHARED
:
868 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
870 case OMP_DEFAULT_PRIVATE
:
871 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
873 case OMP_DEFAULT_FIRSTPRIVATE
:
874 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
879 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
884 c
= build_omp_clause (OMP_CLAUSE_NOWAIT
);
885 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
888 if (clauses
->ordered
)
890 c
= build_omp_clause (OMP_CLAUSE_ORDERED
);
891 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
896 c
= build_omp_clause (OMP_CLAUSE_UNTIED
);
897 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
900 if (clauses
->collapse
)
902 c
= build_omp_clause (OMP_CLAUSE_COLLAPSE
);
903 OMP_CLAUSE_COLLAPSE_EXPR (c
) = build_int_cst (NULL
, clauses
->collapse
);
904 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
910 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
913 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
918 stmt
= gfc_trans_code (code
);
919 if (TREE_CODE (stmt
) != BIND_EXPR
)
921 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
923 tree block
= poplevel (1, 0, 0);
924 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
935 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
936 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
939 gfc_trans_omp_atomic (gfc_code
*code
)
946 tree lhsaddr
, type
, rhs
, x
;
947 enum tree_code op
= ERROR_MARK
;
948 bool var_on_left
= false;
950 code
= code
->block
->next
;
951 gcc_assert (code
->op
== EXEC_ASSIGN
);
952 gcc_assert (code
->next
== NULL
);
953 var
= code
->expr
->symtree
->n
.sym
;
955 gfc_init_se (&lse
, NULL
);
956 gfc_init_se (&rse
, NULL
);
957 gfc_start_block (&block
);
959 gfc_conv_expr (&lse
, code
->expr
);
960 gfc_add_block_to_block (&block
, &lse
.pre
);
961 type
= TREE_TYPE (lse
.expr
);
962 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
965 if (expr2
->expr_type
== EXPR_FUNCTION
966 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
967 expr2
= expr2
->value
.function
.actual
->expr
;
969 if (expr2
->expr_type
== EXPR_OP
)
972 switch (expr2
->value
.op
.operator)
977 case INTRINSIC_TIMES
:
980 case INTRINSIC_MINUS
:
983 case INTRINSIC_DIVIDE
:
984 if (expr2
->ts
.type
== BT_INTEGER
)
990 op
= TRUTH_ANDIF_EXPR
;
993 op
= TRUTH_ORIF_EXPR
;
1004 e
= expr2
->value
.op
.op1
;
1005 if (e
->expr_type
== EXPR_FUNCTION
1006 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
1007 e
= e
->value
.function
.actual
->expr
;
1008 if (e
->expr_type
== EXPR_VARIABLE
1009 && e
->symtree
!= NULL
1010 && e
->symtree
->n
.sym
== var
)
1012 expr2
= expr2
->value
.op
.op2
;
1017 e
= expr2
->value
.op
.op2
;
1018 if (e
->expr_type
== EXPR_FUNCTION
1019 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
1020 e
= e
->value
.function
.actual
->expr
;
1021 gcc_assert (e
->expr_type
== EXPR_VARIABLE
1022 && e
->symtree
!= NULL
1023 && e
->symtree
->n
.sym
== var
);
1024 expr2
= expr2
->value
.op
.op1
;
1025 var_on_left
= false;
1027 gfc_conv_expr (&rse
, expr2
);
1028 gfc_add_block_to_block (&block
, &rse
.pre
);
1032 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
1033 switch (expr2
->value
.function
.isym
->id
)
1053 e
= expr2
->value
.function
.actual
->expr
;
1054 gcc_assert (e
->expr_type
== EXPR_VARIABLE
1055 && e
->symtree
!= NULL
1056 && e
->symtree
->n
.sym
== var
);
1058 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
1059 gfc_add_block_to_block (&block
, &rse
.pre
);
1060 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
1062 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
1063 gfc_actual_arglist
*arg
;
1065 gfc_add_modify_stmt (&block
, accum
, rse
.expr
);
1066 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
1069 gfc_init_block (&rse
.pre
);
1070 gfc_conv_expr (&rse
, arg
->expr
);
1071 gfc_add_block_to_block (&block
, &rse
.pre
);
1072 x
= fold_build2 (op
, TREE_TYPE (accum
), accum
, rse
.expr
);
1073 gfc_add_modify_stmt (&block
, accum
, x
);
1079 expr2
= expr2
->value
.function
.actual
->next
->expr
;
1082 lhsaddr
= save_expr (lhsaddr
);
1083 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
1084 x
= convert (TREE_TYPE (rhs
), build_fold_indirect_ref (lhsaddr
));
1087 x
= fold_build2 (op
, TREE_TYPE (rhs
), x
, rhs
);
1089 x
= fold_build2 (op
, TREE_TYPE (rhs
), rhs
, x
);
1091 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
1092 && TREE_CODE (type
) != COMPLEX_TYPE
)
1093 x
= fold_build1 (REALPART_EXPR
, TREE_TYPE (TREE_TYPE (rhs
)), x
);
1095 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
1096 gfc_add_expr_to_block (&block
, x
);
1098 gfc_add_block_to_block (&block
, &lse
.pre
);
1099 gfc_add_block_to_block (&block
, &rse
.pre
);
1101 return gfc_finish_block (&block
);
1105 gfc_trans_omp_barrier (void)
1107 tree decl
= built_in_decls
[BUILT_IN_GOMP_BARRIER
];
1108 return build_call_expr (decl
, 0);
1112 gfc_trans_omp_critical (gfc_code
*code
)
1114 tree name
= NULL_TREE
, stmt
;
1115 if (code
->ext
.omp_name
!= NULL
)
1116 name
= get_identifier (code
->ext
.omp_name
);
1117 stmt
= gfc_trans_code (code
->block
->next
);
1118 return build2 (OMP_CRITICAL
, void_type_node
, stmt
, name
);
1122 gfc_trans_omp_do (gfc_code
*code
, stmtblock_t
*pblock
,
1123 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
1126 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
;
1127 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
1130 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
1131 gfc_code
*outermost
;
1132 int i
, collapse
= clauses
->collapse
;
1133 tree dovar_init
= NULL_TREE
;
1138 outermost
= code
= code
->block
->next
;
1139 gcc_assert (code
->op
== EXEC_DO
);
1141 init
= make_tree_vec (collapse
);
1142 cond
= make_tree_vec (collapse
);
1143 incr
= make_tree_vec (collapse
);
1147 gfc_start_block (&block
);
1151 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
1153 for (i
= 0; i
< collapse
; i
++)
1156 int dovar_found
= 0;
1161 for (n
= clauses
->lists
[OMP_LIST_LASTPRIVATE
]; n
!= NULL
;
1163 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
1168 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
1169 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
1175 /* Evaluate all the expressions in the iterator. */
1176 gfc_init_se (&se
, NULL
);
1177 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
1178 gfc_add_block_to_block (pblock
, &se
.pre
);
1180 type
= TREE_TYPE (dovar
);
1181 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
1183 gfc_init_se (&se
, NULL
);
1184 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
1185 gfc_add_block_to_block (pblock
, &se
.pre
);
1186 from
= gfc_evaluate_now (se
.expr
, pblock
);
1188 gfc_init_se (&se
, NULL
);
1189 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
1190 gfc_add_block_to_block (pblock
, &se
.pre
);
1191 to
= gfc_evaluate_now (se
.expr
, pblock
);
1193 gfc_init_se (&se
, NULL
);
1194 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
1195 gfc_add_block_to_block (pblock
, &se
.pre
);
1196 step
= gfc_evaluate_now (se
.expr
, pblock
);
1198 /* Special case simple loops. */
1199 if (integer_onep (step
))
1201 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
1207 TREE_VEC_ELT (init
, i
) = build2_v (GIMPLE_MODIFY_STMT
, dovar
, from
);
1208 TREE_VEC_ELT (cond
, i
) = fold_build2 (simple
> 0 ? LE_EXPR
: GE_EXPR
,
1209 boolean_type_node
, dovar
, to
);
1210 TREE_VEC_ELT (incr
, i
) = fold_build2 (PLUS_EXPR
, type
, dovar
, step
);
1211 TREE_VEC_ELT (incr
, i
) = fold_build2 (GIMPLE_MODIFY_STMT
, type
, dovar
,
1212 TREE_VEC_ELT (incr
, i
));
1216 /* STEP is not 1 or -1. Use:
1217 for (count = 0; count < (to + step - from) / step; count++)
1219 dovar = from + count * step;
1223 tmp
= fold_build2 (MINUS_EXPR
, type
, step
, from
);
1224 tmp
= fold_build2 (PLUS_EXPR
, type
, to
, tmp
);
1225 tmp
= fold_build2 (TRUNC_DIV_EXPR
, type
, tmp
, step
);
1226 tmp
= gfc_evaluate_now (tmp
, pblock
);
1227 count
= gfc_create_var (type
, "count");
1228 TREE_VEC_ELT (init
, i
) = build2_v (GIMPLE_MODIFY_STMT
, count
,
1229 build_int_cst (type
, 0));
1230 TREE_VEC_ELT (cond
, i
) = fold_build2 (LT_EXPR
, boolean_type_node
,
1232 TREE_VEC_ELT (incr
, i
) = fold_build2 (PLUS_EXPR
, type
, count
,
1233 build_int_cst (type
, 1));
1234 TREE_VEC_ELT (incr
, i
) = fold_build2 (GIMPLE_MODIFY_STMT
, type
,
1235 count
, TREE_VEC_ELT (incr
, i
));
1237 /* Initialize DOVAR. */
1238 tmp
= fold_build2 (MULT_EXPR
, type
, count
, step
);
1239 tmp
= fold_build2 (PLUS_EXPR
, type
, from
, tmp
);
1240 dovar_init
= tree_cons (dovar
, tmp
, dovar_init
);
1245 tmp
= build_omp_clause (OMP_CLAUSE_PRIVATE
);
1246 OMP_CLAUSE_DECL (tmp
) = dovar
;
1247 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
1249 else if (dovar_found
== 2)
1256 /* If dovar is lastprivate, but different counter is used,
1257 dovar += step needs to be added to
1258 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
1259 will have the value on entry of the last loop, rather
1260 than value after iterator increment. */
1261 tmp
= gfc_evaluate_now (step
, pblock
);
1262 tmp
= fold_build2 (PLUS_EXPR
, type
, dovar
, tmp
);
1263 tmp
= fold_build2 (GIMPLE_MODIFY_STMT
, type
, dovar
, tmp
);
1264 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1265 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1266 && OMP_CLAUSE_DECL (c
) == dovar
)
1268 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
1272 if (c
== NULL
&& par_clauses
!= NULL
)
1274 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1275 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1276 && OMP_CLAUSE_DECL (c
) == dovar
)
1278 tree l
= build_omp_clause (OMP_CLAUSE_LASTPRIVATE
);
1279 OMP_CLAUSE_DECL (l
) = dovar
;
1280 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
1281 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
1283 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
1287 gcc_assert (simple
|| c
!= NULL
);
1291 tmp
= build_omp_clause (OMP_CLAUSE_PRIVATE
);
1292 OMP_CLAUSE_DECL (tmp
) = count
;
1293 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
1296 if (i
+ 1 < collapse
)
1297 code
= code
->block
->next
;
1300 if (pblock
!= &block
)
1303 gfc_start_block (&block
);
1306 gfc_start_block (&body
);
1308 dovar_init
= nreverse (dovar_init
);
1311 gfc_add_modify_stmt (&body
, TREE_PURPOSE (dovar_init
),
1312 TREE_VALUE (dovar_init
));
1313 dovar_init
= TREE_CHAIN (dovar_init
);
1316 /* Cycle statement is implemented with a goto. Exit statement must not be
1317 present for this loop. */
1318 cycle_label
= gfc_build_label_decl (NULL_TREE
);
1320 /* Put these labels where they can be found later. We put the
1321 labels in a TREE_LIST node (because TREE_CHAIN is already
1322 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1323 label in TREE_VALUE (backend_decl). */
1325 code
->block
->backend_decl
= tree_cons (cycle_label
, NULL
, NULL
);
1327 /* Main loop body. */
1328 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
1329 gfc_add_expr_to_block (&body
, tmp
);
1331 /* Label for cycle statements (if needed). */
1332 if (TREE_USED (cycle_label
))
1334 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
1335 gfc_add_expr_to_block (&body
, tmp
);
1338 /* End of loop body. */
1339 stmt
= make_node (OMP_FOR
);
1341 TREE_TYPE (stmt
) = void_type_node
;
1342 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
1343 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
1344 OMP_FOR_INIT (stmt
) = init
;
1345 OMP_FOR_COND (stmt
) = cond
;
1346 OMP_FOR_INCR (stmt
) = incr
;
1347 gfc_add_expr_to_block (&block
, stmt
);
1349 return gfc_finish_block (&block
);
1353 gfc_trans_omp_flush (void)
1355 tree decl
= built_in_decls
[BUILT_IN_SYNCHRONIZE
];
1356 return build_call_expr (decl
, 0);
1360 gfc_trans_omp_master (gfc_code
*code
)
1362 tree stmt
= gfc_trans_code (code
->block
->next
);
1363 if (IS_EMPTY_STMT (stmt
))
1365 return build1_v (OMP_MASTER
, stmt
);
1369 gfc_trans_omp_ordered (gfc_code
*code
)
1371 return build1_v (OMP_ORDERED
, gfc_trans_code (code
->block
->next
));
1375 gfc_trans_omp_parallel (gfc_code
*code
)
1378 tree stmt
, omp_clauses
;
1380 gfc_start_block (&block
);
1381 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
1383 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
1384 stmt
= build4_v (OMP_PARALLEL
, stmt
, omp_clauses
, NULL
, NULL
);
1385 gfc_add_expr_to_block (&block
, stmt
);
1386 return gfc_finish_block (&block
);
1390 gfc_trans_omp_parallel_do (gfc_code
*code
)
1392 stmtblock_t block
, *pblock
= NULL
;
1393 gfc_omp_clauses parallel_clauses
, do_clauses
;
1394 tree stmt
, omp_clauses
= NULL_TREE
;
1396 gfc_start_block (&block
);
1398 memset (&do_clauses
, 0, sizeof (do_clauses
));
1399 if (code
->ext
.omp_clauses
!= NULL
)
1401 memcpy (¶llel_clauses
, code
->ext
.omp_clauses
,
1402 sizeof (parallel_clauses
));
1403 do_clauses
.sched_kind
= parallel_clauses
.sched_kind
;
1404 do_clauses
.chunk_size
= parallel_clauses
.chunk_size
;
1405 do_clauses
.ordered
= parallel_clauses
.ordered
;
1406 do_clauses
.collapse
= parallel_clauses
.collapse
;
1407 parallel_clauses
.sched_kind
= OMP_SCHED_NONE
;
1408 parallel_clauses
.chunk_size
= NULL
;
1409 parallel_clauses
.ordered
= false;
1410 parallel_clauses
.collapse
= 0;
1411 omp_clauses
= gfc_trans_omp_clauses (&block
, ¶llel_clauses
,
1414 do_clauses
.nowait
= true;
1415 if (!do_clauses
.ordered
&& do_clauses
.sched_kind
!= OMP_SCHED_STATIC
)
1419 stmt
= gfc_trans_omp_do (code
, pblock
, &do_clauses
, omp_clauses
);
1420 if (TREE_CODE (stmt
) != BIND_EXPR
)
1421 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0, 0));
1424 stmt
= build4_v (OMP_PARALLEL
, stmt
, omp_clauses
, NULL
, NULL
);
1425 OMP_PARALLEL_COMBINED (stmt
) = 1;
1426 gfc_add_expr_to_block (&block
, stmt
);
1427 return gfc_finish_block (&block
);
1431 gfc_trans_omp_parallel_sections (gfc_code
*code
)
1434 gfc_omp_clauses section_clauses
;
1435 tree stmt
, omp_clauses
;
1437 memset (§ion_clauses
, 0, sizeof (section_clauses
));
1438 section_clauses
.nowait
= true;
1440 gfc_start_block (&block
);
1441 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
1444 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
1445 if (TREE_CODE (stmt
) != BIND_EXPR
)
1446 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0, 0));
1449 stmt
= build4_v (OMP_PARALLEL
, stmt
, omp_clauses
, NULL
, NULL
);
1450 OMP_PARALLEL_COMBINED (stmt
) = 1;
1451 gfc_add_expr_to_block (&block
, stmt
);
1452 return gfc_finish_block (&block
);
1456 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
1459 gfc_omp_clauses workshare_clauses
;
1460 tree stmt
, omp_clauses
;
1462 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
1463 workshare_clauses
.nowait
= true;
1465 gfc_start_block (&block
);
1466 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
1469 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
1470 if (TREE_CODE (stmt
) != BIND_EXPR
)
1471 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0, 0));
1474 stmt
= build4_v (OMP_PARALLEL
, stmt
, omp_clauses
, NULL
, NULL
);
1475 OMP_PARALLEL_COMBINED (stmt
) = 1;
1476 gfc_add_expr_to_block (&block
, stmt
);
1477 return gfc_finish_block (&block
);
1481 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
1483 stmtblock_t block
, body
;
1484 tree omp_clauses
, stmt
;
1485 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
1487 gfc_start_block (&block
);
1489 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
1491 gfc_init_block (&body
);
1492 for (code
= code
->block
; code
; code
= code
->block
)
1494 /* Last section is special because of lastprivate, so even if it
1495 is empty, chain it in. */
1496 stmt
= gfc_trans_omp_code (code
->next
,
1497 has_lastprivate
&& code
->block
== NULL
);
1498 if (! IS_EMPTY_STMT (stmt
))
1500 stmt
= build1_v (OMP_SECTION
, stmt
);
1501 gfc_add_expr_to_block (&body
, stmt
);
1504 stmt
= gfc_finish_block (&body
);
1506 stmt
= build3_v (OMP_SECTIONS
, stmt
, omp_clauses
, NULL_TREE
);
1507 gfc_add_expr_to_block (&block
, stmt
);
1509 return gfc_finish_block (&block
);
1513 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
1515 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, clauses
, code
->loc
);
1516 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
1517 stmt
= build2 (OMP_SINGLE
, void_type_node
, stmt
, omp_clauses
);
1522 gfc_trans_omp_task (gfc_code
*code
)
1525 tree stmt
, body_stmt
, omp_clauses
;
1527 gfc_start_block (&block
);
1528 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
1530 body_stmt
= gfc_trans_omp_code (code
->block
->next
, true);
1531 stmt
= make_node (OMP_TASK
);
1532 TREE_TYPE (stmt
) = void_type_node
;
1533 OMP_TASK_CLAUSES (stmt
) = omp_clauses
;
1534 OMP_TASK_BODY (stmt
) = body_stmt
;
1535 gfc_add_expr_to_block (&block
, stmt
);
1536 return gfc_finish_block (&block
);
1540 gfc_trans_omp_taskwait (void)
1542 tree decl
= built_in_decls
[BUILT_IN_GOMP_TASKWAIT
];
1543 return build_call_expr (decl
, 0);
1547 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
1550 return gfc_trans_omp_single (code
, clauses
);
1554 gfc_trans_omp_directive (gfc_code
*code
)
1558 case EXEC_OMP_ATOMIC
:
1559 return gfc_trans_omp_atomic (code
);
1560 case EXEC_OMP_BARRIER
:
1561 return gfc_trans_omp_barrier ();
1562 case EXEC_OMP_CRITICAL
:
1563 return gfc_trans_omp_critical (code
);
1565 return gfc_trans_omp_do (code
, NULL
, code
->ext
.omp_clauses
, NULL
);
1566 case EXEC_OMP_FLUSH
:
1567 return gfc_trans_omp_flush ();
1568 case EXEC_OMP_MASTER
:
1569 return gfc_trans_omp_master (code
);
1570 case EXEC_OMP_ORDERED
:
1571 return gfc_trans_omp_ordered (code
);
1572 case EXEC_OMP_PARALLEL
:
1573 return gfc_trans_omp_parallel (code
);
1574 case EXEC_OMP_PARALLEL_DO
:
1575 return gfc_trans_omp_parallel_do (code
);
1576 case EXEC_OMP_PARALLEL_SECTIONS
:
1577 return gfc_trans_omp_parallel_sections (code
);
1578 case EXEC_OMP_PARALLEL_WORKSHARE
:
1579 return gfc_trans_omp_parallel_workshare (code
);
1580 case EXEC_OMP_SECTIONS
:
1581 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
1582 case EXEC_OMP_SINGLE
:
1583 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
1585 return gfc_trans_omp_task (code
);
1586 case EXEC_OMP_TASKWAIT
:
1587 return gfc_trans_omp_taskwait ();
1588 case EXEC_OMP_WORKSHARE
:
1589 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);