1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jakub Jelinek <jakub@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
27 #include "gimple.h" /* For create_tmp_var_raw. */
28 #include "diagnostic-core.h" /* For internal_error. */
31 #include "trans-stmt.h"
32 #include "trans-types.h"
33 #include "trans-array.h"
34 #include "trans-const.h"
39 /* True if OpenMP should privatize what this DECL points to rather
40 than the DECL itself. */
43 gfc_omp_privatize_by_reference (const_tree decl
)
45 tree type
= TREE_TYPE (decl
);
47 if (TREE_CODE (type
) == REFERENCE_TYPE
48 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
51 if (TREE_CODE (type
) == POINTER_TYPE
)
53 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
54 that have POINTER_TYPE type and don't have GFC_POINTER_TYPE_P
55 set are supposed to be privatized by reference. */
56 if (GFC_POINTER_TYPE_P (type
))
59 if (!DECL_ARTIFICIAL (decl
)
60 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
63 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
65 if (DECL_LANG_SPECIFIC (decl
)
66 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
73 /* True if OpenMP sharing attribute of DECL is predetermined. */
75 enum omp_clause_default_kind
76 gfc_omp_predetermined_sharing (tree decl
)
78 if (DECL_ARTIFICIAL (decl
)
79 && ! GFC_DECL_RESULT (decl
)
80 && ! (DECL_LANG_SPECIFIC (decl
)
81 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
82 return OMP_CLAUSE_DEFAULT_SHARED
;
84 /* Cray pointees shouldn't be listed in any clauses and should be
85 gimplified to dereference of the corresponding Cray pointer.
86 Make them all private, so that they are emitted in the debug
88 if (GFC_DECL_CRAY_POINTEE (decl
))
89 return OMP_CLAUSE_DEFAULT_PRIVATE
;
91 /* Assumed-size arrays are predetermined to inherit sharing
92 attributes of the associated actual argument, which is shared
94 if (TREE_CODE (decl
) == PARM_DECL
95 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
96 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
97 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
98 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
100 return OMP_CLAUSE_DEFAULT_SHARED
;
102 /* Dummy procedures aren't considered variables by OpenMP, thus are
103 disallowed in OpenMP clauses. They are represented as PARM_DECLs
104 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
105 to avoid complaining about their uses with default(none). */
106 if (TREE_CODE (decl
) == PARM_DECL
107 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
108 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
109 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
111 /* COMMON and EQUIVALENCE decls are shared. They
112 are only referenced through DECL_VALUE_EXPR of the variables
113 contained in them. If those are privatized, they will not be
114 gimplified to the COMMON or EQUIVALENCE decls. */
115 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
116 return OMP_CLAUSE_DEFAULT_SHARED
;
118 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
119 return OMP_CLAUSE_DEFAULT_SHARED
;
121 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
124 /* Return decl that should be used when reporting DEFAULT(NONE)
128 gfc_omp_report_decl (tree decl
)
130 if (DECL_ARTIFICIAL (decl
)
131 && DECL_LANG_SPECIFIC (decl
)
132 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
133 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
138 /* Return true if DECL in private clause needs
139 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
141 gfc_omp_private_outer_ref (tree decl
)
143 tree type
= TREE_TYPE (decl
);
145 if (GFC_DESCRIPTOR_TYPE_P (type
)
146 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
152 /* Return code to initialize DECL with its default constructor, or
153 NULL if there's nothing to do. */
156 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
158 tree type
= TREE_TYPE (decl
), rank
, size
, esize
, ptr
, cond
, then_b
, else_b
;
159 stmtblock_t block
, cond_block
;
161 if (! GFC_DESCRIPTOR_TYPE_P (type
)
162 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
165 gcc_assert (outer
!= NULL
);
166 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_PRIVATE
167 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LASTPRIVATE
);
169 /* Allocatable arrays in PRIVATE clauses need to be set to
170 "not currently allocated" allocation status if outer
171 array is "not currently allocated", otherwise should be allocated. */
172 gfc_start_block (&block
);
174 gfc_init_block (&cond_block
);
176 gfc_add_modify (&cond_block
, decl
, outer
);
177 rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
178 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
179 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
180 size
, gfc_conv_descriptor_lbound_get (decl
, rank
));
181 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
182 size
, gfc_index_one_node
);
183 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
184 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
185 size
, gfc_conv_descriptor_stride_get (decl
, rank
));
186 esize
= fold_convert (gfc_array_index_type
,
187 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
188 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
190 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
), &cond_block
);
191 ptr
= gfc_allocate_array_with_status (&cond_block
,
192 build_int_cst (pvoid_type_node
, 0),
194 gfc_conv_descriptor_data_set (&cond_block
, decl
, ptr
);
195 then_b
= gfc_finish_block (&cond_block
);
197 gfc_init_block (&cond_block
);
198 gfc_conv_descriptor_data_set (&cond_block
, decl
, null_pointer_node
);
199 else_b
= gfc_finish_block (&cond_block
);
201 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
202 fold_convert (pvoid_type_node
,
203 gfc_conv_descriptor_data_get (outer
)),
205 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
206 void_type_node
, cond
, then_b
, else_b
));
208 return gfc_finish_block (&block
);
211 /* Build and return code for a copy constructor from SRC to DEST. */
214 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
216 tree type
= TREE_TYPE (dest
), ptr
, size
, esize
, rank
, call
;
219 if (! GFC_DESCRIPTOR_TYPE_P (type
)
220 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
221 return build2_v (MODIFY_EXPR
, dest
, src
);
223 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
);
225 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
226 and copied from SRC. */
227 gfc_start_block (&block
);
229 gfc_add_modify (&block
, dest
, src
);
230 rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
231 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
232 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
233 size
, gfc_conv_descriptor_lbound_get (dest
, rank
));
234 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
235 size
, gfc_index_one_node
);
236 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
237 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
238 size
, gfc_conv_descriptor_stride_get (dest
, rank
));
239 esize
= fold_convert (gfc_array_index_type
,
240 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
241 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
243 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
), &block
);
244 ptr
= gfc_allocate_array_with_status (&block
,
245 build_int_cst (pvoid_type_node
, 0),
247 gfc_conv_descriptor_data_set (&block
, dest
, ptr
);
248 call
= build_call_expr_loc (input_location
,
249 built_in_decls
[BUILT_IN_MEMCPY
], 3, ptr
,
250 fold_convert (pvoid_type_node
,
251 gfc_conv_descriptor_data_get (src
)),
253 gfc_add_expr_to_block (&block
, fold_convert (void_type_node
, call
));
255 return gfc_finish_block (&block
);
258 /* Similarly, except use an assignment operator instead. */
261 gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED
, tree dest
, tree src
)
263 tree type
= TREE_TYPE (dest
), rank
, size
, esize
, call
;
266 if (! GFC_DESCRIPTOR_TYPE_P (type
)
267 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
268 return build2_v (MODIFY_EXPR
, dest
, src
);
270 /* Handle copying allocatable arrays. */
271 gfc_start_block (&block
);
273 rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
274 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
275 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
276 size
, gfc_conv_descriptor_lbound_get (dest
, rank
));
277 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
278 size
, gfc_index_one_node
);
279 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
280 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
281 size
, gfc_conv_descriptor_stride_get (dest
, rank
));
282 esize
= fold_convert (gfc_array_index_type
,
283 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
284 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
286 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
), &block
);
287 call
= build_call_expr_loc (input_location
,
288 built_in_decls
[BUILT_IN_MEMCPY
], 3,
289 fold_convert (pvoid_type_node
,
290 gfc_conv_descriptor_data_get (dest
)),
291 fold_convert (pvoid_type_node
,
292 gfc_conv_descriptor_data_get (src
)),
294 gfc_add_expr_to_block (&block
, fold_convert (void_type_node
, call
));
296 return gfc_finish_block (&block
);
299 /* Build and return code destructing DECL. Return NULL if nothing
303 gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED
, tree decl
)
305 tree type
= TREE_TYPE (decl
);
307 if (! GFC_DESCRIPTOR_TYPE_P (type
)
308 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
311 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
312 to be deallocated if they were allocated. */
313 return gfc_trans_dealloc_allocated (decl
);
317 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
318 disregarded in OpenMP construct, because it is going to be
319 remapped during OpenMP lowering. SHARED is true if DECL
320 is going to be shared, false if it is going to be privatized. */
323 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
325 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
326 && DECL_HAS_VALUE_EXPR_P (decl
))
328 tree value
= DECL_VALUE_EXPR (decl
);
330 if (TREE_CODE (value
) == COMPONENT_REF
331 && TREE_CODE (TREE_OPERAND (value
, 0)) == VAR_DECL
332 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
334 /* If variable in COMMON or EQUIVALENCE is privatized, return
335 true, as just that variable is supposed to be privatized,
336 not the whole COMMON or whole EQUIVALENCE.
337 For shared variables in COMMON or EQUIVALENCE, let them be
338 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
339 from the same COMMON or EQUIVALENCE just one sharing of the
340 whole COMMON or EQUIVALENCE is enough. */
345 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
351 /* Return true if DECL that is shared iff SHARED is true should
352 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
356 gfc_omp_private_debug_clause (tree decl
, bool shared
)
358 if (GFC_DECL_CRAY_POINTEE (decl
))
361 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
362 && DECL_HAS_VALUE_EXPR_P (decl
))
364 tree value
= DECL_VALUE_EXPR (decl
);
366 if (TREE_CODE (value
) == COMPONENT_REF
367 && TREE_CODE (TREE_OPERAND (value
, 0)) == VAR_DECL
368 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
375 /* Register language specific type size variables as potentially OpenMP
376 firstprivate variables. */
379 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
381 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
385 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
386 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
388 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
389 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
390 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
392 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
393 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
399 gfc_trans_add_clause (tree node
, tree tail
)
401 OMP_CLAUSE_CHAIN (node
) = tail
;
406 gfc_trans_omp_variable (gfc_symbol
*sym
)
408 tree t
= gfc_get_symbol_decl (sym
);
412 bool alternate_entry
;
415 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
416 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
417 && sym
->result
== sym
;
418 entry_master
= sym
->attr
.result
419 && sym
->ns
->proc_name
->attr
.entry_master
420 && !gfc_return_by_reference (sym
->ns
->proc_name
);
421 parent_decl
= DECL_CONTEXT (current_function_decl
);
423 if ((t
== parent_decl
&& return_value
)
424 || (sym
->ns
&& sym
->ns
->proc_name
425 && sym
->ns
->proc_name
->backend_decl
== parent_decl
426 && (alternate_entry
|| entry_master
)))
431 /* Special case for assigning the return value of a function.
432 Self recursive functions must have an explicit return value. */
433 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
434 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
436 /* Similarly for alternate entry points. */
437 else if (alternate_entry
438 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
441 gfc_entry_list
*el
= NULL
;
443 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
446 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
451 else if (entry_master
452 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
454 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
460 gfc_trans_omp_variable_list (enum omp_clause_code code
, gfc_namelist
*namelist
,
463 for (; namelist
!= NULL
; namelist
= namelist
->next
)
464 if (namelist
->sym
->attr
.referenced
)
466 tree t
= gfc_trans_omp_variable (namelist
->sym
);
467 if (t
!= error_mark_node
)
469 tree node
= build_omp_clause (input_location
, code
);
470 OMP_CLAUSE_DECL (node
) = t
;
471 list
= gfc_trans_add_clause (node
, list
);
478 gfc_trans_omp_array_reduction (tree c
, gfc_symbol
*sym
, locus where
)
480 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
481 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
482 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
483 gfc_expr
*e1
, *e2
, *e3
, *e4
;
485 tree decl
, backend_decl
, stmt
;
486 locus old_loc
= gfc_current_locus
;
490 decl
= OMP_CLAUSE_DECL (c
);
491 gfc_current_locus
= where
;
493 /* Create a fake symbol for init value. */
494 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
495 init_val_sym
.ns
= sym
->ns
;
496 init_val_sym
.name
= sym
->name
;
497 init_val_sym
.ts
= sym
->ts
;
498 init_val_sym
.attr
.referenced
= 1;
499 init_val_sym
.declared_at
= where
;
500 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
501 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
502 init_val_sym
.backend_decl
= backend_decl
;
504 /* Create a fake symbol for the outer array reference. */
506 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
507 outer_sym
.attr
.dummy
= 0;
508 outer_sym
.attr
.result
= 0;
509 outer_sym
.attr
.flavor
= FL_VARIABLE
;
510 outer_sym
.backend_decl
= create_tmp_var_raw (TREE_TYPE (decl
), NULL
);
512 /* Create fake symtrees for it. */
513 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
514 symtree1
->n
.sym
= sym
;
515 gcc_assert (symtree1
== root1
);
517 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
518 symtree2
->n
.sym
= &init_val_sym
;
519 gcc_assert (symtree2
== root2
);
521 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
522 symtree3
->n
.sym
= &outer_sym
;
523 gcc_assert (symtree3
== root3
);
525 /* Create expressions. */
526 e1
= gfc_get_expr ();
527 e1
->expr_type
= EXPR_VARIABLE
;
529 e1
->symtree
= symtree1
;
531 e1
->ref
= ref
= gfc_get_ref ();
532 ref
->type
= REF_ARRAY
;
533 ref
->u
.ar
.where
= where
;
534 ref
->u
.ar
.as
= sym
->as
;
535 ref
->u
.ar
.type
= AR_FULL
;
537 t
= gfc_resolve_expr (e1
);
538 gcc_assert (t
== SUCCESS
);
540 e2
= gfc_get_expr ();
541 e2
->expr_type
= EXPR_VARIABLE
;
543 e2
->symtree
= symtree2
;
545 t
= gfc_resolve_expr (e2
);
546 gcc_assert (t
== SUCCESS
);
548 e3
= gfc_copy_expr (e1
);
549 e3
->symtree
= symtree3
;
550 t
= gfc_resolve_expr (e3
);
551 gcc_assert (t
== SUCCESS
);
554 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
558 e4
= gfc_add (e3
, e1
);
561 e4
= gfc_multiply (e3
, e1
);
563 case TRUTH_ANDIF_EXPR
:
564 e4
= gfc_and (e3
, e1
);
566 case TRUTH_ORIF_EXPR
:
567 e4
= gfc_or (e3
, e1
);
570 e4
= gfc_eqv (e3
, e1
);
573 e4
= gfc_neqv (e3
, e1
);
595 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
596 intrinsic_sym
.ns
= sym
->ns
;
597 intrinsic_sym
.name
= iname
;
598 intrinsic_sym
.ts
= sym
->ts
;
599 intrinsic_sym
.attr
.referenced
= 1;
600 intrinsic_sym
.attr
.intrinsic
= 1;
601 intrinsic_sym
.attr
.function
= 1;
602 intrinsic_sym
.result
= &intrinsic_sym
;
603 intrinsic_sym
.declared_at
= where
;
605 symtree4
= gfc_new_symtree (&root4
, iname
);
606 symtree4
->n
.sym
= &intrinsic_sym
;
607 gcc_assert (symtree4
== root4
);
609 e4
= gfc_get_expr ();
610 e4
->expr_type
= EXPR_FUNCTION
;
612 e4
->symtree
= symtree4
;
613 e4
->value
.function
.isym
= gfc_find_function (iname
);
614 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
615 e4
->value
.function
.actual
->expr
= e3
;
616 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
617 e4
->value
.function
.actual
->next
->expr
= e1
;
619 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
620 e1
= gfc_copy_expr (e1
);
621 e3
= gfc_copy_expr (e3
);
622 t
= gfc_resolve_expr (e4
);
623 gcc_assert (t
== SUCCESS
);
625 /* Create the init statement list. */
627 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
))
628 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_ALLOCATABLE
)
630 /* If decl is an allocatable array, it needs to be allocated
631 with the same bounds as the outer var. */
632 tree type
= TREE_TYPE (decl
), rank
, size
, esize
, ptr
;
635 gfc_start_block (&block
);
637 gfc_add_modify (&block
, decl
, outer_sym
.backend_decl
);
638 rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
639 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
640 size
= fold_build2_loc (input_location
, MINUS_EXPR
,
641 gfc_array_index_type
, size
,
642 gfc_conv_descriptor_lbound_get (decl
, rank
));
643 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
644 size
, gfc_index_one_node
);
645 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
646 size
= fold_build2_loc (input_location
, MULT_EXPR
,
647 gfc_array_index_type
, size
,
648 gfc_conv_descriptor_stride_get (decl
, rank
));
649 esize
= fold_convert (gfc_array_index_type
,
650 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
651 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
653 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
), &block
);
654 ptr
= gfc_allocate_array_with_status (&block
,
655 build_int_cst (pvoid_type_node
, 0),
657 gfc_conv_descriptor_data_set (&block
, decl
, ptr
);
658 gfc_add_expr_to_block (&block
, gfc_trans_assignment (e1
, e2
, false,
660 stmt
= gfc_finish_block (&block
);
663 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
664 if (TREE_CODE (stmt
) != BIND_EXPR
)
665 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0, 0));
668 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
670 /* Create the merge statement list. */
672 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
))
673 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_ALLOCATABLE
)
675 /* If decl is an allocatable array, it needs to be deallocated
679 gfc_start_block (&block
);
680 gfc_add_expr_to_block (&block
, gfc_trans_assignment (e3
, e4
, false,
682 gfc_add_expr_to_block (&block
, gfc_trans_dealloc_allocated (decl
));
683 stmt
= gfc_finish_block (&block
);
686 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
687 if (TREE_CODE (stmt
) != BIND_EXPR
)
688 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0, 0));
691 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
693 /* And stick the placeholder VAR_DECL into the clause as well. */
694 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_sym
.backend_decl
;
696 gfc_current_locus
= old_loc
;
707 gfc_free_array_spec (outer_sym
.as
);
711 gfc_trans_omp_reduction_list (gfc_namelist
*namelist
, tree list
,
712 enum tree_code reduction_code
, locus where
)
714 for (; namelist
!= NULL
; namelist
= namelist
->next
)
715 if (namelist
->sym
->attr
.referenced
)
717 tree t
= gfc_trans_omp_variable (namelist
->sym
);
718 if (t
!= error_mark_node
)
720 tree node
= build_omp_clause (where
.lb
->location
,
721 OMP_CLAUSE_REDUCTION
);
722 OMP_CLAUSE_DECL (node
) = t
;
723 OMP_CLAUSE_REDUCTION_CODE (node
) = reduction_code
;
724 if (namelist
->sym
->attr
.dimension
)
725 gfc_trans_omp_array_reduction (node
, namelist
->sym
, where
);
726 list
= gfc_trans_add_clause (node
, list
);
733 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
736 tree omp_clauses
= NULL_TREE
, chunk_size
, c
;
738 enum omp_clause_code clause_code
;
744 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
746 gfc_namelist
*n
= clauses
->lists
[list
];
750 if (list
>= OMP_LIST_REDUCTION_FIRST
751 && list
<= OMP_LIST_REDUCTION_LAST
)
753 enum tree_code reduction_code
;
757 reduction_code
= PLUS_EXPR
;
760 reduction_code
= MULT_EXPR
;
763 reduction_code
= MINUS_EXPR
;
766 reduction_code
= TRUTH_ANDIF_EXPR
;
769 reduction_code
= TRUTH_ORIF_EXPR
;
772 reduction_code
= EQ_EXPR
;
775 reduction_code
= NE_EXPR
;
778 reduction_code
= MAX_EXPR
;
781 reduction_code
= MIN_EXPR
;
784 reduction_code
= BIT_AND_EXPR
;
787 reduction_code
= BIT_IOR_EXPR
;
790 reduction_code
= BIT_XOR_EXPR
;
796 = gfc_trans_omp_reduction_list (n
, omp_clauses
, reduction_code
,
802 case OMP_LIST_PRIVATE
:
803 clause_code
= OMP_CLAUSE_PRIVATE
;
805 case OMP_LIST_SHARED
:
806 clause_code
= OMP_CLAUSE_SHARED
;
808 case OMP_LIST_FIRSTPRIVATE
:
809 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
811 case OMP_LIST_LASTPRIVATE
:
812 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
814 case OMP_LIST_COPYIN
:
815 clause_code
= OMP_CLAUSE_COPYIN
;
817 case OMP_LIST_COPYPRIVATE
:
818 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
822 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
);
829 if (clauses
->if_expr
)
833 gfc_init_se (&se
, NULL
);
834 gfc_conv_expr (&se
, clauses
->if_expr
);
835 gfc_add_block_to_block (block
, &se
.pre
);
836 if_var
= gfc_evaluate_now (se
.expr
, block
);
837 gfc_add_block_to_block (block
, &se
.post
);
839 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
840 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
841 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
844 if (clauses
->num_threads
)
848 gfc_init_se (&se
, NULL
);
849 gfc_conv_expr (&se
, clauses
->num_threads
);
850 gfc_add_block_to_block (block
, &se
.pre
);
851 num_threads
= gfc_evaluate_now (se
.expr
, block
);
852 gfc_add_block_to_block (block
, &se
.post
);
854 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_THREADS
);
855 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
856 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
859 chunk_size
= NULL_TREE
;
860 if (clauses
->chunk_size
)
862 gfc_init_se (&se
, NULL
);
863 gfc_conv_expr (&se
, clauses
->chunk_size
);
864 gfc_add_block_to_block (block
, &se
.pre
);
865 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
866 gfc_add_block_to_block (block
, &se
.post
);
869 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
871 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SCHEDULE
);
872 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
873 switch (clauses
->sched_kind
)
875 case OMP_SCHED_STATIC
:
876 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
878 case OMP_SCHED_DYNAMIC
:
879 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
881 case OMP_SCHED_GUIDED
:
882 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
884 case OMP_SCHED_RUNTIME
:
885 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
888 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
893 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
896 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
898 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULT
);
899 switch (clauses
->default_sharing
)
901 case OMP_DEFAULT_NONE
:
902 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
904 case OMP_DEFAULT_SHARED
:
905 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
907 case OMP_DEFAULT_PRIVATE
:
908 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
910 case OMP_DEFAULT_FIRSTPRIVATE
:
911 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
916 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
921 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOWAIT
);
922 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
925 if (clauses
->ordered
)
927 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ORDERED
);
928 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
933 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_UNTIED
);
934 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
937 if (clauses
->collapse
)
939 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_COLLAPSE
);
940 OMP_CLAUSE_COLLAPSE_EXPR (c
) = build_int_cst (NULL
, clauses
->collapse
);
941 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
947 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
950 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
955 stmt
= gfc_trans_code (code
);
956 if (TREE_CODE (stmt
) != BIND_EXPR
)
958 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
960 tree block
= poplevel (1, 0, 0);
961 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
972 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
973 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
976 gfc_trans_omp_atomic (gfc_code
*code
)
983 tree lhsaddr
, type
, rhs
, x
;
984 enum tree_code op
= ERROR_MARK
;
985 bool var_on_left
= false;
987 code
= code
->block
->next
;
988 gcc_assert (code
->op
== EXEC_ASSIGN
);
989 gcc_assert (code
->next
== NULL
);
990 var
= code
->expr1
->symtree
->n
.sym
;
992 gfc_init_se (&lse
, NULL
);
993 gfc_init_se (&rse
, NULL
);
994 gfc_start_block (&block
);
996 gfc_conv_expr (&lse
, code
->expr1
);
997 gfc_add_block_to_block (&block
, &lse
.pre
);
998 type
= TREE_TYPE (lse
.expr
);
999 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
1001 expr2
= code
->expr2
;
1002 if (expr2
->expr_type
== EXPR_FUNCTION
1003 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
1004 expr2
= expr2
->value
.function
.actual
->expr
;
1006 if (expr2
->expr_type
== EXPR_OP
)
1009 switch (expr2
->value
.op
.op
)
1011 case INTRINSIC_PLUS
:
1014 case INTRINSIC_TIMES
:
1017 case INTRINSIC_MINUS
:
1020 case INTRINSIC_DIVIDE
:
1021 if (expr2
->ts
.type
== BT_INTEGER
)
1022 op
= TRUNC_DIV_EXPR
;
1027 op
= TRUTH_ANDIF_EXPR
;
1030 op
= TRUTH_ORIF_EXPR
;
1035 case INTRINSIC_NEQV
:
1041 e
= expr2
->value
.op
.op1
;
1042 if (e
->expr_type
== EXPR_FUNCTION
1043 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
1044 e
= e
->value
.function
.actual
->expr
;
1045 if (e
->expr_type
== EXPR_VARIABLE
1046 && e
->symtree
!= NULL
1047 && e
->symtree
->n
.sym
== var
)
1049 expr2
= expr2
->value
.op
.op2
;
1054 e
= expr2
->value
.op
.op2
;
1055 if (e
->expr_type
== EXPR_FUNCTION
1056 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
1057 e
= e
->value
.function
.actual
->expr
;
1058 gcc_assert (e
->expr_type
== EXPR_VARIABLE
1059 && e
->symtree
!= NULL
1060 && e
->symtree
->n
.sym
== var
);
1061 expr2
= expr2
->value
.op
.op1
;
1062 var_on_left
= false;
1064 gfc_conv_expr (&rse
, expr2
);
1065 gfc_add_block_to_block (&block
, &rse
.pre
);
1069 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
1070 switch (expr2
->value
.function
.isym
->id
)
1090 e
= expr2
->value
.function
.actual
->expr
;
1091 gcc_assert (e
->expr_type
== EXPR_VARIABLE
1092 && e
->symtree
!= NULL
1093 && e
->symtree
->n
.sym
== var
);
1095 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
1096 gfc_add_block_to_block (&block
, &rse
.pre
);
1097 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
1099 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
1100 gfc_actual_arglist
*arg
;
1102 gfc_add_modify (&block
, accum
, rse
.expr
);
1103 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
1106 gfc_init_block (&rse
.pre
);
1107 gfc_conv_expr (&rse
, arg
->expr
);
1108 gfc_add_block_to_block (&block
, &rse
.pre
);
1109 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
1111 gfc_add_modify (&block
, accum
, x
);
1117 expr2
= expr2
->value
.function
.actual
->next
->expr
;
1120 lhsaddr
= save_expr (lhsaddr
);
1121 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
1122 x
= convert (TREE_TYPE (rhs
), build_fold_indirect_ref_loc (input_location
,
1126 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
1128 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
1130 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
1131 && TREE_CODE (type
) != COMPLEX_TYPE
)
1132 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
1133 TREE_TYPE (TREE_TYPE (rhs
)), x
);
1135 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
1136 gfc_add_expr_to_block (&block
, x
);
1138 gfc_add_block_to_block (&block
, &lse
.pre
);
1139 gfc_add_block_to_block (&block
, &rse
.pre
);
1141 return gfc_finish_block (&block
);
1145 gfc_trans_omp_barrier (void)
1147 tree decl
= built_in_decls
[BUILT_IN_GOMP_BARRIER
];
1148 return build_call_expr_loc (input_location
, decl
, 0);
1152 gfc_trans_omp_critical (gfc_code
*code
)
1154 tree name
= NULL_TREE
, stmt
;
1155 if (code
->ext
.omp_name
!= NULL
)
1156 name
= get_identifier (code
->ext
.omp_name
);
1157 stmt
= gfc_trans_code (code
->block
->next
);
1158 return build2_loc (input_location
, OMP_CRITICAL
, void_type_node
, stmt
, name
);
1161 typedef struct dovar_init_d
{
1166 DEF_VEC_O(dovar_init
);
1167 DEF_VEC_ALLOC_O(dovar_init
,heap
);
/* Translate !$OMP DO (also used for the DO part of combined
   constructs).  Builds an OMP_FOR node whose init/cond/incr vectors
   have CLAUSES->collapse elements, one per collapsed loop.  Loops
   whose step is +1 or -1 use the DO variable directly as the loop
   control variable; any other step is rewritten as
     for (count = 0; count < (to + step - from) / step; count++)
       dovar = from + count * step;
   PBLOCK, when non-NULL, receives the clause and bound setup
   statements; PAR_CLAUSES are the clauses of an enclosing combined
   PARALLEL DO (or NULL_TREE).
   NOTE(review): parts of this body were reconstructed from an
   incomplete extract -- verify against the original file.  */

static tree
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
		  gfc_omp_clauses *do_clauses, tree par_clauses)
{
  gfc_se se;
  tree dovar, stmt, from, to, step, type, init, cond, incr;
  tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
  stmtblock_t block;
  stmtblock_t body;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;
  int i, collapse = clauses->collapse;
  VEC(dovar_init,heap) *inits = NULL;
  dovar_init *di;
  unsigned ix;

  if (collapse <= 0)
    collapse = 1;

  code = code->block->next;
  gcc_assert (code->op == EXEC_DO);

  init = make_tree_vec (collapse);
  cond = make_tree_vec (collapse);
  incr = make_tree_vec (collapse);

  if (pblock == NULL)
    {
      gfc_start_block (&block);
      pblock = &block;
    }

  /* Clause setup statements go into *PBLOCK, ahead of the loop.  */
  omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);

  for (i = 0; i < collapse; i++)
    {
      int simple = 0;
      /* 0: iterator not named in any clause; 1: named in PRIVATE;
	 2: named in LASTPRIVATE.  */
      int dovar_found = 0;
      tree dovar_decl;

      if (clauses)
	{
	  gfc_namelist *n;
	  for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
	       n = n->next)
	    if (code->ext.iterator->var->symtree->n.sym == n->sym)
	      break;
	  if (n != NULL)
	    dovar_found = 1;
	  else if (n == NULL)
	    for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
	      if (code->ext.iterator->var->symtree->n.sym == n->sym)
		break;
	  if (n != NULL)
	    dovar_found++;
	}

      /* Evaluate all the expressions in the iterator.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->ext.iterator->var);
      gfc_add_block_to_block (pblock, &se.pre);
      dovar = se.expr;
      type = TREE_TYPE (dovar);
      gcc_assert (TREE_CODE (type) == INTEGER_TYPE);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->start);
      gfc_add_block_to_block (pblock, &se.pre);
      from = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->end);
      gfc_add_block_to_block (pblock, &se.pre);
      to = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->step);
      gfc_add_block_to_block (pblock, &se.pre);
      step = gfc_evaluate_now (se.expr, pblock);
      dovar_decl = dovar;

      /* Special case simple loops.  */
      if (TREE_CODE (dovar) == VAR_DECL)
	{
	  if (integer_onep (step))
	    simple = 1;
	  else if (tree_int_cst_equal (step, integer_minus_one_node))
	    simple = -1;
	}
      else
	dovar_decl
	  = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym);

      /* Loop body.  */
      if (simple)
	{
	  /* Step is +1 or -1: DOVAR itself is the OMP_FOR control
	     variable.  */
	  TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
	  TREE_VEC_ELT (cond, i) = fold_build2_loc (input_location, simple > 0
						    ? LE_EXPR : GE_EXPR,
						    boolean_type_node, dovar,
						    to);
	  TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
						    type, dovar, step);
	  TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
						    MODIFY_EXPR, type, dovar,
						    TREE_VEC_ELT (incr, i));
	}
      else
	{
	  /* STEP is not 1 or -1.  Use:
	     for (count = 0; count < (to + step - from) / step; count++)
	       dovar = from + count * step;  */
	  tmp = fold_build2_loc (input_location, MINUS_EXPR, type, step, from);
	  tmp = fold_build2_loc (input_location, PLUS_EXPR, type, to, tmp);
	  tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, type, tmp,
				 step);
	  tmp = gfc_evaluate_now (tmp, pblock);
	  count = gfc_create_var (type, "count");
	  TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
					     build_int_cst (type, 0));
	  TREE_VEC_ELT (cond, i) = fold_build2_loc (input_location, LT_EXPR,
						    boolean_type_node, count,
						    tmp);
	  TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
						    type, count,
						    build_int_cst (type, 1));
	  TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
						    MODIFY_EXPR, type, count,
						    TREE_VEC_ELT (incr, i));

	  /* Initialize DOVAR.  Deferred: the assignment belongs at the
	     top of the loop body, not here.  */
	  tmp = fold_build2_loc (input_location, MULT_EXPR, type, count, step);
	  tmp = fold_build2_loc (input_location, PLUS_EXPR, type, from, tmp);
	  di = VEC_safe_push (dovar_init, heap, inits, NULL);
	  di->var = dovar;
	  di->init = tmp;
	}

      if (!dovar_found)
	{
	  /* Iterator not mentioned in any clause: privatize it here.  */
	  tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
	  OMP_CLAUSE_DECL (tmp) = dovar_decl;
	  omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
	}
      else if (dovar_found == 2)
	{
	  tree c = NULL;

	  tmp = NULL;
	  if (!simple)
	    {
	      /* If dovar is lastprivate, but different counter is used,
		 dovar += step needs to be added to
		 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
		 will have the value on entry of the last loop, rather
		 than value after iterator increment.  */
	      tmp = gfc_evaluate_now (step, pblock);
	      tmp = fold_build2_loc (input_location, PLUS_EXPR, type, dovar,
				     tmp);
	      tmp = fold_build2_loc (input_location, MODIFY_EXPR, type,
				     dovar, tmp);
	      for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		    && OMP_CLAUSE_DECL (c) == dovar_decl)
		  {
		    OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
		    break;
		  }
	    }
	  if (c == NULL && par_clauses != NULL)
	    {
	      /* LASTPRIVATE sits on the enclosing combined parallel
		 construct: add a lastprivate clause to the worksharing
		 loop and downgrade the parallel's copy to SHARED.  */
	      for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		    && OMP_CLAUSE_DECL (c) == dovar_decl)
		  {
		    tree l = build_omp_clause (input_location,
					       OMP_CLAUSE_LASTPRIVATE);
		    OMP_CLAUSE_DECL (l) = dovar_decl;
		    OMP_CLAUSE_CHAIN (l) = omp_clauses;
		    OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
		    omp_clauses = l;
		    OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
		    break;
		  }
	    }
	  gcc_assert (simple || c != NULL);
	}
      if (!simple)
	{
	  /* The synthesized COUNT variable is always private.  */
	  tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
	  OMP_CLAUSE_DECL (tmp) = count;
	  omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
	}

      /* For a collapsed nest, step into the next inner DO.  */
      if (i + 1 < collapse)
	code = code->block->next;
    }

  if (pblock != &block)
    {
      pblock = &block;
      gfc_start_block (&block);
    }

  gfc_start_block (&body);

  /* Emit the deferred DOVAR = from + count * step assignments.  */
  FOR_EACH_VEC_ELT (dovar_init, inits, ix, di)
    gfc_add_modify (&body, di->var, di->init);
  VEC_free (dovar_init, heap, inits);

  /* Cycle statement is implemented with a goto.  Exit statement must not be
     present for this loop.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);

  /* Put these labels where they can be found later.  */
  code->cycle_label = cycle_label;
  code->exit_label = NULL_TREE;

  /* Main loop body.  */
  tmp = gfc_trans_omp_code (code->block->next, true);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* End of loop body.  */
  stmt = make_node (OMP_FOR);

  TREE_TYPE (stmt) = void_type_node;
  OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
  OMP_FOR_CLAUSES (stmt) = omp_clauses;
  OMP_FOR_INIT (stmt) = init;
  OMP_FOR_COND (stmt) = cond;
  OMP_FOR_INCR (stmt) = incr;
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}
1418 gfc_trans_omp_flush (void)
1420 tree decl
= built_in_decls
[BUILT_IN_SYNCHRONIZE
];
1421 return build_call_expr_loc (input_location
, decl
, 0);
/* Translate the OpenMP MASTER directive.  An empty body is returned
   unchanged, since there is nothing to restrict to the master thread.  */

static tree
gfc_trans_omp_master (gfc_code *code)
{
  tree stmt = gfc_trans_code (code->block->next);
  if (IS_EMPTY_STMT (stmt))
    return stmt;
  return build1_v (OMP_MASTER, stmt);
}
1434 gfc_trans_omp_ordered (gfc_code
*code
)
1436 return build1_v (OMP_ORDERED
, gfc_trans_code (code
->block
->next
));
/* Translate the OpenMP PARALLEL directive into an OMP_PARALLEL node
   whose body is the translated construct block and whose clauses come
   from code->ext.omp_clauses.  */

static tree
gfc_trans_omp_parallel (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  /* Clause setup (e.g. IF/NUM_THREADS expression evaluation) is
     emitted into BLOCK, ahead of the OMP_PARALLEL statement itself.  */
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
				       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
		     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
/* Translate the combined !$OMP PARALLEL DO construct.  The user's
   clause list is split: scheduling-related clauses (SCHED, CHUNK,
   ORDERED, COLLAPSE) go to the inner worksharing DO, everything else
   stays on the enclosing OMP_PARALLEL.
   NOTE(review): the pushlevel/poplevel lines were reconstructed from
   an incomplete extract -- verify against the original file.  */

static tree
gfc_trans_omp_parallel_do (gfc_code *code)
{
  stmtblock_t block, *pblock = NULL;
  gfc_omp_clauses parallel_clauses, do_clauses;
  tree stmt, omp_clauses = NULL_TREE;

  gfc_start_block (&block);

  memset (&do_clauses, 0, sizeof (do_clauses));
  if (code->ext.omp_clauses != NULL)
    {
      /* Copy the user's clauses, move the loop-scheduling ones to
	 DO_CLAUSES and clear them from the parallel's copy.  */
      memcpy (&parallel_clauses, code->ext.omp_clauses,
	      sizeof (parallel_clauses));
      do_clauses.sched_kind = parallel_clauses.sched_kind;
      do_clauses.chunk_size = parallel_clauses.chunk_size;
      do_clauses.ordered = parallel_clauses.ordered;
      do_clauses.collapse = parallel_clauses.collapse;
      parallel_clauses.sched_kind = OMP_SCHED_NONE;
      parallel_clauses.chunk_size = NULL;
      parallel_clauses.ordered = false;
      parallel_clauses.collapse = 0;
      omp_clauses = gfc_trans_omp_clauses (&block, &parallel_clauses,
					   code->loc);
    }
  /* The DO carries no barrier; the enclosing parallel's join is the
     barrier.  */
  do_clauses.nowait = true;
  if (!do_clauses.ordered && do_clauses.sched_kind != OMP_SCHED_STATIC)
    pblock = &block;
  else
    pushlevel (0);
  stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
		     omp_clauses);
  /* Mark as a combined construct for the gimplifier.  */
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
/* Translate the combined !$OMP PARALLEL SECTIONS construct.  The
   inner SECTIONS gets an empty clause list with NOWAIT set (the
   parallel's join supplies the barrier); the user clauses stay on the
   enclosing OMP_PARALLEL.
   NOTE(review): the pushlevel/poplevel lines were reconstructed from
   an incomplete extract -- verify against the original file.  */

static tree
gfc_trans_omp_parallel_sections (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses section_clauses;
  tree stmt, omp_clauses;

  memset (&section_clauses, 0, sizeof (section_clauses));
  section_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
				       code->loc);
  pushlevel (0);
  stmt = gfc_trans_omp_sections (code, &section_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
		     omp_clauses);
  /* Mark as a combined construct for the gimplifier.  */
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
/* Translate the combined !$OMP PARALLEL WORKSHARE construct.  The
   inner WORKSHARE gets an empty clause list with NOWAIT set (the
   parallel's join supplies the barrier); the user clauses stay on the
   enclosing OMP_PARALLEL.
   NOTE(review): the pushlevel/poplevel lines were reconstructed from
   an incomplete extract -- verify against the original file.  */

static tree
gfc_trans_omp_parallel_workshare (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses workshare_clauses;
  tree stmt, omp_clauses;

  memset (&workshare_clauses, 0, sizeof (workshare_clauses));
  workshare_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
				       code->loc);
  pushlevel (0);
  stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
		     omp_clauses);
  /* Mark as a combined construct for the gimplifier.  */
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
/* Translate !$OMP SECTIONS: each child block of CODE becomes one
   OMP_SECTION inside an OMP_SECTIONS node.  Empty sections are
   dropped, except that the last one must be kept when a LASTPRIVATE
   clause is present, since lastprivate copy-out is attached to the
   final section.  */

static tree
gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
{
  stmtblock_t block, body;
  tree omp_clauses, stmt;
  bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;

  gfc_start_block (&block);

  omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);

  gfc_init_block (&body);
  for (code = code->block; code; code = code->block)
    {
      /* Last section is special because of lastprivate, so even if it
	 is empty, chain it in.  */
      stmt = gfc_trans_omp_code (code->next,
				 has_lastprivate && code->block == NULL);
      if (! IS_EMPTY_STMT (stmt))
	{
	  stmt = build1_v (OMP_SECTION, stmt);
	  gfc_add_expr_to_block (&body, stmt);
	}
    }
  stmt = gfc_finish_block (&body);

  stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
		     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}
/* Translate !$OMP SINGLE into an OMP_SINGLE node.  The clauses are
   translated with a NULL block: SINGLE clauses (COPYPRIVATE, NOWAIT)
   require no setup statements ahead of the construct.  */

static tree
gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
  tree stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
		     omp_clauses);
  return stmt;
}
/* Translate the OpenMP TASK directive into an OMP_TASK node whose
   body is the translated construct block and whose clauses come from
   code->ext.omp_clauses.  */

static tree
gfc_trans_omp_task (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  /* Clause setup (e.g. IF expression evaluation) is emitted into
     BLOCK, ahead of the OMP_TASK statement itself.  */
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
				       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
		     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}
1609 gfc_trans_omp_taskwait (void)
1611 tree decl
= built_in_decls
[BUILT_IN_GOMP_TASKWAIT
];
1612 return build_call_expr_loc (input_location
, decl
, 0);
/* Translate !$OMP WORKSHARE.  Statements inside the construct are
   translated one at a time; those the scalarizer could workshare are
   emitted directly, while maximal runs of adjacent "single unit of
   work" statements are wrapped in one OMP_SINGLE each.  The global
   OMPWS_* flags communicate with the scalarizer during translation.
   NOTE(review): the switch labels, the final barrier handling and the
   poplevel calls were reconstructed from an incomplete extract --
   verify against the original file.  */

static tree
gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree res, tmp, stmt;
  stmtblock_t block, *pblock = NULL;
  stmtblock_t singleblock;
  int saved_ompws_flags;
  bool singleblock_in_progress = false;
  /* True if previous gfc_code in workshare construct is not workshared.  */
  bool prev_singleunit;

  code = code->block->next;

  pushlevel (0);

  if (!code)
    return build_empty_stmt (input_location);

  gfc_start_block (&block);
  pblock = &block;

  ompws_flags = OMPWS_WORKSHARE_FLAG;
  prev_singleunit = false;

  /* Translate statements one by one to trees until we reach
     the end of the workshare construct.  Adjacent gfc_codes that
     are a single unit of work are clustered and encapsulated in a
     single OMP_SINGLE construct.  */
  for (; code; code = code->next)
    {
      if (code->here != 0)
	{
	  res = gfc_trans_label_here (code);
	  gfc_add_expr_to_block (pblock, res);
	}

      /* No dependence analysis, use for clauses with wait.
	 If this is the last gfc_code, use default omp_clauses.  */
      if (code->next == NULL && clauses->nowait)
	ompws_flags |= OMPWS_NOWAIT;

      /* By default, every gfc_code is a single unit of work.  */
      ompws_flags |= OMPWS_CURR_SINGLEUNIT;
      ompws_flags &= ~OMPWS_SCALARIZER_WS;

      switch (code->op)
	{
	case EXEC_NOP:
	  res = NULL_TREE;
	  break;

	case EXEC_ASSIGN:
	  res = gfc_trans_assign (code);
	  break;

	case EXEC_POINTER_ASSIGN:
	  res = gfc_trans_pointer_assign (code);
	  break;

	case EXEC_INIT_ASSIGN:
	  res = gfc_trans_init_assign (code);
	  break;

	case EXEC_FORALL:
	  res = gfc_trans_forall (code);
	  break;

	case EXEC_WHERE:
	  res = gfc_trans_where (code);
	  break;

	case EXEC_OMP_ATOMIC:
	  res = gfc_trans_omp_directive (code);
	  break;

	case EXEC_OMP_PARALLEL:
	case EXEC_OMP_PARALLEL_DO:
	case EXEC_OMP_PARALLEL_SECTIONS:
	case EXEC_OMP_PARALLEL_WORKSHARE:
	case EXEC_OMP_CRITICAL:
	  /* Nested constructs are translated outside workshare mode;
	     restore the flags afterwards.  */
	  saved_ompws_flags = ompws_flags;
	  ompws_flags = 0;
	  res = gfc_trans_omp_directive (code);
	  ompws_flags = saved_ompws_flags;
	  break;

	default:
	  internal_error ("gfc_trans_omp_workshare(): Bad statement code");
	}

      gfc_set_backend_locus (&code->loc);

      if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
	{
	  if (prev_singleunit)
	    {
	      if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
		/* Add current gfc_code to single block.  */
		gfc_add_expr_to_block (&singleblock, res);
	      else
		{
		  /* Finish single block and add it to pblock.  */
		  tmp = gfc_finish_block (&singleblock);
		  tmp = build2_loc (input_location, OMP_SINGLE,
				    void_type_node, tmp, NULL_TREE);
		  gfc_add_expr_to_block (pblock, tmp);
		  /* Add current gfc_code to pblock.  */
		  gfc_add_expr_to_block (pblock, res);
		  singleblock_in_progress = false;
		}
	    }
	  else
	    {
	      if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
		{
		  /* Start single block.  */
		  gfc_init_block (&singleblock);
		  gfc_add_expr_to_block (&singleblock, res);
		  singleblock_in_progress = true;
		}
	      else
		/* Add the new statement to the block.  */
		gfc_add_expr_to_block (pblock, res);
	    }
	  prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
	}
    }

  /* Finish remaining SINGLE block, if we were in the middle of one.  */
  if (singleblock_in_progress)
    {
      /* Finish single block and add it to pblock.  */
      tmp = gfc_finish_block (&singleblock);
      tmp = build2_loc (input_location, OMP_SINGLE, void_type_node, tmp,
			clauses->nowait
			? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
			: NULL_TREE);
      gfc_add_expr_to_block (pblock, tmp);
    }

  stmt = gfc_finish_block (pblock);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt))
	{
	  tree bindblock = poplevel (1, 0, 0);
	  stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
	}
      else
	poplevel (0, 0, 0);
    }
  else
    poplevel (0, 0, 0);

  if (IS_EMPTY_STMT (stmt) && !clauses->nowait)
    stmt = gfc_trans_omp_barrier ();

  ompws_flags = 0;
  return stmt;
}
1774 gfc_trans_omp_directive (gfc_code
*code
)
1778 case EXEC_OMP_ATOMIC
:
1779 return gfc_trans_omp_atomic (code
);
1780 case EXEC_OMP_BARRIER
:
1781 return gfc_trans_omp_barrier ();
1782 case EXEC_OMP_CRITICAL
:
1783 return gfc_trans_omp_critical (code
);
1785 return gfc_trans_omp_do (code
, NULL
, code
->ext
.omp_clauses
, NULL
);
1786 case EXEC_OMP_FLUSH
:
1787 return gfc_trans_omp_flush ();
1788 case EXEC_OMP_MASTER
:
1789 return gfc_trans_omp_master (code
);
1790 case EXEC_OMP_ORDERED
:
1791 return gfc_trans_omp_ordered (code
);
1792 case EXEC_OMP_PARALLEL
:
1793 return gfc_trans_omp_parallel (code
);
1794 case EXEC_OMP_PARALLEL_DO
:
1795 return gfc_trans_omp_parallel_do (code
);
1796 case EXEC_OMP_PARALLEL_SECTIONS
:
1797 return gfc_trans_omp_parallel_sections (code
);
1798 case EXEC_OMP_PARALLEL_WORKSHARE
:
1799 return gfc_trans_omp_parallel_workshare (code
);
1800 case EXEC_OMP_SECTIONS
:
1801 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
1802 case EXEC_OMP_SINGLE
:
1803 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
1805 return gfc_trans_omp_task (code
);
1806 case EXEC_OMP_TASKWAIT
:
1807 return gfc_trans_omp_taskwait ();
1808 case EXEC_OMP_WORKSHARE
:
1809 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);