/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"		/* For create_tmp_var_raw.  */
#include "diagnostic-core.h"	/* For internal_error.  */
#include "gfortran.h"
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "arith.h"
38 /* True if OpenMP should privatize what this DECL points to rather
39 than the DECL itself. */
42 gfc_omp_privatize_by_reference (const_tree decl
)
44 tree type
= TREE_TYPE (decl
);
46 if (TREE_CODE (type
) == REFERENCE_TYPE
47 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
50 if (TREE_CODE (type
) == POINTER_TYPE
)
52 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
53 that have POINTER_TYPE type and don't have GFC_POINTER_TYPE_P
54 set are supposed to be privatized by reference. */
55 if (GFC_POINTER_TYPE_P (type
))
58 if (!DECL_ARTIFICIAL (decl
)
59 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
62 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
64 if (DECL_LANG_SPECIFIC (decl
)
65 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
72 /* True if OpenMP sharing attribute of DECL is predetermined. */
74 enum omp_clause_default_kind
75 gfc_omp_predetermined_sharing (tree decl
)
77 if (DECL_ARTIFICIAL (decl
)
78 && ! GFC_DECL_RESULT (decl
)
79 && ! (DECL_LANG_SPECIFIC (decl
)
80 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
81 return OMP_CLAUSE_DEFAULT_SHARED
;
83 /* Cray pointees shouldn't be listed in any clauses and should be
84 gimplified to dereference of the corresponding Cray pointer.
85 Make them all private, so that they are emitted in the debug
87 if (GFC_DECL_CRAY_POINTEE (decl
))
88 return OMP_CLAUSE_DEFAULT_PRIVATE
;
90 /* Assumed-size arrays are predetermined shared. */
91 if (TREE_CODE (decl
) == PARM_DECL
92 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
93 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
94 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
95 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
97 return OMP_CLAUSE_DEFAULT_SHARED
;
99 /* Dummy procedures aren't considered variables by OpenMP, thus are
100 disallowed in OpenMP clauses. They are represented as PARM_DECLs
101 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
102 to avoid complaining about their uses with default(none). */
103 if (TREE_CODE (decl
) == PARM_DECL
104 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
105 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
106 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
108 /* COMMON and EQUIVALENCE decls are shared. They
109 are only referenced through DECL_VALUE_EXPR of the variables
110 contained in them. If those are privatized, they will not be
111 gimplified to the COMMON or EQUIVALENCE decls. */
112 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
113 return OMP_CLAUSE_DEFAULT_SHARED
;
115 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
116 return OMP_CLAUSE_DEFAULT_SHARED
;
118 /* These are either array or derived parameters, or vtables.
119 In the former cases, the OpenMP standard doesn't consider them to be
120 variables at all (they can't be redefined), but they can nevertheless appear
121 in parallel/task regions and for default(none) purposes treat them as shared.
122 For vtables likely the same handling is desirable. */
123 if (TREE_CODE (decl
) == VAR_DECL
124 && TREE_READONLY (decl
)
125 && TREE_STATIC (decl
))
126 return OMP_CLAUSE_DEFAULT_SHARED
;
128 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
131 /* Return decl that should be used when reporting DEFAULT(NONE)
135 gfc_omp_report_decl (tree decl
)
137 if (DECL_ARTIFICIAL (decl
)
138 && DECL_LANG_SPECIFIC (decl
)
139 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
140 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
145 /* Return true if DECL in private clause needs
146 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
148 gfc_omp_private_outer_ref (tree decl
)
150 tree type
= TREE_TYPE (decl
);
152 if (GFC_DESCRIPTOR_TYPE_P (type
)
153 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
159 /* Return code to initialize DECL with its default constructor, or
160 NULL if there's nothing to do. */
163 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
165 tree type
= TREE_TYPE (decl
), rank
, size
, esize
, ptr
, cond
, then_b
, else_b
;
166 stmtblock_t block
, cond_block
;
168 if (! GFC_DESCRIPTOR_TYPE_P (type
)
169 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
172 gcc_assert (outer
!= NULL
);
173 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_PRIVATE
174 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LASTPRIVATE
);
176 /* Allocatable arrays in PRIVATE clauses need to be set to
177 "not currently allocated" allocation status if outer
178 array is "not currently allocated", otherwise should be allocated. */
179 gfc_start_block (&block
);
181 gfc_init_block (&cond_block
);
183 gfc_add_modify (&cond_block
, decl
, outer
);
184 rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
185 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
186 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
187 size
, gfc_conv_descriptor_lbound_get (decl
, rank
));
188 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
189 size
, gfc_index_one_node
);
190 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
191 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
192 size
, gfc_conv_descriptor_stride_get (decl
, rank
));
193 esize
= fold_convert (gfc_array_index_type
,
194 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
195 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
197 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
), &cond_block
);
199 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
200 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
201 gfc_conv_descriptor_data_set (&cond_block
, decl
, ptr
);
203 then_b
= gfc_finish_block (&cond_block
);
205 gfc_init_block (&cond_block
);
206 gfc_conv_descriptor_data_set (&cond_block
, decl
, null_pointer_node
);
207 else_b
= gfc_finish_block (&cond_block
);
209 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
210 fold_convert (pvoid_type_node
,
211 gfc_conv_descriptor_data_get (outer
)),
213 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
214 void_type_node
, cond
, then_b
, else_b
));
216 return gfc_finish_block (&block
);
219 /* Build and return code for a copy constructor from SRC to DEST. */
222 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
224 tree type
= TREE_TYPE (dest
), ptr
, size
, esize
, rank
, call
;
225 tree cond
, then_b
, else_b
;
226 stmtblock_t block
, cond_block
;
228 if (! GFC_DESCRIPTOR_TYPE_P (type
)
229 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
230 return build2_v (MODIFY_EXPR
, dest
, src
);
232 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
);
234 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
235 and copied from SRC. */
236 gfc_start_block (&block
);
238 gfc_init_block (&cond_block
);
240 gfc_add_modify (&cond_block
, dest
, src
);
241 rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
242 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
243 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
244 size
, gfc_conv_descriptor_lbound_get (dest
, rank
));
245 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
246 size
, gfc_index_one_node
);
247 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
248 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
249 size
, gfc_conv_descriptor_stride_get (dest
, rank
));
250 esize
= fold_convert (gfc_array_index_type
,
251 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
252 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
254 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
), &cond_block
);
256 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
257 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
258 gfc_conv_descriptor_data_set (&cond_block
, dest
, ptr
);
260 call
= build_call_expr_loc (input_location
,
261 builtin_decl_explicit (BUILT_IN_MEMCPY
),
263 fold_convert (pvoid_type_node
,
264 gfc_conv_descriptor_data_get (src
)),
266 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
267 then_b
= gfc_finish_block (&cond_block
);
269 gfc_init_block (&cond_block
);
270 gfc_conv_descriptor_data_set (&cond_block
, dest
, null_pointer_node
);
271 else_b
= gfc_finish_block (&cond_block
);
273 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
274 fold_convert (pvoid_type_node
,
275 gfc_conv_descriptor_data_get (src
)),
277 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
278 void_type_node
, cond
, then_b
, else_b
));
280 return gfc_finish_block (&block
);
283 /* Similarly, except use an assignment operator instead. */
286 gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED
, tree dest
, tree src
)
288 tree type
= TREE_TYPE (dest
), rank
, size
, esize
, call
;
291 if (! GFC_DESCRIPTOR_TYPE_P (type
)
292 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
293 return build2_v (MODIFY_EXPR
, dest
, src
);
295 /* Handle copying allocatable arrays. */
296 gfc_start_block (&block
);
298 rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
299 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
300 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
301 size
, gfc_conv_descriptor_lbound_get (dest
, rank
));
302 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
303 size
, gfc_index_one_node
);
304 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
305 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
306 size
, gfc_conv_descriptor_stride_get (dest
, rank
));
307 esize
= fold_convert (gfc_array_index_type
,
308 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
309 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
311 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
), &block
);
312 call
= build_call_expr_loc (input_location
,
313 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3,
314 fold_convert (pvoid_type_node
,
315 gfc_conv_descriptor_data_get (dest
)),
316 fold_convert (pvoid_type_node
,
317 gfc_conv_descriptor_data_get (src
)),
319 gfc_add_expr_to_block (&block
, fold_convert (void_type_node
, call
));
321 return gfc_finish_block (&block
);
324 /* Build and return code destructing DECL. Return NULL if nothing
328 gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED
, tree decl
)
330 tree type
= TREE_TYPE (decl
);
332 if (! GFC_DESCRIPTOR_TYPE_P (type
)
333 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
336 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
337 to be deallocated if they were allocated. */
338 return gfc_trans_dealloc_allocated (decl
, false);
342 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
343 disregarded in OpenMP construct, because it is going to be
344 remapped during OpenMP lowering. SHARED is true if DECL
345 is going to be shared, false if it is going to be privatized. */
348 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
350 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
351 && DECL_HAS_VALUE_EXPR_P (decl
))
353 tree value
= DECL_VALUE_EXPR (decl
);
355 if (TREE_CODE (value
) == COMPONENT_REF
356 && TREE_CODE (TREE_OPERAND (value
, 0)) == VAR_DECL
357 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
359 /* If variable in COMMON or EQUIVALENCE is privatized, return
360 true, as just that variable is supposed to be privatized,
361 not the whole COMMON or whole EQUIVALENCE.
362 For shared variables in COMMON or EQUIVALENCE, let them be
363 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
364 from the same COMMON or EQUIVALENCE just one sharing of the
365 whole COMMON or EQUIVALENCE is enough. */
370 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
376 /* Return true if DECL that is shared iff SHARED is true should
377 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
381 gfc_omp_private_debug_clause (tree decl
, bool shared
)
383 if (GFC_DECL_CRAY_POINTEE (decl
))
386 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
387 && DECL_HAS_VALUE_EXPR_P (decl
))
389 tree value
= DECL_VALUE_EXPR (decl
);
391 if (TREE_CODE (value
) == COMPONENT_REF
392 && TREE_CODE (TREE_OPERAND (value
, 0)) == VAR_DECL
393 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
400 /* Register language specific type size variables as potentially OpenMP
401 firstprivate variables. */
404 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
406 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
410 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
411 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
413 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
414 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
415 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
417 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
418 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
424 gfc_trans_add_clause (tree node
, tree tail
)
426 OMP_CLAUSE_CHAIN (node
) = tail
;
431 gfc_trans_omp_variable (gfc_symbol
*sym
)
433 tree t
= gfc_get_symbol_decl (sym
);
437 bool alternate_entry
;
440 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
441 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
442 && sym
->result
== sym
;
443 entry_master
= sym
->attr
.result
444 && sym
->ns
->proc_name
->attr
.entry_master
445 && !gfc_return_by_reference (sym
->ns
->proc_name
);
446 parent_decl
= DECL_CONTEXT (current_function_decl
);
448 if ((t
== parent_decl
&& return_value
)
449 || (sym
->ns
&& sym
->ns
->proc_name
450 && sym
->ns
->proc_name
->backend_decl
== parent_decl
451 && (alternate_entry
|| entry_master
)))
456 /* Special case for assigning the return value of a function.
457 Self recursive functions must have an explicit return value. */
458 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
459 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
461 /* Similarly for alternate entry points. */
462 else if (alternate_entry
463 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
466 gfc_entry_list
*el
= NULL
;
468 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
471 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
476 else if (entry_master
477 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
479 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
485 gfc_trans_omp_variable_list (enum omp_clause_code code
, gfc_namelist
*namelist
,
488 for (; namelist
!= NULL
; namelist
= namelist
->next
)
489 if (namelist
->sym
->attr
.referenced
)
491 tree t
= gfc_trans_omp_variable (namelist
->sym
);
492 if (t
!= error_mark_node
)
494 tree node
= build_omp_clause (input_location
, code
);
495 OMP_CLAUSE_DECL (node
) = t
;
496 list
= gfc_trans_add_clause (node
, list
);
503 gfc_trans_omp_array_reduction (tree c
, gfc_symbol
*sym
, locus where
)
505 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
506 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
507 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
508 gfc_expr
*e1
, *e2
, *e3
, *e4
;
510 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
511 locus old_loc
= gfc_current_locus
;
515 decl
= OMP_CLAUSE_DECL (c
);
516 gfc_current_locus
= where
;
517 type
= TREE_TYPE (decl
);
518 outer_decl
= create_tmp_var_raw (type
, NULL
);
519 if (TREE_CODE (decl
) == PARM_DECL
520 && TREE_CODE (type
) == REFERENCE_TYPE
521 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
522 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
524 decl
= build_fold_indirect_ref (decl
);
525 type
= TREE_TYPE (type
);
528 /* Create a fake symbol for init value. */
529 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
530 init_val_sym
.ns
= sym
->ns
;
531 init_val_sym
.name
= sym
->name
;
532 init_val_sym
.ts
= sym
->ts
;
533 init_val_sym
.attr
.referenced
= 1;
534 init_val_sym
.declared_at
= where
;
535 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
536 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
537 init_val_sym
.backend_decl
= backend_decl
;
539 /* Create a fake symbol for the outer array reference. */
541 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
542 outer_sym
.attr
.dummy
= 0;
543 outer_sym
.attr
.result
= 0;
544 outer_sym
.attr
.flavor
= FL_VARIABLE
;
545 outer_sym
.backend_decl
= outer_decl
;
546 if (decl
!= OMP_CLAUSE_DECL (c
))
547 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
549 /* Create fake symtrees for it. */
550 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
551 symtree1
->n
.sym
= sym
;
552 gcc_assert (symtree1
== root1
);
554 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
555 symtree2
->n
.sym
= &init_val_sym
;
556 gcc_assert (symtree2
== root2
);
558 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
559 symtree3
->n
.sym
= &outer_sym
;
560 gcc_assert (symtree3
== root3
);
562 /* Create expressions. */
563 e1
= gfc_get_expr ();
564 e1
->expr_type
= EXPR_VARIABLE
;
566 e1
->symtree
= symtree1
;
568 e1
->ref
= ref
= gfc_get_ref ();
569 ref
->type
= REF_ARRAY
;
570 ref
->u
.ar
.where
= where
;
571 ref
->u
.ar
.as
= sym
->as
;
572 ref
->u
.ar
.type
= AR_FULL
;
574 t
= gfc_resolve_expr (e1
);
575 gcc_assert (t
== SUCCESS
);
577 e2
= gfc_get_expr ();
578 e2
->expr_type
= EXPR_VARIABLE
;
580 e2
->symtree
= symtree2
;
582 t
= gfc_resolve_expr (e2
);
583 gcc_assert (t
== SUCCESS
);
585 e3
= gfc_copy_expr (e1
);
586 e3
->symtree
= symtree3
;
587 t
= gfc_resolve_expr (e3
);
588 gcc_assert (t
== SUCCESS
);
591 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
595 e4
= gfc_add (e3
, e1
);
598 e4
= gfc_multiply (e3
, e1
);
600 case TRUTH_ANDIF_EXPR
:
601 e4
= gfc_and (e3
, e1
);
603 case TRUTH_ORIF_EXPR
:
604 e4
= gfc_or (e3
, e1
);
607 e4
= gfc_eqv (e3
, e1
);
610 e4
= gfc_neqv (e3
, e1
);
632 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
633 intrinsic_sym
.ns
= sym
->ns
;
634 intrinsic_sym
.name
= iname
;
635 intrinsic_sym
.ts
= sym
->ts
;
636 intrinsic_sym
.attr
.referenced
= 1;
637 intrinsic_sym
.attr
.intrinsic
= 1;
638 intrinsic_sym
.attr
.function
= 1;
639 intrinsic_sym
.result
= &intrinsic_sym
;
640 intrinsic_sym
.declared_at
= where
;
642 symtree4
= gfc_new_symtree (&root4
, iname
);
643 symtree4
->n
.sym
= &intrinsic_sym
;
644 gcc_assert (symtree4
== root4
);
646 e4
= gfc_get_expr ();
647 e4
->expr_type
= EXPR_FUNCTION
;
649 e4
->symtree
= symtree4
;
650 e4
->value
.function
.isym
= gfc_find_function (iname
);
651 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
652 e4
->value
.function
.actual
->expr
= e3
;
653 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
654 e4
->value
.function
.actual
->next
->expr
= e1
;
656 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
657 e1
= gfc_copy_expr (e1
);
658 e3
= gfc_copy_expr (e3
);
659 t
= gfc_resolve_expr (e4
);
660 gcc_assert (t
== SUCCESS
);
662 /* Create the init statement list. */
664 if (GFC_DESCRIPTOR_TYPE_P (type
)
665 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
667 /* If decl is an allocatable array, it needs to be allocated
668 with the same bounds as the outer var. */
669 tree rank
, size
, esize
, ptr
;
672 gfc_start_block (&block
);
674 gfc_add_modify (&block
, decl
, outer_sym
.backend_decl
);
675 rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
676 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
677 size
= fold_build2_loc (input_location
, MINUS_EXPR
,
678 gfc_array_index_type
, size
,
679 gfc_conv_descriptor_lbound_get (decl
, rank
));
680 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
681 size
, gfc_index_one_node
);
682 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
683 size
= fold_build2_loc (input_location
, MULT_EXPR
,
684 gfc_array_index_type
, size
,
685 gfc_conv_descriptor_stride_get (decl
, rank
));
686 esize
= fold_convert (gfc_array_index_type
,
687 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
688 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
690 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
), &block
);
692 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
693 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
694 gfc_conv_descriptor_data_set (&block
, decl
, ptr
);
696 gfc_add_expr_to_block (&block
, gfc_trans_assignment (e1
, e2
, false,
698 stmt
= gfc_finish_block (&block
);
701 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
702 if (TREE_CODE (stmt
) != BIND_EXPR
)
703 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
706 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
708 /* Create the merge statement list. */
710 if (GFC_DESCRIPTOR_TYPE_P (type
)
711 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
713 /* If decl is an allocatable array, it needs to be deallocated
717 gfc_start_block (&block
);
718 gfc_add_expr_to_block (&block
, gfc_trans_assignment (e3
, e4
, false,
720 gfc_add_expr_to_block (&block
, gfc_trans_dealloc_allocated (decl
, false));
721 stmt
= gfc_finish_block (&block
);
724 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
725 if (TREE_CODE (stmt
) != BIND_EXPR
)
726 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
729 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
731 /* And stick the placeholder VAR_DECL into the clause as well. */
732 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
734 gfc_current_locus
= old_loc
;
744 gfc_free_array_spec (outer_sym
.as
);
748 gfc_trans_omp_reduction_list (gfc_namelist
*namelist
, tree list
,
749 enum tree_code reduction_code
, locus where
)
751 for (; namelist
!= NULL
; namelist
= namelist
->next
)
752 if (namelist
->sym
->attr
.referenced
)
754 tree t
= gfc_trans_omp_variable (namelist
->sym
);
755 if (t
!= error_mark_node
)
757 tree node
= build_omp_clause (where
.lb
->location
,
758 OMP_CLAUSE_REDUCTION
);
759 OMP_CLAUSE_DECL (node
) = t
;
760 OMP_CLAUSE_REDUCTION_CODE (node
) = reduction_code
;
761 if (namelist
->sym
->attr
.dimension
)
762 gfc_trans_omp_array_reduction (node
, namelist
->sym
, where
);
763 list
= gfc_trans_add_clause (node
, list
);
770 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
773 tree omp_clauses
= NULL_TREE
, chunk_size
, c
;
775 enum omp_clause_code clause_code
;
781 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
783 gfc_namelist
*n
= clauses
->lists
[list
];
787 if (list
>= OMP_LIST_REDUCTION_FIRST
788 && list
<= OMP_LIST_REDUCTION_LAST
)
790 enum tree_code reduction_code
;
794 reduction_code
= PLUS_EXPR
;
797 reduction_code
= MULT_EXPR
;
800 reduction_code
= MINUS_EXPR
;
803 reduction_code
= TRUTH_ANDIF_EXPR
;
806 reduction_code
= TRUTH_ORIF_EXPR
;
809 reduction_code
= EQ_EXPR
;
812 reduction_code
= NE_EXPR
;
815 reduction_code
= MAX_EXPR
;
818 reduction_code
= MIN_EXPR
;
821 reduction_code
= BIT_AND_EXPR
;
824 reduction_code
= BIT_IOR_EXPR
;
827 reduction_code
= BIT_XOR_EXPR
;
833 = gfc_trans_omp_reduction_list (n
, omp_clauses
, reduction_code
,
839 case OMP_LIST_PRIVATE
:
840 clause_code
= OMP_CLAUSE_PRIVATE
;
842 case OMP_LIST_SHARED
:
843 clause_code
= OMP_CLAUSE_SHARED
;
845 case OMP_LIST_FIRSTPRIVATE
:
846 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
848 case OMP_LIST_LASTPRIVATE
:
849 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
851 case OMP_LIST_COPYIN
:
852 clause_code
= OMP_CLAUSE_COPYIN
;
854 case OMP_LIST_COPYPRIVATE
:
855 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
859 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
);
866 if (clauses
->if_expr
)
870 gfc_init_se (&se
, NULL
);
871 gfc_conv_expr (&se
, clauses
->if_expr
);
872 gfc_add_block_to_block (block
, &se
.pre
);
873 if_var
= gfc_evaluate_now (se
.expr
, block
);
874 gfc_add_block_to_block (block
, &se
.post
);
876 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
877 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
878 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
881 if (clauses
->final_expr
)
885 gfc_init_se (&se
, NULL
);
886 gfc_conv_expr (&se
, clauses
->final_expr
);
887 gfc_add_block_to_block (block
, &se
.pre
);
888 final_var
= gfc_evaluate_now (se
.expr
, block
);
889 gfc_add_block_to_block (block
, &se
.post
);
891 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FINAL
);
892 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
893 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
896 if (clauses
->num_threads
)
900 gfc_init_se (&se
, NULL
);
901 gfc_conv_expr (&se
, clauses
->num_threads
);
902 gfc_add_block_to_block (block
, &se
.pre
);
903 num_threads
= gfc_evaluate_now (se
.expr
, block
);
904 gfc_add_block_to_block (block
, &se
.post
);
906 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_THREADS
);
907 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
908 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
911 chunk_size
= NULL_TREE
;
912 if (clauses
->chunk_size
)
914 gfc_init_se (&se
, NULL
);
915 gfc_conv_expr (&se
, clauses
->chunk_size
);
916 gfc_add_block_to_block (block
, &se
.pre
);
917 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
918 gfc_add_block_to_block (block
, &se
.post
);
921 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
923 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SCHEDULE
);
924 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
925 switch (clauses
->sched_kind
)
927 case OMP_SCHED_STATIC
:
928 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
930 case OMP_SCHED_DYNAMIC
:
931 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
933 case OMP_SCHED_GUIDED
:
934 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
936 case OMP_SCHED_RUNTIME
:
937 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
940 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
945 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
948 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
950 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULT
);
951 switch (clauses
->default_sharing
)
953 case OMP_DEFAULT_NONE
:
954 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
956 case OMP_DEFAULT_SHARED
:
957 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
959 case OMP_DEFAULT_PRIVATE
:
960 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
962 case OMP_DEFAULT_FIRSTPRIVATE
:
963 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
968 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
973 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOWAIT
);
974 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
977 if (clauses
->ordered
)
979 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ORDERED
);
980 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
985 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_UNTIED
);
986 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
989 if (clauses
->mergeable
)
991 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_MERGEABLE
);
992 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
995 if (clauses
->collapse
)
997 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_COLLAPSE
);
998 OMP_CLAUSE_COLLAPSE_EXPR (c
)
999 = build_int_cst (integer_type_node
, clauses
->collapse
);
1000 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
1006 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
1009 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
1014 stmt
= gfc_trans_code (code
);
1015 if (TREE_CODE (stmt
) != BIND_EXPR
)
1017 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
1019 tree block
= poplevel (1, 0);
1020 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
1031 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
1032 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
1035 gfc_trans_omp_atomic (gfc_code
*code
)
1037 gfc_code
*atomic_code
= code
;
1041 gfc_expr
*expr2
, *e
;
1044 tree lhsaddr
, type
, rhs
, x
;
1045 enum tree_code op
= ERROR_MARK
;
1046 enum tree_code aop
= OMP_ATOMIC
;
1047 bool var_on_left
= false;
1049 code
= code
->block
->next
;
1050 gcc_assert (code
->op
== EXEC_ASSIGN
);
1051 var
= code
->expr1
->symtree
->n
.sym
;
1053 gfc_init_se (&lse
, NULL
);
1054 gfc_init_se (&rse
, NULL
);
1055 gfc_init_se (&vse
, NULL
);
1056 gfc_start_block (&block
);
1058 expr2
= code
->expr2
;
1059 if (expr2
->expr_type
== EXPR_FUNCTION
1060 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
1061 expr2
= expr2
->value
.function
.actual
->expr
;
1063 switch (atomic_code
->ext
.omp_atomic
)
1065 case GFC_OMP_ATOMIC_READ
:
1066 gfc_conv_expr (&vse
, code
->expr1
);
1067 gfc_add_block_to_block (&block
, &vse
.pre
);
1069 gfc_conv_expr (&lse
, expr2
);
1070 gfc_add_block_to_block (&block
, &lse
.pre
);
1071 type
= TREE_TYPE (lse
.expr
);
1072 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
1074 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
1075 x
= convert (TREE_TYPE (vse
.expr
), x
);
1076 gfc_add_modify (&block
, vse
.expr
, x
);
1078 gfc_add_block_to_block (&block
, &lse
.pre
);
1079 gfc_add_block_to_block (&block
, &rse
.pre
);
1081 return gfc_finish_block (&block
);
1082 case GFC_OMP_ATOMIC_CAPTURE
:
1083 aop
= OMP_ATOMIC_CAPTURE_NEW
;
1084 if (expr2
->expr_type
== EXPR_VARIABLE
)
1086 aop
= OMP_ATOMIC_CAPTURE_OLD
;
1087 gfc_conv_expr (&vse
, code
->expr1
);
1088 gfc_add_block_to_block (&block
, &vse
.pre
);
1090 gfc_conv_expr (&lse
, expr2
);
1091 gfc_add_block_to_block (&block
, &lse
.pre
);
1092 gfc_init_se (&lse
, NULL
);
1094 var
= code
->expr1
->symtree
->n
.sym
;
1095 expr2
= code
->expr2
;
1096 if (expr2
->expr_type
== EXPR_FUNCTION
1097 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
1098 expr2
= expr2
->value
.function
.actual
->expr
;
1105 gfc_conv_expr (&lse
, code
->expr1
);
1106 gfc_add_block_to_block (&block
, &lse
.pre
);
1107 type
= TREE_TYPE (lse
.expr
);
1108 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
1110 if (atomic_code
->ext
.omp_atomic
== GFC_OMP_ATOMIC_WRITE
)
1112 gfc_conv_expr (&rse
, expr2
);
1113 gfc_add_block_to_block (&block
, &rse
.pre
);
1115 else if (expr2
->expr_type
== EXPR_OP
)
1118 switch (expr2
->value
.op
.op
)
1120 case INTRINSIC_PLUS
:
1123 case INTRINSIC_TIMES
:
1126 case INTRINSIC_MINUS
:
1129 case INTRINSIC_DIVIDE
:
1130 if (expr2
->ts
.type
== BT_INTEGER
)
1131 op
= TRUNC_DIV_EXPR
;
1136 op
= TRUTH_ANDIF_EXPR
;
1139 op
= TRUTH_ORIF_EXPR
;
1144 case INTRINSIC_NEQV
:
1150 e
= expr2
->value
.op
.op1
;
1151 if (e
->expr_type
== EXPR_FUNCTION
1152 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
1153 e
= e
->value
.function
.actual
->expr
;
1154 if (e
->expr_type
== EXPR_VARIABLE
1155 && e
->symtree
!= NULL
1156 && e
->symtree
->n
.sym
== var
)
1158 expr2
= expr2
->value
.op
.op2
;
1163 e
= expr2
->value
.op
.op2
;
1164 if (e
->expr_type
== EXPR_FUNCTION
1165 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
1166 e
= e
->value
.function
.actual
->expr
;
1167 gcc_assert (e
->expr_type
== EXPR_VARIABLE
1168 && e
->symtree
!= NULL
1169 && e
->symtree
->n
.sym
== var
);
1170 expr2
= expr2
->value
.op
.op1
;
1171 var_on_left
= false;
1173 gfc_conv_expr (&rse
, expr2
);
1174 gfc_add_block_to_block (&block
, &rse
.pre
);
1178 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
1179 switch (expr2
->value
.function
.isym
->id
)
1199 e
= expr2
->value
.function
.actual
->expr
;
1200 gcc_assert (e
->expr_type
== EXPR_VARIABLE
1201 && e
->symtree
!= NULL
1202 && e
->symtree
->n
.sym
== var
);
1204 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
1205 gfc_add_block_to_block (&block
, &rse
.pre
);
1206 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
1208 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
1209 gfc_actual_arglist
*arg
;
1211 gfc_add_modify (&block
, accum
, rse
.expr
);
1212 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
1215 gfc_init_block (&rse
.pre
);
1216 gfc_conv_expr (&rse
, arg
->expr
);
1217 gfc_add_block_to_block (&block
, &rse
.pre
);
1218 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
1220 gfc_add_modify (&block
, accum
, x
);
1226 expr2
= expr2
->value
.function
.actual
->next
->expr
;
1229 lhsaddr
= save_expr (lhsaddr
);
1230 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
1232 if (atomic_code
->ext
.omp_atomic
== GFC_OMP_ATOMIC_WRITE
)
1236 x
= convert (TREE_TYPE (rhs
),
1237 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
1239 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
1241 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
1244 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
1245 && TREE_CODE (type
) != COMPLEX_TYPE
)
1246 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
1247 TREE_TYPE (TREE_TYPE (rhs
)), x
);
1249 gfc_add_block_to_block (&block
, &lse
.pre
);
1250 gfc_add_block_to_block (&block
, &rse
.pre
);
1252 if (aop
== OMP_ATOMIC
)
1254 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
1255 gfc_add_expr_to_block (&block
, x
);
1259 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
1262 expr2
= code
->expr2
;
1263 if (expr2
->expr_type
== EXPR_FUNCTION
1264 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
1265 expr2
= expr2
->value
.function
.actual
->expr
;
1267 gcc_assert (expr2
->expr_type
== EXPR_VARIABLE
);
1268 gfc_conv_expr (&vse
, code
->expr1
);
1269 gfc_add_block_to_block (&block
, &vse
.pre
);
1271 gfc_init_se (&lse
, NULL
);
1272 gfc_conv_expr (&lse
, expr2
);
1273 gfc_add_block_to_block (&block
, &lse
.pre
);
1275 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
1276 x
= convert (TREE_TYPE (vse
.expr
), x
);
1277 gfc_add_modify (&block
, vse
.expr
, x
);
1280 return gfc_finish_block (&block
);
1284 gfc_trans_omp_barrier (void)
1286 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
1287 return build_call_expr_loc (input_location
, decl
, 0);
1291 gfc_trans_omp_critical (gfc_code
*code
)
1293 tree name
= NULL_TREE
, stmt
;
1294 if (code
->ext
.omp_name
!= NULL
)
1295 name
= get_identifier (code
->ext
.omp_name
);
1296 stmt
= gfc_trans_code (code
->block
->next
);
1297 return build2_loc (input_location
, OMP_CRITICAL
, void_type_node
, stmt
, name
);
/* Pairs a DO-loop variable with the expression used to initialize it
   at the top of the translated loop body (used by gfc_trans_omp_do).
   NOTE(review): the member declarations and the closing brace of this
   typedef were dropped by the extraction and are not visible here.  */
1300 typedef struct dovar_init_d
{
/* gfc_trans_omp_do: translate the loop nest of an OpenMP DO construct
   into an OMP_FOR node whose init/cond/incr operands are TREE_VECs of
   length `collapse'.  PBLOCK receives clause-evaluation code; for a
   combined PARALLEL DO, PAR_CLAUSES are the parallel's clauses so a
   lastprivate iteration variable can be adjusted there.
   NOTE(review): this fragment is a garbled extraction -- statements are
   split across lines and several original lines (braces, the `simple'
   and `dovar_decl' declarations, some branches) are missing, as the
   jumps in the embedded line numbers show.  Code tokens below are kept
   byte-identical; only comments are added.  */
1307 gfc_trans_omp_do (gfc_code
*code
, stmtblock_t
*pblock
,
1308 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
1311 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
;
1312 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
1315 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
1316 int i
, collapse
= clauses
->collapse
;
1317 vec
<dovar_init
> inits
= vNULL
;
/* Step to the first EXEC_DO of the (possibly collapsed) loop nest.  */
1324 code
= code
->block
->next
;
1325 gcc_assert (code
->op
== EXEC_DO
);
/* One init/cond/incr slot per collapsed loop level.  */
1327 init
= make_tree_vec (collapse
)
;
1328 cond
= make_tree_vec (collapse
);
1329 incr
= make_tree_vec (collapse
);
1333 gfc_start_block (&block
);
1337 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
/* For each collapsed loop level, detect whether the iteration variable
   already appears in a LASTPRIVATE or PRIVATE clause.  */
1339 for (i
= 0; i
< collapse
; i
++)
1342 int dovar_found
= 0;
1348 for (n
= clauses
->lists
[OMP_LIST_LASTPRIVATE
]; n
!= NULL
;
1350 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
1355 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
1356 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
1362 /* Evaluate all the expressions in the iterator. */
1363 gfc_init_se (&se
, NULL
);
1364 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
1365 gfc_add_block_to_block (pblock
, &se
.pre
);
1367 type
= TREE_TYPE (dovar
);
1368 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
/* Evaluate loop bounds and step once, up front, into temporaries.  */
1370 gfc_init_se (&se
, NULL
);
1371 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
1372 gfc_add_block_to_block (pblock
, &se
.pre
);
1373 from
= gfc_evaluate_now (se
.expr
, pblock
);
1375 gfc_init_se (&se
, NULL
);
1376 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
1377 gfc_add_block_to_block (pblock
, &se
.pre
);
1378 to
= gfc_evaluate_now (se
.expr
, pblock
);
1380 gfc_init_se (&se
, NULL
);
1381 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
1382 gfc_add_block_to_block (pblock
, &se
.pre
);
1383 step
= gfc_evaluate_now (se
.expr
, pblock
);
1386 /* Special case simple loops. */
1387 if (TREE_CODE (dovar
) == VAR_DECL
)
1389 if (integer_onep (step
))
1391 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
1396 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
);
/* Simple case: iterate DOVAR itself from FROM to TO by STEP (+-1).  */
1401 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
1402 /* The condition should not be folded. */
1403 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
1404 ? LE_EXPR
: GE_EXPR
,
1405 boolean_type_node
, dovar
, to
);
1406 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
1408 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
1411 TREE_VEC_ELT (incr
, i
));
1415 /* STEP is not 1 or -1. Use:
1416 for (count = 0; count < (to + step - from) / step; count++)
1418 dovar = from + count * step;
1422 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
1423 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
1424 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
1426 tmp
= gfc_evaluate_now (tmp
, pblock
);
1427 count
= gfc_create_var (type
, "count");
1428 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
1429 build_int_cst (type
, 0));
1430 /* The condition should not be folded. */
1431 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
1434 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
1436 build_int_cst (type
, 1));
1437 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
1438 MODIFY_EXPR
, type
, count
,
1439 TREE_VEC_ELT (incr
, i
));
1441 /* Initialize DOVAR. */
1442 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
1443 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
1444 dovar_init e
= {dovar
, tmp
};
1445 inits
.safe_push (e
);
/* DOVAR was in no clause: make it PRIVATE on the loop.  */
1450 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
1451 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
1452 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
1454 else if (dovar_found
== 2)
1461 /* If dovar is lastprivate, but different counter is used,
1462 dovar += step needs to be added to
1463 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
1464 will have the value on entry of the last loop, rather
1465 than value after iterator increment. */
1466 tmp
= gfc_evaluate_now (step
, pblock
);
1467 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, dovar
,
1469 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
1471 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1472 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1473 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
1475 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
/* Not found on the DO's own clauses: look at the enclosing
   parallel's clauses (combined PARALLEL DO case).  */
1479 if (c
== NULL
&& par_clauses
!= NULL
)
1481 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1482 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1483 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
1485 tree l
= build_omp_clause (input_location
,
1486 OMP_CLAUSE_LASTPRIVATE
);
1487 OMP_CLAUSE_DECL (l
) = dovar_decl
;
1488 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
1489 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
1491 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
1495 gcc_assert (simple
|| c
!= NULL
);
/* When a synthetic COUNT variable drives the loop, it is PRIVATE.  */
1499 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
1500 OMP_CLAUSE_DECL (tmp
) = count
;
1501 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
1504 if (i
+ 1 < collapse
)
1505 code
= code
->block
->next
;
1508 if (pblock
!= &block
)
1511 gfc_start_block (&block
);
/* Emit the deferred DOVAR initializations at the top of the body.  */
1514 gfc_start_block (&body
);
1516 FOR_EACH_VEC_ELT (inits
, ix
, di
)
1517 gfc_add_modify (&body
, di
->var
, di
->init
);
1520 /* Cycle statement is implemented with a goto. Exit statement must not be
1521 present for this loop. */
1522 cycle_label
= gfc_build_label_decl (NULL_TREE
);
1524 /* Put these labels where they can be found later. */
1526 code
->cycle_label
= cycle_label
;
1527 code
->exit_label
= NULL_TREE
;
1529 /* Main loop body. */
1530 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
1531 gfc_add_expr_to_block (&body
, tmp
);
1533 /* Label for cycle statements (if needed). */
1534 if (TREE_USED (cycle_label
))
1536 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
1537 gfc_add_expr_to_block (&body
, tmp
);
1540 /* End of loop body. */
1541 stmt
= make_node (OMP_FOR
);
/* Assemble the OMP_FOR node from the pieces built above.  */
1543 TREE_TYPE (stmt
) = void_type_node
;
1544 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
1545 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
1546 OMP_FOR_INIT (stmt
) = init
;
1547 OMP_FOR_COND (stmt
) = cond
;
1548 OMP_FOR_INCR (stmt
) = incr
;
1549 gfc_add_expr_to_block (&block
, stmt
);
1551 return gfc_finish_block (&block
);
1555 gfc_trans_omp_flush (void)
1557 tree decl
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
1558 return build_call_expr_loc (input_location
, decl
, 0);
1562 gfc_trans_omp_master (gfc_code
*code
)
1564 tree stmt
= gfc_trans_code (code
->block
->next
);
1565 if (IS_EMPTY_STMT (stmt
))
1567 return build1_v (OMP_MASTER
, stmt
);
1571 gfc_trans_omp_ordered (gfc_code
*code
)
1573 return build1_v (OMP_ORDERED
, gfc_trans_code (code
->block
->next
));
1577 gfc_trans_omp_parallel (gfc_code
*code
)
1580 tree stmt
, omp_clauses
;
1582 gfc_start_block (&block
);
1583 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
1585 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
1586 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
1588 gfc_add_expr_to_block (&block
, stmt
);
1589 return gfc_finish_block (&block
);
/* gfc_trans_omp_parallel_do: translate a combined PARALLEL DO.  The
   loop-related clauses are moved onto the DO, the remainder stay on
   the PARALLEL, and the result is an OMP_PARALLEL marked COMBINED
   wrapping the OMP_FOR built by gfc_trans_omp_do.
   NOTE(review): garbled extraction -- braces, a couple of argument
   continuations and the binding-level handling between original lines
   1618 and 1622 are missing.  Also note the mojibake below:
   `¶llel_clauses' is an HTML-entity corruption of
   `&parallel_clauses' (&para -> pilcrow); left untouched here since a
   doc-only edit may not alter code tokens.  */
1593 gfc_trans_omp_parallel_do (gfc_code
*code
)
1595 stmtblock_t block
, *pblock
= NULL
;
1596 gfc_omp_clauses parallel_clauses
, do_clauses
;
1597 tree stmt
, omp_clauses
= NULL_TREE
;
1599 gfc_start_block (&block
);
1601 memset (&do_clauses
, 0, sizeof (do_clauses
));
1602 if (code
->ext
.omp_clauses
!= NULL
)
1604 memcpy (¶llel_clauses
, code
->ext
.omp_clauses
,
1605 sizeof (parallel_clauses
));
/* Split the clause set: scheduling/ordered/collapse go to the DO.  */
1606 do_clauses
.sched_kind
= parallel_clauses
.sched_kind
;
1607 do_clauses
.chunk_size
= parallel_clauses
.chunk_size
;
1608 do_clauses
.ordered
= parallel_clauses
.ordered
;
1609 do_clauses
.collapse
= parallel_clauses
.collapse
;
1610 parallel_clauses
.sched_kind
= OMP_SCHED_NONE
;
1611 parallel_clauses
.chunk_size
= NULL
;
1612 parallel_clauses
.ordered
= false;
1613 parallel_clauses
.collapse
= 0;
1614 omp_clauses
= gfc_trans_omp_clauses (&block
, ¶llel_clauses
,
/* The inner DO never waits; the barrier comes from the PARALLEL.  */
1617 do_clauses
.nowait
= true;
1618 if (!do_clauses
.ordered
&& do_clauses
.sched_kind
!= OMP_SCHED_STATIC
)
1622 stmt
= gfc_trans_omp_do (code
, pblock
, &do_clauses
, omp_clauses
);
1623 if (TREE_CODE (stmt
) != BIND_EXPR
)
1624 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1627 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
1629 OMP_PARALLEL_COMBINED (stmt
) = 1;
1630 gfc_add_expr_to_block (&block
, stmt
);
1631 return gfc_finish_block (&block
);
/* gfc_trans_omp_parallel_sections: translate a combined PARALLEL
   SECTIONS as an OMP_PARALLEL (marked COMBINED) wrapping the result of
   gfc_trans_omp_sections with a fresh nowait clause set.
   NOTE(review): garbled extraction -- braces, the binding-level push
   between original lines 1645 and 1648, and argument continuations are
   missing.  `§ion_clauses' below is an HTML-entity corruption of
   `&section_clauses' (&sect -> section sign); left untouched since a
   doc-only edit may not alter code tokens.  */
1635 gfc_trans_omp_parallel_sections (gfc_code
*code
)
1638 gfc_omp_clauses section_clauses
;
1639 tree stmt
, omp_clauses
;
1641 memset (§ion_clauses
, 0, sizeof (section_clauses
));
/* The inner SECTIONS never waits; the PARALLEL provides the join.  */
1642 section_clauses
.nowait
= true;
1644 gfc_start_block (&block
);
1645 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
1648 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
1649 if (TREE_CODE (stmt
) != BIND_EXPR
)
1650 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1653 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
1655 OMP_PARALLEL_COMBINED (stmt
) = 1;
1656 gfc_add_expr_to_block (&block
, stmt
);
1657 return gfc_finish_block (&block
);
/* gfc_trans_omp_parallel_workshare: translate a combined PARALLEL
   WORKSHARE as an OMP_PARALLEL (marked COMBINED) wrapping the result
   of gfc_trans_omp_workshare with a fresh nowait clause set.
   NOTE(review): garbled extraction -- braces, the binding-level push
   between original lines 1671 and 1674, and argument continuations are
   missing; code tokens below are kept byte-identical.  */
1661 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
1664 gfc_omp_clauses workshare_clauses
;
1665 tree stmt
, omp_clauses
;
1667 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
/* The inner WORKSHARE never waits; the PARALLEL provides the join.  */
1668 workshare_clauses
.nowait
= true;
1670 gfc_start_block (&block
);
1671 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
1674 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
1675 if (TREE_CODE (stmt
) != BIND_EXPR
)
1676 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1679 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
1681 OMP_PARALLEL_COMBINED (stmt
) = 1;
1682 gfc_add_expr_to_block (&block
, stmt
);
1683 return gfc_finish_block (&block
);
1687 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
1689 stmtblock_t block
, body
;
1690 tree omp_clauses
, stmt
;
1691 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
1693 gfc_start_block (&block
);
1695 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
1697 gfc_init_block (&body
);
1698 for (code
= code
->block
; code
; code
= code
->block
)
1700 /* Last section is special because of lastprivate, so even if it
1701 is empty, chain it in. */
1702 stmt
= gfc_trans_omp_code (code
->next
,
1703 has_lastprivate
&& code
->block
== NULL
);
1704 if (! IS_EMPTY_STMT (stmt
))
1706 stmt
= build1_v (OMP_SECTION
, stmt
);
1707 gfc_add_expr_to_block (&body
, stmt
);
1710 stmt
= gfc_finish_block (&body
);
1712 stmt
= build2_loc (input_location
, OMP_SECTIONS
, void_type_node
, stmt
,
1714 gfc_add_expr_to_block (&block
, stmt
);
1716 return gfc_finish_block (&block
);
1720 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
1722 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, clauses
, code
->loc
);
1723 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
1724 stmt
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, stmt
,
1730 gfc_trans_omp_task (gfc_code
*code
)
1733 tree stmt
, omp_clauses
;
1735 gfc_start_block (&block
);
1736 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
1738 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
1739 stmt
= build2_loc (input_location
, OMP_TASK
, void_type_node
, stmt
,
1741 gfc_add_expr_to_block (&block
, stmt
);
1742 return gfc_finish_block (&block
);
1746 gfc_trans_omp_taskwait (void)
1748 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
1749 return build_call_expr_loc (input_location
, decl
, 0);
1753 gfc_trans_omp_taskyield (void)
1755 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
1756 return build_call_expr_loc (input_location
, decl
, 0);
/* gfc_trans_omp_workshare: translate an OpenMP WORKSHARE construct.
   Statements are translated one at a time; runs of statements that are
   not worksharable ("single units") are clustered into OMP_SINGLE
   regions, while worksharable statements (assignments, FORALL, WHERE)
   are emitted directly, with the global ompws_flags steering the
   scalarizer.
   NOTE(review): garbled extraction -- braces, the switch head, several
   case labels/branches and the function tail are missing, as the jumps
   in the embedded line numbers show.  Code tokens below are kept
   byte-identical; only comments are added.  */
1760 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
1762 tree res
, tmp
, stmt
;
1763 stmtblock_t block
, *pblock
= NULL
;
1764 stmtblock_t singleblock
;
1765 int saved_ompws_flags
;
1766 bool singleblock_in_progress
= false;
1767 /* True if previous gfc_code in workshare construct is not workshared. */
1768 bool prev_singleunit
;
/* Step to the first statement inside the construct.  */
1770 code
= code
->block
->next
;
1774 gfc_start_block (&block
);
1777 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
1778 prev_singleunit
= false;
1780 /* Translate statements one by one to trees until we reach
1781 the end of the workshare construct. Adjacent gfc_codes that
1782 are a single unit of work are clustered and encapsulated in a
1783 single OMP_SINGLE construct. */
1784 for (; code
; code
= code
->next
)
1786 if (code
->here
!= 0)
1788 res
= gfc_trans_label_here (code
);
1789 gfc_add_expr_to_block (pblock
, res
);
1792 /* No dependence analysis, use for clauses with wait.
1793 If this is the last gfc_code, use default omp_clauses. */
1794 if (code
->next
== NULL
&& clauses
->nowait
)
1795 ompws_flags
|= OMPWS_NOWAIT
;
1797 /* By default, every gfc_code is a single unit of work. */
1798 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
1799 ompws_flags
&= ~OMPWS_SCALARIZER_WS
;
/* Dispatch on the statement kind (switch head dropped by the
   extraction); these translators may clear OMPWS_CURR_SINGLEUNIT.  */
1808 res
= gfc_trans_assign (code
);
1811 case EXEC_POINTER_ASSIGN
:
1812 res
= gfc_trans_pointer_assign (code
);
1815 case EXEC_INIT_ASSIGN
:
1816 res
= gfc_trans_init_assign (code
);
1820 res
= gfc_trans_forall (code
);
1824 res
= gfc_trans_where (code
);
1827 case EXEC_OMP_ATOMIC
:
1828 res
= gfc_trans_omp_directive (code
);
1831 case EXEC_OMP_PARALLEL
:
1832 case EXEC_OMP_PARALLEL_DO
:
1833 case EXEC_OMP_PARALLEL_SECTIONS
:
1834 case EXEC_OMP_PARALLEL_WORKSHARE
:
1835 case EXEC_OMP_CRITICAL
:
/* Nested constructs get a clean flag state; restore afterwards.  */
1836 saved_ompws_flags
= ompws_flags
;
1838 res
= gfc_trans_omp_directive (code
);
1839 ompws_flags
= saved_ompws_flags
;
1843 internal_error ("gfc_trans_omp_workshare(): Bad statement code");
1846 gfc_set_backend_locus (&code
->loc
);
1848 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
1850 if (prev_singleunit
)
1852 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
1853 /* Add current gfc_code to single block. */
1854 gfc_add_expr_to_block (&singleblock
, res
);
1857 /* Finish single block and add it to pblock. */
1858 tmp
= gfc_finish_block (&singleblock
);
1859 tmp
= build2_loc (input_location
, OMP_SINGLE
,
1860 void_type_node
, tmp
, NULL_TREE
);
1861 gfc_add_expr_to_block (pblock
, tmp
);
1862 /* Add current gfc_code to pblock. */
1863 gfc_add_expr_to_block (pblock
, res
);
1864 singleblock_in_progress
= false;
1869 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
1871 /* Start single block. */
1872 gfc_init_block (&singleblock
);
1873 gfc_add_expr_to_block (&singleblock
, res
);
1874 singleblock_in_progress
= true;
1877 /* Add the new statement to the block. */
1878 gfc_add_expr_to_block (pblock
, res
);
1880 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
1884 /* Finish remaining SINGLE block, if we were in the middle of one. */
1885 if (singleblock_in_progress
)
1887 /* Finish single block and add it to pblock. */
1888 tmp
= gfc_finish_block (&singleblock
);
1889 tmp
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, tmp
,
1891 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
1893 gfc_add_expr_to_block (pblock
, tmp
);
1896 stmt
= gfc_finish_block (pblock
);
1897 if (TREE_CODE (stmt
) != BIND_EXPR
)
1899 if (!IS_EMPTY_STMT (stmt
))
1901 tree bindblock
= poplevel (1, 0);
1902 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
/* An entirely empty construct without NOWAIT still needs the barrier.  */
1910 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
1911 stmt
= gfc_trans_omp_barrier ();
1918 gfc_trans_omp_directive (gfc_code
*code
)
1922 case EXEC_OMP_ATOMIC
:
1923 return gfc_trans_omp_atomic (code
);
1924 case EXEC_OMP_BARRIER
:
1925 return gfc_trans_omp_barrier ();
1926 case EXEC_OMP_CRITICAL
:
1927 return gfc_trans_omp_critical (code
);
1929 return gfc_trans_omp_do (code
, NULL
, code
->ext
.omp_clauses
, NULL
);
1930 case EXEC_OMP_FLUSH
:
1931 return gfc_trans_omp_flush ();
1932 case EXEC_OMP_MASTER
:
1933 return gfc_trans_omp_master (code
);
1934 case EXEC_OMP_ORDERED
:
1935 return gfc_trans_omp_ordered (code
);
1936 case EXEC_OMP_PARALLEL
:
1937 return gfc_trans_omp_parallel (code
);
1938 case EXEC_OMP_PARALLEL_DO
:
1939 return gfc_trans_omp_parallel_do (code
);
1940 case EXEC_OMP_PARALLEL_SECTIONS
:
1941 return gfc_trans_omp_parallel_sections (code
);
1942 case EXEC_OMP_PARALLEL_WORKSHARE
:
1943 return gfc_trans_omp_parallel_workshare (code
);
1944 case EXEC_OMP_SECTIONS
:
1945 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
1946 case EXEC_OMP_SINGLE
:
1947 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
1949 return gfc_trans_omp_task (code
);
1950 case EXEC_OMP_TASKWAIT
:
1951 return gfc_trans_omp_taskwait ();
1952 case EXEC_OMP_TASKYIELD
:
1953 return gfc_trans_omp_taskyield ();
1954 case EXEC_OMP_WORKSHARE
:
1955 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);