/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"
#include "ggc.h"
#include "toplev.h"
#include "real.h"
#include "gfortran.h"
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "arith.h"

int ompws_flags;

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
gfc_omp_privatize_by_reference (const_tree decl)
{
  tree type = TREE_TYPE (decl);

  if (TREE_CODE (type) == REFERENCE_TYPE
      && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
    return true;

  if (TREE_CODE (type) == POINTER_TYPE)
    {
      /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
         that have POINTER_TYPE type and don't have GFC_POINTER_TYPE_P
         set are supposed to be privatized by reference.  */
      if (GFC_POINTER_TYPE_P (type))
        return false;

      if (!DECL_ARTIFICIAL (decl))
        return true;

      /* Some arrays are expanded as DECL_ARTIFICIAL pointers
         by the frontend.  */
      if (DECL_LANG_SPECIFIC (decl)
          && GFC_DECL_SAVED_DESCRIPTOR (decl))
        return true;
    }

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
gfc_omp_predetermined_sharing (tree decl)
{
  if (DECL_ARTIFICIAL (decl) && ! GFC_DECL_RESULT (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Cray pointees shouldn't be listed in any clauses and should be
     gimplified to dereference of the corresponding Cray pointer.
     Make them all private, so that they are emitted in the debug
     information.  */
  if (GFC_DECL_CRAY_POINTEE (decl))
    return OMP_CLAUSE_DEFAULT_PRIVATE;

  /* Assumed-size arrays are predetermined to inherit sharing
     attributes of the associated actual argument, which is shared
     for all we care.  */
  if (TREE_CODE (decl) == PARM_DECL
      && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
      && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
                                GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
         == NULL)
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* COMMON and EQUIVALENCE decls are shared.  They
     are only referenced through DECL_VALUE_EXPR of the variables
     contained in them.  If those are privatized, they will not be
     gimplified to the COMMON or EQUIVALENCE decls.  */
  if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

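/* A minimal illustration of the predetermined cases above: a dummy declared
   as an assumed-size array, e.g.

       subroutine sub (a)
         real :: a(*)

   is treated as shared, while a Cray pointee is forced private purely so
   that it still shows up in the debug information.  */
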
/* Return true if DECL in private clause needs
   OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause.  */
bool
gfc_omp_private_outer_ref (tree decl)
{
  tree type = TREE_TYPE (decl);

  if (GFC_DESCRIPTOR_TYPE_P (type)
      && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
    return true;

  return false;
}

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
{
  tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
  stmtblock_t block, cond_block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return NULL;

  gcc_assert (outer != NULL);
  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
              || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);

  /* Allocatable arrays in PRIVATE clauses need to be set to
     "not currently allocated" allocation status if outer
     array is "not currently allocated", otherwise should be allocated.  */
  gfc_start_block (&block);

  gfc_init_block (&cond_block);

  gfc_add_modify (&cond_block, decl, outer);
  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (decl, rank);
  size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
                      gfc_conv_descriptor_lbound_get (decl, rank));
  size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
                      gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
                        gfc_conv_descriptor_stride_get (decl, rank));
  esize = fold_convert (gfc_array_index_type,
                        TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
  ptr = gfc_allocate_array_with_status (&cond_block,
                                        build_int_cst (pvoid_type_node, 0),
                                        size, NULL, NULL);
  gfc_conv_descriptor_data_set (&cond_block, decl, ptr);
  then_b = gfc_finish_block (&cond_block);

  gfc_init_block (&cond_block);
  gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
  else_b = gfc_finish_block (&cond_block);

  cond = fold_build2 (NE_EXPR, boolean_type_node,
                      fold_convert (pvoid_type_node,
                                    gfc_conv_descriptor_data_get (outer)),
                      null_pointer_node);
  gfc_add_expr_to_block (&block, build3 (COND_EXPR, void_type_node,
                                         cond, then_b, else_b));

  return gfc_finish_block (&block);
}

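/* Roughly, the block built above corresponds to the following pseudocode,
   where the names are illustrative only:

       if (outer.data != NULL)
         {
           decl = outer;                  -- copy descriptor and bounds
           decl.data = allocate (size);   -- fresh private payload
         }
       else
         decl.data = NULL;                -- keep "not currently allocated"
*/
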
/* Build and return code for a copy constructor from SRC to DEST.  */

tree
gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
  stmtblock_t block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return build2_v (MODIFY_EXPR, dest, src);

  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);

  /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
     and copied from SRC.  */
  gfc_start_block (&block);

  gfc_add_modify (&block, dest, src);
  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (dest, rank);
  size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
                      gfc_conv_descriptor_lbound_get (dest, rank));
  size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
                      gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
                        gfc_conv_descriptor_stride_get (dest, rank));
  esize = fold_convert (gfc_array_index_type,
                        TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
  ptr = gfc_allocate_array_with_status (&block,
                                        build_int_cst (pvoid_type_node, 0),
                                        size, NULL, NULL);
  gfc_conv_descriptor_data_set (&block, dest, ptr);
  call = build_call_expr_loc (input_location,
                              built_in_decls[BUILT_IN_MEMCPY], 3, ptr,
                              fold_convert (pvoid_type_node,
                                            gfc_conv_descriptor_data_get (src)),
                              size);
  gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));

  return gfc_finish_block (&block);
}

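/* Sketch of the FIRSTPRIVATE case handled above, again in illustrative
   pseudocode:

       dest = src;                          -- copy the descriptor
       dest.data = allocate (size);
       memcpy (dest.data, src.data, size);
*/
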
/* Similarly, except use an assignment operator instead.  */

tree
gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), rank, size, esize, call;
  stmtblock_t block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return build2_v (MODIFY_EXPR, dest, src);

  /* Handle copying allocatable arrays.  */
  gfc_start_block (&block);

  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (dest, rank);
  size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
                      gfc_conv_descriptor_lbound_get (dest, rank));
  size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
                      gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
                        gfc_conv_descriptor_stride_get (dest, rank));
  esize = fold_convert (gfc_array_index_type,
                        TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
  call = build_call_expr_loc (input_location,
                              built_in_decls[BUILT_IN_MEMCPY], 3,
                              fold_convert (pvoid_type_node,
                                            gfc_conv_descriptor_data_get (dest)),
                              fold_convert (pvoid_type_node,
                                            gfc_conv_descriptor_data_get (src)),
                              size);
  gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));

  return gfc_finish_block (&block);
}

/* Build and return code destructing DECL.  Return NULL if nothing
   to be done.  */

tree
gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED, tree decl)
{
  tree type = TREE_TYPE (decl);

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return NULL;

  /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
     to be deallocated if they were allocated.  */
  return gfc_trans_dealloc_allocated (decl);
}

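/* In Fortran terms the generated cleanup behaves roughly like
       if (allocated (decl)) deallocate (decl)
   at the end of the construct; gfc_trans_dealloc_allocated emits the
   corresponding NULL check and free of the data pointer.  */
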
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
gfc_omp_disregard_value_expr (tree decl, bool shared)
{
  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
          && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
          && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
        {
          /* If variable in COMMON or EQUIVALENCE is privatized, return
             true, as just that variable is supposed to be privatized,
             not the whole COMMON or whole EQUIVALENCE.
             For shared variables in COMMON or EQUIVALENCE, let them be
             gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
             from the same COMMON or EQUIVALENCE just one sharing of the
             whole COMMON or EQUIVALENCE is enough.  */
          return ! shared;
        }
    }

  if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
    return ! shared;

  return false;
}

/* Return true if DECL that is shared iff SHARED is true should
   be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
   flag set.  */

bool
gfc_omp_private_debug_clause (tree decl, bool shared)
{
  if (GFC_DECL_CRAY_POINTEE (decl))
    return true;

  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
          && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
          && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
        return shared;
    }

  return false;
}

/* Register language specific type size variables as potentially OpenMP
   firstprivate variables.  */

void
gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
    {
      int r;

      gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
      for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
        {
          omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
          omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
          omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
        }
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
    }
}

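/* Illustrative case: for a dummy argument such as

       subroutine sub (a, n)
         integer :: n
         real :: a(n, *)

   the hidden bound/stride (and size/offset) variables created for A's type
   are registered here, so they are firstprivatized and remain usable inside
   the outlined OpenMP region.  */
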
static inline tree
gfc_trans_add_clause (tree node, tree tail)
{
  OMP_CLAUSE_CHAIN (node) = tail;
  return node;
}

static tree
gfc_trans_omp_variable (gfc_symbol *sym)
{
  tree t = gfc_get_symbol_decl (sym);
  tree parent_decl;
  int parent_flag;
  bool return_value;
  bool alternate_entry;
  bool entry_master;

  return_value = sym->attr.function && sym->result == sym;
  alternate_entry = sym->attr.function && sym->attr.entry
                    && sym->result == sym;
  entry_master = sym->attr.result
                 && sym->ns->proc_name->attr.entry_master
                 && !gfc_return_by_reference (sym->ns->proc_name);
  parent_decl = DECL_CONTEXT (current_function_decl);

  if ((t == parent_decl && return_value)
      || (sym->ns && sym->ns->proc_name
          && sym->ns->proc_name->backend_decl == parent_decl
          && (alternate_entry || entry_master)))
    parent_flag = 1;
  else
    parent_flag = 0;

  /* Special case for assigning the return value of a function.
     Self recursive functions must have an explicit return value.  */
  if (return_value && (t == current_function_decl || parent_flag))
    t = gfc_get_fake_result_decl (sym, parent_flag);

  /* Similarly for alternate entry points.  */
  else if (alternate_entry
           && (sym->ns->proc_name->backend_decl == current_function_decl
               || parent_flag))
    {
      gfc_entry_list *el = NULL;

      for (el = sym->ns->entries; el; el = el->next)
        if (sym == el->sym)
          {
            t = gfc_get_fake_result_decl (sym, parent_flag);
            break;
          }
    }

  else if (entry_master
           && (sym->ns->proc_name->backend_decl == current_function_decl
               || parent_flag))
    t = gfc_get_fake_result_decl (sym, parent_flag);

  return t;
}

static tree
gfc_trans_omp_variable_list (enum omp_clause_code code, gfc_namelist *namelist,
                             tree list)
{
  for (; namelist != NULL; namelist = namelist->next)
    if (namelist->sym->attr.referenced)
      {
        tree t = gfc_trans_omp_variable (namelist->sym);
        if (t != error_mark_node)
          {
            tree node = build_omp_clause (input_location, code);
            OMP_CLAUSE_DECL (node) = t;
            list = gfc_trans_add_clause (node, list);
          }
      }
  return list;
}

static void
gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
{
  gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
  gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
  gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
  gfc_expr *e1, *e2, *e3, *e4;
  gfc_ref *ref;
  tree decl, backend_decl, stmt;
  locus old_loc = gfc_current_locus;
  const char *iname;
  gfc_try t;

  decl = OMP_CLAUSE_DECL (c);
  gfc_current_locus = where;

  /* Create a fake symbol for init value.  */
  memset (&init_val_sym, 0, sizeof (init_val_sym));
  init_val_sym.ns = sym->ns;
  init_val_sym.name = sym->name;
  init_val_sym.ts = sym->ts;
  init_val_sym.attr.referenced = 1;
  init_val_sym.declared_at = where;
  init_val_sym.attr.flavor = FL_VARIABLE;
  backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
  init_val_sym.backend_decl = backend_decl;

  /* Create a fake symbol for the outer array reference.  */
  outer_sym = *sym;
  outer_sym.as = gfc_copy_array_spec (sym->as);
  outer_sym.attr.dummy = 0;
  outer_sym.attr.result = 0;
  outer_sym.attr.flavor = FL_VARIABLE;
  outer_sym.backend_decl = create_tmp_var_raw (TREE_TYPE (decl), NULL);

  /* Create fake symtrees for it.  */
  symtree1 = gfc_new_symtree (&root1, sym->name);
  symtree1->n.sym = sym;
  gcc_assert (symtree1 == root1);

  symtree2 = gfc_new_symtree (&root2, sym->name);
  symtree2->n.sym = &init_val_sym;
  gcc_assert (symtree2 == root2);

  symtree3 = gfc_new_symtree (&root3, sym->name);
  symtree3->n.sym = &outer_sym;
  gcc_assert (symtree3 == root3);

  /* Create expressions.  */
  e1 = gfc_get_expr ();
  e1->expr_type = EXPR_VARIABLE;
  e1->where = where;
  e1->symtree = symtree1;
  e1->ts = sym->ts;
  e1->ref = ref = gfc_get_ref ();
  ref->type = REF_ARRAY;
  ref->u.ar.where = where;
  ref->u.ar.as = sym->as;
  ref->u.ar.type = AR_FULL;
  ref->u.ar.dimen = 0;
  t = gfc_resolve_expr (e1);
  gcc_assert (t == SUCCESS);

  e2 = gfc_get_expr ();
  e2->expr_type = EXPR_VARIABLE;
  e2->where = where;
  e2->symtree = symtree2;
  e2->ts = sym->ts;
  t = gfc_resolve_expr (e2);
  gcc_assert (t == SUCCESS);

  e3 = gfc_copy_expr (e1);
  e3->symtree = symtree3;
  t = gfc_resolve_expr (e3);
  gcc_assert (t == SUCCESS);

  iname = NULL;
  switch (OMP_CLAUSE_REDUCTION_CODE (c))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      e4 = gfc_add (e3, e1);
      break;
    case MULT_EXPR:
      e4 = gfc_multiply (e3, e1);
      break;
    case TRUTH_ANDIF_EXPR:
      e4 = gfc_and (e3, e1);
      break;
    case TRUTH_ORIF_EXPR:
      e4 = gfc_or (e3, e1);
      break;
    case EQ_EXPR:
      e4 = gfc_eqv (e3, e1);
      break;
    case NE_EXPR:
      e4 = gfc_neqv (e3, e1);
      break;
    case MIN_EXPR:
      iname = "min";
      break;
    case MAX_EXPR:
      iname = "max";
      break;
    case BIT_AND_EXPR:
      iname = "iand";
      break;
    case BIT_IOR_EXPR:
      iname = "ior";
      break;
    case BIT_XOR_EXPR:
      iname = "ieor";
      break;
    default:
      gcc_unreachable ();
    }
  if (iname != NULL)
    {
      memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
      intrinsic_sym.ns = sym->ns;
      intrinsic_sym.name = iname;
      intrinsic_sym.ts = sym->ts;
      intrinsic_sym.attr.referenced = 1;
      intrinsic_sym.attr.intrinsic = 1;
      intrinsic_sym.attr.function = 1;
      intrinsic_sym.result = &intrinsic_sym;
      intrinsic_sym.declared_at = where;

      symtree4 = gfc_new_symtree (&root4, iname);
      symtree4->n.sym = &intrinsic_sym;
      gcc_assert (symtree4 == root4);

      e4 = gfc_get_expr ();
      e4->expr_type = EXPR_FUNCTION;
      e4->where = where;
      e4->symtree = symtree4;
      e4->value.function.isym = gfc_find_function (iname);
      e4->value.function.actual = gfc_get_actual_arglist ();
      e4->value.function.actual->expr = e3;
      e4->value.function.actual->next = gfc_get_actual_arglist ();
      e4->value.function.actual->next->expr = e1;
    }
  /* e1 and e3 have been stored as arguments of e4, avoid sharing.  */
  e1 = gfc_copy_expr (e1);
  e3 = gfc_copy_expr (e3);
  t = gfc_resolve_expr (e4);
  gcc_assert (t == SUCCESS);

  /* Create the init statement list.  */
  pushlevel (0);
  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
    {
      /* If decl is an allocatable array, it needs to be allocated
         with the same bounds as the outer var.  */
      tree type = TREE_TYPE (decl), rank, size, esize, ptr;
      stmtblock_t block;

      gfc_start_block (&block);

      gfc_add_modify (&block, decl, outer_sym.backend_decl);
      rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (decl, rank);
      size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
                          gfc_conv_descriptor_lbound_get (decl, rank));
      size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
                          gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
        size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
                            gfc_conv_descriptor_stride_get (decl, rank));
      esize = fold_convert (gfc_array_index_type,
                            TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
      size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
      ptr = gfc_allocate_array_with_status (&block,
                                            build_int_cst (pvoid_type_node, 0),
                                            size, NULL, NULL);
      gfc_conv_descriptor_data_set (&block, decl, ptr);
      gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false,
                                                           false));
      stmt = gfc_finish_block (&block);
    }
  else
    stmt = gfc_trans_assignment (e1, e2, false, false);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  OMP_CLAUSE_REDUCTION_INIT (c) = stmt;

  /* Create the merge statement list.  */
  pushlevel (0);
  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
    {
      /* If decl is an allocatable array, it needs to be deallocated
         afterwards.  */
      stmtblock_t block;

      gfc_start_block (&block);
      gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false,
                                                           true));
      gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl));
      stmt = gfc_finish_block (&block);
    }
  else
    stmt = gfc_trans_assignment (e3, e4, false, true);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;

  /* And stick the placeholder VAR_DECL into the clause as well.  */
  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_sym.backend_decl;

  gfc_current_locus = old_loc;

  gfc_free_expr (e1);
  gfc_free_expr (e2);
  gfc_free_expr (e3);
  gfc_free_expr (e4);
  gfc_free (symtree1);
  gfc_free (symtree2);
  gfc_free (symtree3);
  if (symtree4)
    gfc_free (symtree4);
  gfc_free_array_spec (outer_sym.as);
}

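/* Illustrative Fortran input for the code above, assuming an allocatable
   array reduction variable:

       real, allocatable :: a(:)
       !$omp parallel do reduction(+:a)
       do i = 1, n
         a = a + b(:, i)
       end do

   The generated REDUCTION_INIT allocates the private copy with the outer
   bounds and assigns the neutral element, and REDUCTION_MERGE combines the
   private copy into the placeholder and deallocates it.  */
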
static tree
gfc_trans_omp_reduction_list (gfc_namelist *namelist, tree list,
                              enum tree_code reduction_code, locus where)
{
  for (; namelist != NULL; namelist = namelist->next)
    if (namelist->sym->attr.referenced)
      {
        tree t = gfc_trans_omp_variable (namelist->sym);
        if (t != error_mark_node)
          {
            tree node = build_omp_clause (where.lb->location,
                                          OMP_CLAUSE_REDUCTION);
            OMP_CLAUSE_DECL (node) = t;
            OMP_CLAUSE_REDUCTION_CODE (node) = reduction_code;
            if (namelist->sym->attr.dimension)
              gfc_trans_omp_array_reduction (node, namelist->sym, where);
            list = gfc_trans_add_clause (node, list);
          }
      }
  return list;
}

static tree
gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
                       locus where)
{
  tree omp_clauses = NULL_TREE, chunk_size, c;
  int list;
  enum omp_clause_code clause_code;
  gfc_se se;

  if (clauses == NULL)
    return NULL_TREE;

  for (list = 0; list < OMP_LIST_NUM; list++)
    {
      gfc_namelist *n = clauses->lists[list];

      if (n == NULL)
        continue;
      if (list >= OMP_LIST_REDUCTION_FIRST
          && list <= OMP_LIST_REDUCTION_LAST)
        {
          enum tree_code reduction_code;
          switch (list)
            {
            case OMP_LIST_PLUS:
              reduction_code = PLUS_EXPR;
              break;
            case OMP_LIST_MULT:
              reduction_code = MULT_EXPR;
              break;
            case OMP_LIST_SUB:
              reduction_code = MINUS_EXPR;
              break;
            case OMP_LIST_AND:
              reduction_code = TRUTH_ANDIF_EXPR;
              break;
            case OMP_LIST_OR:
              reduction_code = TRUTH_ORIF_EXPR;
              break;
            case OMP_LIST_EQV:
              reduction_code = EQ_EXPR;
              break;
            case OMP_LIST_NEQV:
              reduction_code = NE_EXPR;
              break;
            case OMP_LIST_MAX:
              reduction_code = MAX_EXPR;
              break;
            case OMP_LIST_MIN:
              reduction_code = MIN_EXPR;
              break;
            case OMP_LIST_IAND:
              reduction_code = BIT_AND_EXPR;
              break;
            case OMP_LIST_IOR:
              reduction_code = BIT_IOR_EXPR;
              break;
            case OMP_LIST_IEOR:
              reduction_code = BIT_XOR_EXPR;
              break;
            default:
              gcc_unreachable ();
            }
          omp_clauses
            = gfc_trans_omp_reduction_list (n, omp_clauses, reduction_code,
                                            where);
          continue;
        }
      switch (list)
        {
        case OMP_LIST_PRIVATE:
          clause_code = OMP_CLAUSE_PRIVATE;
          goto add_clause;
        case OMP_LIST_SHARED:
          clause_code = OMP_CLAUSE_SHARED;
          goto add_clause;
        case OMP_LIST_FIRSTPRIVATE:
          clause_code = OMP_CLAUSE_FIRSTPRIVATE;
          goto add_clause;
        case OMP_LIST_LASTPRIVATE:
          clause_code = OMP_CLAUSE_LASTPRIVATE;
          goto add_clause;
        case OMP_LIST_COPYIN:
          clause_code = OMP_CLAUSE_COPYIN;
          goto add_clause;
        case OMP_LIST_COPYPRIVATE:
          clause_code = OMP_CLAUSE_COPYPRIVATE;
          /* FALLTHROUGH */
        add_clause:
          omp_clauses
            = gfc_trans_omp_variable_list (clause_code, n, omp_clauses);
          break;
        default:
          break;
        }
    }

  if (clauses->if_expr)
    {
      tree if_var;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->if_expr);
      gfc_add_block_to_block (block, &se.pre);
      if_var = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
      OMP_CLAUSE_IF_EXPR (c) = if_var;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->num_threads)
    {
      tree num_threads;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->num_threads);
      gfc_add_block_to_block (block, &se.pre);
      num_threads = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
      OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  chunk_size = NULL_TREE;
  if (clauses->chunk_size)
    {
      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->chunk_size);
      gfc_add_block_to_block (block, &se.pre);
      chunk_size = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);
    }

  if (clauses->sched_kind != OMP_SCHED_NONE)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
      OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
      switch (clauses->sched_kind)
        {
        case OMP_SCHED_STATIC:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
          break;
        case OMP_SCHED_DYNAMIC:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
          break;
        case OMP_SCHED_GUIDED:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
          break;
        case OMP_SCHED_RUNTIME:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
          break;
        case OMP_SCHED_AUTO:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
          break;
        default:
          gcc_unreachable ();
        }
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
      switch (clauses->default_sharing)
        {
        case OMP_DEFAULT_NONE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
          break;
        case OMP_DEFAULT_SHARED:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
          break;
        case OMP_DEFAULT_PRIVATE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
          break;
        case OMP_DEFAULT_FIRSTPRIVATE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          break;
        default:
          gcc_unreachable ();
        }
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->nowait)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->ordered)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->untied)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->collapse)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
      OMP_CLAUSE_COLLAPSE_EXPR (c) = build_int_cst (NULL, clauses->collapse);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  return omp_clauses;
}

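/* As an example of the mapping above, a directive such as

       !$omp parallel do if (n > 1) num_threads (4) schedule (static, 8) reduction (+:s)

   yields a clause chain containing OMP_CLAUSE_IF, OMP_CLAUSE_NUM_THREADS,
   OMP_CLAUSE_SCHEDULE (carrying the chunk size) and OMP_CLAUSE_REDUCTION;
   the IF, NUM_THREADS and chunk size expressions are evaluated into
   temporaries in BLOCK first.  */
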
/* Like gfc_trans_code, but force creation of a BIND_EXPR around it.  */

static tree
gfc_trans_omp_code (gfc_code *code, bool force_empty)
{
  tree stmt;

  pushlevel (0);
  stmt = gfc_trans_code (code);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt) || force_empty)
        {
          tree block = poplevel (1, 0, 0);
          stmt = build3_v (BIND_EXPR, NULL, stmt, block);
        }
      else
        poplevel (0, 0, 0);
    }
  else
    poplevel (0, 0, 0);
  return stmt;
}

static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);

static tree
gfc_trans_omp_atomic (gfc_code *code)
{
  gfc_se lse;
  gfc_se rse;
  gfc_expr *expr2, *e;
  gfc_symbol *var;
  stmtblock_t block;
  tree lhsaddr, type, rhs, x;
  enum tree_code op = ERROR_MARK;
  bool var_on_left = false;

  code = code->block->next;
  gcc_assert (code->op == EXEC_ASSIGN);
  gcc_assert (code->next == NULL);
  var = code->expr1->symtree->n.sym;

  gfc_init_se (&lse, NULL);
  gfc_init_se (&rse, NULL);
  gfc_start_block (&block);

  gfc_conv_expr (&lse, code->expr1);
  gfc_add_block_to_block (&block, &lse.pre);
  type = TREE_TYPE (lse.expr);
  lhsaddr = gfc_build_addr_expr (NULL, lse.expr);

  expr2 = code->expr2;
  if (expr2->expr_type == EXPR_FUNCTION
      && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
    expr2 = expr2->value.function.actual->expr;

  if (expr2->expr_type == EXPR_OP)
    {
      gfc_expr *e;
      switch (expr2->value.op.op)
        {
        case INTRINSIC_PLUS:
          op = PLUS_EXPR;
          break;
        case INTRINSIC_TIMES:
          op = MULT_EXPR;
          break;
        case INTRINSIC_MINUS:
          op = MINUS_EXPR;
          break;
        case INTRINSIC_DIVIDE:
          if (expr2->ts.type == BT_INTEGER)
            op = TRUNC_DIV_EXPR;
          else
            op = RDIV_EXPR;
          break;
        case INTRINSIC_AND:
          op = TRUTH_ANDIF_EXPR;
          break;
        case INTRINSIC_OR:
          op = TRUTH_ORIF_EXPR;
          break;
        case INTRINSIC_EQV:
          op = EQ_EXPR;
          break;
        case INTRINSIC_NEQV:
          op = NE_EXPR;
          break;
        default:
          gcc_unreachable ();
        }
      e = expr2->value.op.op1;
      if (e->expr_type == EXPR_FUNCTION
          && e->value.function.isym->id == GFC_ISYM_CONVERSION)
        e = e->value.function.actual->expr;
      if (e->expr_type == EXPR_VARIABLE
          && e->symtree != NULL
          && e->symtree->n.sym == var)
        {
          expr2 = expr2->value.op.op2;
          var_on_left = true;
        }
      else
        {
          e = expr2->value.op.op2;
          if (e->expr_type == EXPR_FUNCTION
              && e->value.function.isym->id == GFC_ISYM_CONVERSION)
            e = e->value.function.actual->expr;
          gcc_assert (e->expr_type == EXPR_VARIABLE
                      && e->symtree != NULL
                      && e->symtree->n.sym == var);
          expr2 = expr2->value.op.op1;
          var_on_left = false;
        }
      gfc_conv_expr (&rse, expr2);
      gfc_add_block_to_block (&block, &rse.pre);
    }
  else
    {
      gcc_assert (expr2->expr_type == EXPR_FUNCTION);
      switch (expr2->value.function.isym->id)
        {
        case GFC_ISYM_MIN:
          op = MIN_EXPR;
          break;
        case GFC_ISYM_MAX:
          op = MAX_EXPR;
          break;
        case GFC_ISYM_IAND:
          op = BIT_AND_EXPR;
          break;
        case GFC_ISYM_IOR:
          op = BIT_IOR_EXPR;
          break;
        case GFC_ISYM_IEOR:
          op = BIT_XOR_EXPR;
          break;
        default:
          gcc_unreachable ();
        }
      e = expr2->value.function.actual->expr;
      gcc_assert (e->expr_type == EXPR_VARIABLE
                  && e->symtree != NULL
                  && e->symtree->n.sym == var);

      gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
      gfc_add_block_to_block (&block, &rse.pre);
      if (expr2->value.function.actual->next->next != NULL)
        {
          tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
          gfc_actual_arglist *arg;

          gfc_add_modify (&block, accum, rse.expr);
          for (arg = expr2->value.function.actual->next->next; arg;
               arg = arg->next)
            {
              gfc_init_block (&rse.pre);
              gfc_conv_expr (&rse, arg->expr);
              gfc_add_block_to_block (&block, &rse.pre);
              x = fold_build2 (op, TREE_TYPE (accum), accum, rse.expr);
              gfc_add_modify (&block, accum, x);
            }

          rse.expr = accum;
        }

      expr2 = expr2->value.function.actual->next->expr;
    }

  lhsaddr = save_expr (lhsaddr);
  rhs = gfc_evaluate_now (rse.expr, &block);
  x = convert (TREE_TYPE (rhs), build_fold_indirect_ref_loc (input_location,
                                                             lhsaddr));

  if (var_on_left)
    x = fold_build2 (op, TREE_TYPE (rhs), x, rhs);
  else
    x = fold_build2 (op, TREE_TYPE (rhs), rhs, x);

  if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
      && TREE_CODE (type) != COMPLEX_TYPE)
    x = fold_build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (rhs)), x);

  x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
  gfc_add_expr_to_block (&block, x);

  gfc_add_block_to_block (&block, &lse.pre);
  gfc_add_block_to_block (&block, &rse.pre);

  return gfc_finish_block (&block);
}

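/* The forms accepted above follow the OpenMP ATOMIC update rules, e.g.

       !$omp atomic
       x = x + expr
       !$omp atomic
       x = expr * x
       !$omp atomic
       x = min (x, a, b)

   i.e. var op expr, expr op var, or an intrinsic (MIN/MAX/IAND/IOR/IEOR)
   taking VAR as an argument; extra intrinsic arguments are folded into a
   temporary accumulator before the single OMP_ATOMIC store.  */
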
static tree
gfc_trans_omp_barrier (void)
{
  tree decl = built_in_decls [BUILT_IN_GOMP_BARRIER];
  return build_call_expr_loc (input_location, decl, 0);
}

static tree
gfc_trans_omp_critical (gfc_code *code)
{
  tree name = NULL_TREE, stmt;
  if (code->ext.omp_name != NULL)
    name = get_identifier (code->ext.omp_name);
  stmt = gfc_trans_code (code->block->next);
  return build2 (OMP_CRITICAL, void_type_node, stmt, name);
}

static tree
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
                  gfc_omp_clauses *do_clauses, tree par_clauses)
{
  gfc_se se;
  tree dovar, stmt, from, to, step, type, init, cond, incr;
  tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
  stmtblock_t block;
  stmtblock_t body;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;
  int i, collapse = clauses->collapse;
  tree dovar_init = NULL_TREE;

  if (collapse <= 0)
    collapse = 1;

  code = code->block->next;
  gcc_assert (code->op == EXEC_DO);

  init = make_tree_vec (collapse);
  cond = make_tree_vec (collapse);
  incr = make_tree_vec (collapse);

  if (pblock == NULL)
    {
      gfc_start_block (&block);
      pblock = &block;
    }

  omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);

  for (i = 0; i < collapse; i++)
    {
      int simple = 0;
      int dovar_found = 0;
      tree dovar_decl;

      if (clauses)
        {
          gfc_namelist *n;
          for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
               n = n->next)
            if (code->ext.iterator->var->symtree->n.sym == n->sym)
              break;
          if (n != NULL)
            dovar_found = 1;
          else if (n == NULL)
            for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
              if (code->ext.iterator->var->symtree->n.sym == n->sym)
                break;
          if (n != NULL)
            dovar_found++;
        }

      /* Evaluate all the expressions in the iterator.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->ext.iterator->var);
      gfc_add_block_to_block (pblock, &se.pre);
      dovar = se.expr;
      type = TREE_TYPE (dovar);
      gcc_assert (TREE_CODE (type) == INTEGER_TYPE);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->start);
      gfc_add_block_to_block (pblock, &se.pre);
      from = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->end);
      gfc_add_block_to_block (pblock, &se.pre);
      to = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->step);
      gfc_add_block_to_block (pblock, &se.pre);
      step = gfc_evaluate_now (se.expr, pblock);
      dovar_decl = dovar;

      /* Special case simple loops.  */
      if (TREE_CODE (dovar) == VAR_DECL)
        {
          if (integer_onep (step))
            simple = 1;
          else if (tree_int_cst_equal (step, integer_minus_one_node))
            simple = -1;
        }
      else
        dovar_decl
          = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym);

      /* Loop body.  */
      if (simple)
        {
          TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
          TREE_VEC_ELT (cond, i) = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR,
                                                boolean_type_node, dovar, to);
          TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, dovar, step);
          TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, dovar,
                                                TREE_VEC_ELT (incr, i));
        }
      else
        {
          /* STEP is not 1 or -1.  Use:
             for (count = 0; count < (to + step - from) / step; count++)
               {
                 dovar = from + count * step;
                 body;
               cycle_label:;
               }  */
          tmp = fold_build2 (MINUS_EXPR, type, step, from);
          tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
          tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
          tmp = gfc_evaluate_now (tmp, pblock);
          count = gfc_create_var (type, "count");
          TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
                                             build_int_cst (type, 0));
          TREE_VEC_ELT (cond, i) = fold_build2 (LT_EXPR, boolean_type_node,
                                                count, tmp);
          TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, count,
                                                build_int_cst (type, 1));
          TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type,
                                                count, TREE_VEC_ELT (incr, i));

          /* Initialize DOVAR.  */
          tmp = fold_build2 (MULT_EXPR, type, count, step);
          tmp = fold_build2 (PLUS_EXPR, type, from, tmp);
          dovar_init = tree_cons (dovar, tmp, dovar_init);
        }

      if (!dovar_found)
        {
          tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
          OMP_CLAUSE_DECL (tmp) = dovar_decl;
          omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
        }
      else if (dovar_found == 2)
        {
          tree c = NULL;

          tmp = NULL;
          if (!simple)
            {
              /* If dovar is lastprivate, but different counter is used,
                 dovar += step needs to be added to
                 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
                 will have the value on entry of the last loop, rather
                 than value after iterator increment.  */
              tmp = gfc_evaluate_now (step, pblock);
              tmp = fold_build2 (PLUS_EXPR, type, dovar, tmp);
              tmp = fold_build2 (MODIFY_EXPR, type, dovar, tmp);
              for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                    && OMP_CLAUSE_DECL (c) == dovar_decl)
                  {
                    OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
                    break;
                  }
            }
          if (c == NULL && par_clauses != NULL)
            {
              for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                    && OMP_CLAUSE_DECL (c) == dovar_decl)
                  {
                    tree l = build_omp_clause (input_location,
                                               OMP_CLAUSE_LASTPRIVATE);
                    OMP_CLAUSE_DECL (l) = dovar_decl;
                    OMP_CLAUSE_CHAIN (l) = omp_clauses;
                    OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
                    omp_clauses = l;
                    OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
                    break;
                  }
            }
          gcc_assert (simple || c != NULL);
        }
      if (!simple)
        {
          tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
          OMP_CLAUSE_DECL (tmp) = count;
          omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
        }

      if (i + 1 < collapse)
        code = code->block->next;
    }

  if (pblock != &block)
    {
      pushlevel (0);
      gfc_start_block (&block);
    }

  gfc_start_block (&body);

  dovar_init = nreverse (dovar_init);
  while (dovar_init)
    {
      gfc_add_modify (&body, TREE_PURPOSE (dovar_init),
                      TREE_VALUE (dovar_init));
      dovar_init = TREE_CHAIN (dovar_init);
    }

  /* Cycle statement is implemented with a goto.  Exit statement must not be
     present for this loop.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);

  /* Put these labels where they can be found later.  We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already
     used).  cycle_label goes in TREE_PURPOSE (backend_decl), exit
     label in TREE_VALUE (backend_decl).  */

  code->block->backend_decl = tree_cons (cycle_label, NULL, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_omp_code (code->block->next, true);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* End of loop body.  */
  stmt = make_node (OMP_FOR);

  TREE_TYPE (stmt) = void_type_node;
  OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
  OMP_FOR_CLAUSES (stmt) = omp_clauses;
  OMP_FOR_INIT (stmt) = init;
  OMP_FOR_COND (stmt) = cond;
  OMP_FOR_INCR (stmt) = incr;
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}

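/* Sketch of the non-unit-step handling above: for

       !$omp do
       do i = 1, n, 4
         ...
       end do

   a synthetic COUNT variable becomes the canonical OMP_FOR iterator, I is
   assigned FROM + COUNT * STEP at the top of the body, and COUNT is added
   as an extra PRIVATE clause; loops with step 1 or -1 use I directly.  */
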
static tree
gfc_trans_omp_flush (void)
{
  tree decl = built_in_decls [BUILT_IN_SYNCHRONIZE];
  return build_call_expr_loc (input_location, decl, 0);
}

static tree
gfc_trans_omp_master (gfc_code *code)
{
  tree stmt = gfc_trans_code (code->block->next);
  if (IS_EMPTY_STMT (stmt))
    return stmt;
  return build1_v (OMP_MASTER, stmt);
}

static tree
gfc_trans_omp_ordered (gfc_code *code)
{
  return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
}

static tree
gfc_trans_omp_parallel (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_parallel_do (gfc_code *code)
{
  stmtblock_t block, *pblock = NULL;
  gfc_omp_clauses parallel_clauses, do_clauses;
  tree stmt, omp_clauses = NULL_TREE;

  gfc_start_block (&block);

  memset (&do_clauses, 0, sizeof (do_clauses));
  if (code->ext.omp_clauses != NULL)
    {
      memcpy (&parallel_clauses, code->ext.omp_clauses,
              sizeof (parallel_clauses));
      do_clauses.sched_kind = parallel_clauses.sched_kind;
      do_clauses.chunk_size = parallel_clauses.chunk_size;
      do_clauses.ordered = parallel_clauses.ordered;
      do_clauses.collapse = parallel_clauses.collapse;
      parallel_clauses.sched_kind = OMP_SCHED_NONE;
      parallel_clauses.chunk_size = NULL;
      parallel_clauses.ordered = false;
      parallel_clauses.collapse = 0;
      omp_clauses = gfc_trans_omp_clauses (&block, &parallel_clauses,
                                           code->loc);
    }
  do_clauses.nowait = true;
  if (!do_clauses.ordered && do_clauses.sched_kind != OMP_SCHED_STATIC)
    pblock = &block;
  else
    pushlevel (0);
  stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

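/* Note on the clause splitting above: for a combined construct such as

       !$omp parallel do schedule (dynamic, 4) ordered private (t)

   the SCHEDULE, CHUNK_SIZE, ORDERED and COLLAPSE clauses are moved to the
   inner OMP_FOR (which gets an implicit NOWAIT), while the remaining
   clauses stay on the enclosing OMP_PARALLEL, marked OMP_PARALLEL_COMBINED.  */
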
static tree
gfc_trans_omp_parallel_sections (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses section_clauses;
  tree stmt, omp_clauses;

  memset (&section_clauses, 0, sizeof (section_clauses));
  section_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel (0);
  stmt = gfc_trans_omp_sections (code, &section_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_parallel_workshare (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses workshare_clauses;
  tree stmt, omp_clauses;

  memset (&workshare_clauses, 0, sizeof (workshare_clauses));
  workshare_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel (0);
  stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
{
  stmtblock_t block, body;
  tree omp_clauses, stmt;
  bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;

  gfc_start_block (&block);

  omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);

  gfc_init_block (&body);
  for (code = code->block; code; code = code->block)
    {
      /* Last section is special because of lastprivate, so even if it
         is empty, chain it in.  */
      stmt = gfc_trans_omp_code (code->next,
                                 has_lastprivate && code->block == NULL);
      if (! IS_EMPTY_STMT (stmt))
        {
          stmt = build1_v (OMP_SECTION, stmt);
          gfc_add_expr_to_block (&body, stmt);
        }
    }
  stmt = gfc_finish_block (&body);

  stmt = build2 (OMP_SECTIONS, void_type_node, stmt, omp_clauses);
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
  tree stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2 (OMP_SINGLE, void_type_node, stmt, omp_clauses);
  return stmt;
}

static tree
gfc_trans_omp_task (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2 (OMP_TASK, void_type_node, stmt, omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_taskwait (void)
{
  tree decl = built_in_decls [BUILT_IN_GOMP_TASKWAIT];
  return build_call_expr_loc (input_location, decl, 0);
}

static tree
gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree res, tmp, stmt;
  stmtblock_t block, *pblock = NULL;
  stmtblock_t singleblock;
  int saved_ompws_flags;
  bool singleblock_in_progress = false;
  /* True if previous gfc_code in workshare construct is not workshared.  */
  bool prev_singleunit;

  code = code->block->next;

  pushlevel (0);

  if (!code)
    return build_empty_stmt (input_location);

  gfc_start_block (&block);
  pblock = &block;

  ompws_flags = OMPWS_WORKSHARE_FLAG;
  prev_singleunit = false;

  /* Translate statements one by one to trees until we reach
     the end of the workshare construct.  Adjacent gfc_codes that
     are a single unit of work are clustered and encapsulated in a
     single OMP_SINGLE construct.  */
  for (; code; code = code->next)
    {
      if (code->here != 0)
        {
          res = gfc_trans_label_here (code);
          gfc_add_expr_to_block (pblock, res);
        }

      /* No dependence analysis, use for clauses with wait.
         If this is the last gfc_code, use default omp_clauses.  */
      if (code->next == NULL && clauses->nowait)
        ompws_flags |= OMPWS_NOWAIT;

      /* By default, every gfc_code is a single unit of work.  */
      ompws_flags |= OMPWS_CURR_SINGLEUNIT;
      ompws_flags &= ~OMPWS_SCALARIZER_WS;

      switch (code->op)
        {
        case EXEC_NOP:
          res = NULL_TREE;
          break;

        case EXEC_ASSIGN:
          res = gfc_trans_assign (code);
          break;

        case EXEC_POINTER_ASSIGN:
          res = gfc_trans_pointer_assign (code);
          break;

        case EXEC_INIT_ASSIGN:
          res = gfc_trans_init_assign (code);
          break;

        case EXEC_FORALL:
          res = gfc_trans_forall (code);
          break;

        case EXEC_WHERE:
          res = gfc_trans_where (code);
          break;

        case EXEC_OMP_ATOMIC:
          res = gfc_trans_omp_directive (code);
          break;

        case EXEC_OMP_PARALLEL:
        case EXEC_OMP_PARALLEL_DO:
        case EXEC_OMP_PARALLEL_SECTIONS:
        case EXEC_OMP_PARALLEL_WORKSHARE:
        case EXEC_OMP_CRITICAL:
          saved_ompws_flags = ompws_flags;
          ompws_flags = 0;
          res = gfc_trans_omp_directive (code);
          ompws_flags = saved_ompws_flags;
          break;

        default:
          internal_error ("gfc_trans_omp_workshare(): Bad statement code");
        }

      gfc_set_backend_locus (&code->loc);

      if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
        {
          if (prev_singleunit)
            {
              if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
                /* Add current gfc_code to single block.  */
                gfc_add_expr_to_block (&singleblock, res);
              else
                {
                  /* Finish single block and add it to pblock.  */
                  tmp = gfc_finish_block (&singleblock);
                  tmp = build2 (OMP_SINGLE, void_type_node, tmp, NULL_TREE);
                  gfc_add_expr_to_block (pblock, tmp);
                  /* Add current gfc_code to pblock.  */
                  gfc_add_expr_to_block (pblock, res);
                  singleblock_in_progress = false;
                }
            }
          else
            {
              if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
                {
                  /* Start single block.  */
                  gfc_init_block (&singleblock);
                  gfc_add_expr_to_block (&singleblock, res);
                  singleblock_in_progress = true;
                }
              else
                /* Add the new statement to the block.  */
                gfc_add_expr_to_block (pblock, res);
            }
          prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
        }
    }

  /* Finish remaining SINGLE block, if we were in the middle of one.  */
  if (singleblock_in_progress)
    {
      /* Finish single block and add it to pblock.  */
      tmp = gfc_finish_block (&singleblock);
      tmp = build2 (OMP_SINGLE, void_type_node, tmp,
                    clauses->nowait
                    ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
                    : NULL_TREE);
      gfc_add_expr_to_block (pblock, tmp);
    }

  stmt = gfc_finish_block (pblock);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt))
        {
          tree bindblock = poplevel (1, 0, 0);
          stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
        }
      else
        poplevel (0, 0, 0);
    }
  else
    poplevel (0, 0, 0);

  ompws_flags = 0;
  return stmt;
}

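/* Illustration of the clustering above:

       !$omp workshare
       a = b + c     ! array assignment, workshared via the scalarizer
       s = 0.0       ! scalar assignment, a single unit of work
       t = 1.0       ! adjacent single unit, same OMP_SINGLE
       !$omp end workshare

   Adjacent statements that are single units of work end up in one
   OMP_SINGLE, while array assignments are workshared under the OMPWS_*
   flags.  */
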
tree
gfc_trans_omp_directive (gfc_code *code)
{
  switch (code->op)
    {
    case EXEC_OMP_ATOMIC:
      return gfc_trans_omp_atomic (code);
    case EXEC_OMP_BARRIER:
      return gfc_trans_omp_barrier ();
    case EXEC_OMP_CRITICAL:
      return gfc_trans_omp_critical (code);
    case EXEC_OMP_DO:
      return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses, NULL);
    case EXEC_OMP_FLUSH:
      return gfc_trans_omp_flush ();
    case EXEC_OMP_MASTER:
      return gfc_trans_omp_master (code);
    case EXEC_OMP_ORDERED:
      return gfc_trans_omp_ordered (code);
    case EXEC_OMP_PARALLEL:
      return gfc_trans_omp_parallel (code);
    case EXEC_OMP_PARALLEL_DO:
      return gfc_trans_omp_parallel_do (code);
    case EXEC_OMP_PARALLEL_SECTIONS:
      return gfc_trans_omp_parallel_sections (code);
    case EXEC_OMP_PARALLEL_WORKSHARE:
      return gfc_trans_omp_parallel_workshare (code);
    case EXEC_OMP_SECTIONS:
      return gfc_trans_omp_sections (code, code->ext.omp_clauses);
    case EXEC_OMP_SINGLE:
      return gfc_trans_omp_single (code, code->ext.omp_clauses);
    case EXEC_OMP_TASK:
      return gfc_trans_omp_task (code);
    case EXEC_OMP_TASKWAIT:
      return gfc_trans_omp_taskwait ();
    case EXEC_OMP_WORKSHARE:
      return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
    default:
      gcc_unreachable ();
    }
}