/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"   /* For create_tmp_var_raw.  */
#include "toplev.h"   /* For internal_error.  */
#include "gfortran.h"
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "arith.h"

int ompws_flags;

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
gfc_omp_privatize_by_reference (const_tree decl)
{
  tree type = TREE_TYPE (decl);

  if (TREE_CODE (type) == REFERENCE_TYPE
      && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
    return true;

  if (TREE_CODE (type) == POINTER_TYPE)
    {
      /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
         that have POINTER_TYPE type and don't have GFC_POINTER_TYPE_P
         set are supposed to be privatized by reference.  */
      if (GFC_POINTER_TYPE_P (type))
        return false;

      if (!DECL_ARTIFICIAL (decl)
          && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
        return true;

      /* Some arrays are expanded as DECL_ARTIFICIAL pointers
         by the frontend.  */
      if (DECL_LANG_SPECIFIC (decl)
          && GFC_DECL_SAVED_DESCRIPTOR (decl))
        return true;
    }

  return false;
}

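/* Illustrative note (hypothetical fragment, not from this file): for a
   dummy argument such as

       subroutine sub (x)
         integer :: x
       !$omp parallel private(x)

   X is typically passed by reference, so its PARM_DECL has pointer type
   and is not artificial; the function above then asks the middle end to
   privatize the object pointed to rather than the pointer itself.  */
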
/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
gfc_omp_predetermined_sharing (tree decl)
{
  if (DECL_ARTIFICIAL (decl) && ! GFC_DECL_RESULT (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Cray pointees shouldn't be listed in any clauses and should be
     gimplified to dereference of the corresponding Cray pointer.
     Make them all private, so that they are emitted in the debug
     information.  */
  if (GFC_DECL_CRAY_POINTEE (decl))
    return OMP_CLAUSE_DEFAULT_PRIVATE;

  /* Assumed-size arrays are predetermined to inherit sharing
     attributes of the associated actual argument, which is shared
     for all we care.  */
  if (TREE_CODE (decl) == PARM_DECL
      && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
      && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
                                GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
         == NULL)
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Dummy procedures aren't considered variables by OpenMP, thus are
     disallowed in OpenMP clauses.  They are represented as PARM_DECLs
     in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
     to avoid complaining about their uses with default(none).  */
  if (TREE_CODE (decl) == PARM_DECL
      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;

  /* COMMON and EQUIVALENCE decls are shared.  They
     are only referenced through DECL_VALUE_EXPR of the variables
     contained in them.  If those are privatized, they will not be
     gimplified to the COMMON or EQUIVALENCE decls.  */
  if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

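/* Illustrative note (hypothetical fragment): an assumed-size dummy such
   as "integer :: a(*)" matches the PARM_DECL / GFC_ARRAY_UNKNOWN test
   above (its last upper bound is NULL), so it is treated as
   predetermined shared and need not be listed even under
   default(none).  */
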
/* Return true if DECL in private clause needs
   OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause.  */
bool
gfc_omp_private_outer_ref (tree decl)
{
  tree type = TREE_TYPE (decl);

  if (GFC_DESCRIPTOR_TYPE_P (type)
      && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
    return true;

  return false;
}

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
{
  tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
  stmtblock_t block, cond_block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return NULL;

  gcc_assert (outer != NULL);
  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
              || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);

  /* Allocatable arrays in PRIVATE clauses need to be set to
     "not currently allocated" allocation status if outer
     array is "not currently allocated", otherwise should be allocated.  */
  gfc_start_block (&block);

  gfc_init_block (&cond_block);

  gfc_add_modify (&cond_block, decl, outer);
  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (decl, rank);
  size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
                      gfc_conv_descriptor_lbound_get (decl, rank));
  size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
                      gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
                        gfc_conv_descriptor_stride_get (decl, rank));
  esize = fold_convert (gfc_array_index_type,
                        TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
  ptr = gfc_allocate_array_with_status (&cond_block,
                                        build_int_cst (pvoid_type_node, 0),
                                        size, NULL, NULL);
  gfc_conv_descriptor_data_set (&cond_block, decl, ptr);
  then_b = gfc_finish_block (&cond_block);

  gfc_init_block (&cond_block);
  gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
  else_b = gfc_finish_block (&cond_block);

  cond = fold_build2 (NE_EXPR, boolean_type_node,
                      fold_convert (pvoid_type_node,
                                    gfc_conv_descriptor_data_get (outer)),
                      null_pointer_node);
  gfc_add_expr_to_block (&block, build3 (COND_EXPR, void_type_node,
                         cond, then_b, else_b));

  return gfc_finish_block (&block);
}

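/* Illustrative note (hypothetical fragment): for

       integer, allocatable :: a(:)
       !$omp parallel private(a)

   the constructor built above allocates the private copy with the size
   taken from the outer descriptor when the outer array is currently
   allocated, and otherwise just marks the private copy's data pointer
   as NULL, i.e. "not currently allocated".  */
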
/* Build and return code for a copy constructor from SRC to DEST.  */

tree
gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
  stmtblock_t block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return build2_v (MODIFY_EXPR, dest, src);

  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);

  /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
     and copied from SRC.  */
  gfc_start_block (&block);

  gfc_add_modify (&block, dest, src);
  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (dest, rank);
  size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
                      gfc_conv_descriptor_lbound_get (dest, rank));
  size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
                      gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
                        gfc_conv_descriptor_stride_get (dest, rank));
  esize = fold_convert (gfc_array_index_type,
                        TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
  ptr = gfc_allocate_array_with_status (&block,
                                        build_int_cst (pvoid_type_node, 0),
                                        size, NULL, NULL);
  gfc_conv_descriptor_data_set (&block, dest, ptr);
  call = build_call_expr_loc (input_location,
                              built_in_decls[BUILT_IN_MEMCPY], 3, ptr,
                              fold_convert (pvoid_type_node,
                                            gfc_conv_descriptor_data_get (src)),
                              size);
  gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));

  return gfc_finish_block (&block);
}

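/* Illustrative note (hypothetical fragment): under
   "!$omp parallel firstprivate(a)" with allocatable A, the code above
   allocates the private descriptor's data and memcpy's the payload from
   the shared array, using element count times element size as computed
   from the descriptor bounds, so each thread starts with a copy of the
   original contents.  */
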
/* Similarly, except use an assignment operator instead.  */

tree
gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), rank, size, esize, call;
  stmtblock_t block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return build2_v (MODIFY_EXPR, dest, src);

  /* Handle copying allocatable arrays.  */
  gfc_start_block (&block);

  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (dest, rank);
  size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
                      gfc_conv_descriptor_lbound_get (dest, rank));
  size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
                      gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
                        gfc_conv_descriptor_stride_get (dest, rank));
  esize = fold_convert (gfc_array_index_type,
                        TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
  call = build_call_expr_loc (input_location,
                              built_in_decls[BUILT_IN_MEMCPY], 3,
                              fold_convert (pvoid_type_node,
                                            gfc_conv_descriptor_data_get (dest)),
                              fold_convert (pvoid_type_node,
                                            gfc_conv_descriptor_data_get (src)),
                              size);
  gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));

  return gfc_finish_block (&block);
}

/* Build and return code destructing DECL.  Return NULL if nothing
   to be done.  */

tree
gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED, tree decl)
{
  tree type = TREE_TYPE (decl);

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return NULL;

  /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
     to be deallocated if they were allocated.  */
  return gfc_trans_dealloc_allocated (decl);
}

/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
gfc_omp_disregard_value_expr (tree decl, bool shared)
{
  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
          && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
          && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
        {
          /* If variable in COMMON or EQUIVALENCE is privatized, return
             true, as just that variable is supposed to be privatized,
             not the whole COMMON or whole EQUIVALENCE.
             For shared variables in COMMON or EQUIVALENCE, let them be
             gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
             from the same COMMON or EQUIVALENCE just one sharing of the
             whole COMMON or EQUIVALENCE is enough.  */
          return ! shared;
        }
    }

  if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
    return ! shared;

  return false;
}

/* Return true if DECL that is shared iff SHARED is true should
   be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
   flag set.  */

bool
gfc_omp_private_debug_clause (tree decl, bool shared)
{
  if (GFC_DECL_CRAY_POINTEE (decl))
    return true;

  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
          && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
          && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
        return shared;
    }

  return false;
}

/* Register language specific type size variables as potentially OpenMP
   firstprivate variables.  */

void
gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
    {
      int r;

      gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
      for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
        {
          omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
          omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
          omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
        }
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
    }
}


static inline tree
gfc_trans_add_clause (tree node, tree tail)
{
  OMP_CLAUSE_CHAIN (node) = tail;
  return node;
}

static tree
gfc_trans_omp_variable (gfc_symbol *sym)
{
  tree t = gfc_get_symbol_decl (sym);
  tree parent_decl;
  int parent_flag;
  bool return_value;
  bool alternate_entry;
  bool entry_master;

  return_value = sym->attr.function && sym->result == sym;
  alternate_entry = sym->attr.function && sym->attr.entry
                    && sym->result == sym;
  entry_master = sym->attr.result
                 && sym->ns->proc_name->attr.entry_master
                 && !gfc_return_by_reference (sym->ns->proc_name);
  parent_decl = DECL_CONTEXT (current_function_decl);

  if ((t == parent_decl && return_value)
       || (sym->ns && sym->ns->proc_name
           && sym->ns->proc_name->backend_decl == parent_decl
           && (alternate_entry || entry_master)))
    parent_flag = 1;
  else
    parent_flag = 0;

  /* Special case for assigning the return value of a function.
     Self recursive functions must have an explicit return value.  */
  if (return_value && (t == current_function_decl || parent_flag))
    t = gfc_get_fake_result_decl (sym, parent_flag);

  /* Similarly for alternate entry points.  */
  else if (alternate_entry
           && (sym->ns->proc_name->backend_decl == current_function_decl
               || parent_flag))
    {
      gfc_entry_list *el = NULL;

      for (el = sym->ns->entries; el; el = el->next)
        if (sym == el->sym)
          {
            t = gfc_get_fake_result_decl (sym, parent_flag);
            break;
          }
    }

  else if (entry_master
           && (sym->ns->proc_name->backend_decl == current_function_decl
               || parent_flag))
    t = gfc_get_fake_result_decl (sym, parent_flag);

  return t;
}

static tree
gfc_trans_omp_variable_list (enum omp_clause_code code, gfc_namelist *namelist,
                             tree list)
{
  for (; namelist != NULL; namelist = namelist->next)
    if (namelist->sym->attr.referenced)
      {
        tree t = gfc_trans_omp_variable (namelist->sym);
        if (t != error_mark_node)
          {
            tree node = build_omp_clause (input_location, code);
            OMP_CLAUSE_DECL (node) = t;
            list = gfc_trans_add_clause (node, list);
          }
      }
  return list;
}

static void
gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
{
  gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
  gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
  gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
  gfc_expr *e1, *e2, *e3, *e4;
  gfc_ref *ref;
  tree decl, backend_decl, stmt;
  locus old_loc = gfc_current_locus;
  const char *iname;
  gfc_try t;

  decl = OMP_CLAUSE_DECL (c);
  gfc_current_locus = where;

  /* Create a fake symbol for init value.  */
  memset (&init_val_sym, 0, sizeof (init_val_sym));
  init_val_sym.ns = sym->ns;
  init_val_sym.name = sym->name;
  init_val_sym.ts = sym->ts;
  init_val_sym.attr.referenced = 1;
  init_val_sym.declared_at = where;
  init_val_sym.attr.flavor = FL_VARIABLE;
  backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
  init_val_sym.backend_decl = backend_decl;

  /* Create a fake symbol for the outer array reference.  */
  outer_sym = *sym;
  outer_sym.as = gfc_copy_array_spec (sym->as);
  outer_sym.attr.dummy = 0;
  outer_sym.attr.result = 0;
  outer_sym.attr.flavor = FL_VARIABLE;
  outer_sym.backend_decl = create_tmp_var_raw (TREE_TYPE (decl), NULL);

  /* Create fake symtrees for it.  */
  symtree1 = gfc_new_symtree (&root1, sym->name);
  symtree1->n.sym = sym;
  gcc_assert (symtree1 == root1);

  symtree2 = gfc_new_symtree (&root2, sym->name);
  symtree2->n.sym = &init_val_sym;
  gcc_assert (symtree2 == root2);

  symtree3 = gfc_new_symtree (&root3, sym->name);
  symtree3->n.sym = &outer_sym;
  gcc_assert (symtree3 == root3);

  /* Create expressions.  */
  e1 = gfc_get_expr ();
  e1->expr_type = EXPR_VARIABLE;
  e1->where = where;
  e1->symtree = symtree1;
  e1->ts = sym->ts;
  e1->ref = ref = gfc_get_ref ();
  ref->type = REF_ARRAY;
  ref->u.ar.where = where;
  ref->u.ar.as = sym->as;
  ref->u.ar.type = AR_FULL;
  ref->u.ar.dimen = 0;
  t = gfc_resolve_expr (e1);
  gcc_assert (t == SUCCESS);

  e2 = gfc_get_expr ();
  e2->expr_type = EXPR_VARIABLE;
  e2->where = where;
  e2->symtree = symtree2;
  e2->ts = sym->ts;
  t = gfc_resolve_expr (e2);
  gcc_assert (t == SUCCESS);

  e3 = gfc_copy_expr (e1);
  e3->symtree = symtree3;
  t = gfc_resolve_expr (e3);
  gcc_assert (t == SUCCESS);

  iname = NULL;
  switch (OMP_CLAUSE_REDUCTION_CODE (c))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      e4 = gfc_add (e3, e1);
      break;
    case MULT_EXPR:
      e4 = gfc_multiply (e3, e1);
      break;
    case TRUTH_ANDIF_EXPR:
      e4 = gfc_and (e3, e1);
      break;
    case TRUTH_ORIF_EXPR:
      e4 = gfc_or (e3, e1);
      break;
    case EQ_EXPR:
      e4 = gfc_eqv (e3, e1);
      break;
    case NE_EXPR:
      e4 = gfc_neqv (e3, e1);
      break;
    case MIN_EXPR:
      iname = "min";
      break;
    case MAX_EXPR:
      iname = "max";
      break;
    case BIT_AND_EXPR:
      iname = "iand";
      break;
    case BIT_IOR_EXPR:
      iname = "ior";
      break;
    case BIT_XOR_EXPR:
      iname = "ieor";
      break;
    default:
      gcc_unreachable ();
    }
  if (iname != NULL)
    {
      memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
      intrinsic_sym.ns = sym->ns;
      intrinsic_sym.name = iname;
      intrinsic_sym.ts = sym->ts;
      intrinsic_sym.attr.referenced = 1;
      intrinsic_sym.attr.intrinsic = 1;
      intrinsic_sym.attr.function = 1;
      intrinsic_sym.result = &intrinsic_sym;
      intrinsic_sym.declared_at = where;

      symtree4 = gfc_new_symtree (&root4, iname);
      symtree4->n.sym = &intrinsic_sym;
      gcc_assert (symtree4 == root4);

      e4 = gfc_get_expr ();
      e4->expr_type = EXPR_FUNCTION;
      e4->where = where;
      e4->symtree = symtree4;
      e4->value.function.isym = gfc_find_function (iname);
      e4->value.function.actual = gfc_get_actual_arglist ();
      e4->value.function.actual->expr = e3;
      e4->value.function.actual->next = gfc_get_actual_arglist ();
      e4->value.function.actual->next->expr = e1;
    }
  /* e1 and e3 have been stored as arguments of e4, avoid sharing.  */
  e1 = gfc_copy_expr (e1);
  e3 = gfc_copy_expr (e3);
  t = gfc_resolve_expr (e4);
  gcc_assert (t == SUCCESS);

  /* Create the init statement list.  */
  pushlevel (0);
  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
    {
      /* If decl is an allocatable array, it needs to be allocated
         with the same bounds as the outer var.  */
      tree type = TREE_TYPE (decl), rank, size, esize, ptr;
      stmtblock_t block;

      gfc_start_block (&block);

      gfc_add_modify (&block, decl, outer_sym.backend_decl);
      rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (decl, rank);
      size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
                          gfc_conv_descriptor_lbound_get (decl, rank));
      size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
                          gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
        size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
                            gfc_conv_descriptor_stride_get (decl, rank));
      esize = fold_convert (gfc_array_index_type,
                            TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
      size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
      ptr = gfc_allocate_array_with_status (&block,
                                            build_int_cst (pvoid_type_node, 0),
                                            size, NULL, NULL);
      gfc_conv_descriptor_data_set (&block, decl, ptr);
      gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false,
                             false));
      stmt = gfc_finish_block (&block);
    }
  else
    stmt = gfc_trans_assignment (e1, e2, false, false);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  OMP_CLAUSE_REDUCTION_INIT (c) = stmt;

  /* Create the merge statement list.  */
  pushlevel (0);
  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
    {
      /* If decl is an allocatable array, it needs to be deallocated
         afterwards.  */
      stmtblock_t block;

      gfc_start_block (&block);
      gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false,
                             true));
      gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl));
      stmt = gfc_finish_block (&block);
    }
  else
    stmt = gfc_trans_assignment (e3, e4, false, true);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;

  /* And stick the placeholder VAR_DECL into the clause as well.  */
  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_sym.backend_decl;

  gfc_current_locus = old_loc;

  gfc_free_expr (e1);
  gfc_free_expr (e2);
  gfc_free_expr (e3);
  gfc_free_expr (e4);
  gfc_free (symtree1);
  gfc_free (symtree2);
  gfc_free (symtree3);
  if (symtree4)
    gfc_free (symtree4);
  gfc_free_array_spec (outer_sym.as);
}

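/* Illustrative note (hypothetical fragment): for "!$omp do reduction(+:a)"
   where A is an array, the routine above builds the REDUCTION_INIT
   statement (private copy assigned the neutral value, allocated first if
   A is allocatable) and the REDUCTION_MERGE statement (outer = outer +
   private, or a call to MIN/MAX/IAND/IOR/IEOR for intrinsic reductions),
   by constructing temporary gfc_symbols and resolved Fortran
   expressions and translating them as ordinary assignments.  */
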
static tree
gfc_trans_omp_reduction_list (gfc_namelist *namelist, tree list,
                              enum tree_code reduction_code, locus where)
{
  for (; namelist != NULL; namelist = namelist->next)
    if (namelist->sym->attr.referenced)
      {
        tree t = gfc_trans_omp_variable (namelist->sym);
        if (t != error_mark_node)
          {
            tree node = build_omp_clause (where.lb->location,
                                          OMP_CLAUSE_REDUCTION);
            OMP_CLAUSE_DECL (node) = t;
            OMP_CLAUSE_REDUCTION_CODE (node) = reduction_code;
            if (namelist->sym->attr.dimension)
              gfc_trans_omp_array_reduction (node, namelist->sym, where);
            list = gfc_trans_add_clause (node, list);
          }
      }
  return list;
}

static tree
gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
                       locus where)
{
  tree omp_clauses = NULL_TREE, chunk_size, c;
  int list;
  enum omp_clause_code clause_code;
  gfc_se se;

  if (clauses == NULL)
    return NULL_TREE;

  for (list = 0; list < OMP_LIST_NUM; list++)
    {
      gfc_namelist *n = clauses->lists[list];

      if (n == NULL)
        continue;
      if (list >= OMP_LIST_REDUCTION_FIRST
          && list <= OMP_LIST_REDUCTION_LAST)
        {
          enum tree_code reduction_code;
          switch (list)
            {
            case OMP_LIST_PLUS:
              reduction_code = PLUS_EXPR;
              break;
            case OMP_LIST_MULT:
              reduction_code = MULT_EXPR;
              break;
            case OMP_LIST_SUB:
              reduction_code = MINUS_EXPR;
              break;
            case OMP_LIST_AND:
              reduction_code = TRUTH_ANDIF_EXPR;
              break;
            case OMP_LIST_OR:
              reduction_code = TRUTH_ORIF_EXPR;
              break;
            case OMP_LIST_EQV:
              reduction_code = EQ_EXPR;
              break;
            case OMP_LIST_NEQV:
              reduction_code = NE_EXPR;
              break;
            case OMP_LIST_MAX:
              reduction_code = MAX_EXPR;
              break;
            case OMP_LIST_MIN:
              reduction_code = MIN_EXPR;
              break;
            case OMP_LIST_IAND:
              reduction_code = BIT_AND_EXPR;
              break;
            case OMP_LIST_IOR:
              reduction_code = BIT_IOR_EXPR;
              break;
            case OMP_LIST_IEOR:
              reduction_code = BIT_XOR_EXPR;
              break;
            default:
              gcc_unreachable ();
            }
          omp_clauses
            = gfc_trans_omp_reduction_list (n, omp_clauses, reduction_code,
                                            where);
          continue;
        }
      switch (list)
        {
        case OMP_LIST_PRIVATE:
          clause_code = OMP_CLAUSE_PRIVATE;
          goto add_clause;
        case OMP_LIST_SHARED:
          clause_code = OMP_CLAUSE_SHARED;
          goto add_clause;
        case OMP_LIST_FIRSTPRIVATE:
          clause_code = OMP_CLAUSE_FIRSTPRIVATE;
          goto add_clause;
        case OMP_LIST_LASTPRIVATE:
          clause_code = OMP_CLAUSE_LASTPRIVATE;
          goto add_clause;
        case OMP_LIST_COPYIN:
          clause_code = OMP_CLAUSE_COPYIN;
          goto add_clause;
        case OMP_LIST_COPYPRIVATE:
          clause_code = OMP_CLAUSE_COPYPRIVATE;
          /* FALLTHROUGH */
        add_clause:
          omp_clauses
            = gfc_trans_omp_variable_list (clause_code, n, omp_clauses);
          break;
        default:
          break;
        }
    }

  if (clauses->if_expr)
    {
      tree if_var;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->if_expr);
      gfc_add_block_to_block (block, &se.pre);
      if_var = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
      OMP_CLAUSE_IF_EXPR (c) = if_var;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->num_threads)
    {
      tree num_threads;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->num_threads);
      gfc_add_block_to_block (block, &se.pre);
      num_threads = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
      OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  chunk_size = NULL_TREE;
  if (clauses->chunk_size)
    {
      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->chunk_size);
      gfc_add_block_to_block (block, &se.pre);
      chunk_size = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);
    }

  if (clauses->sched_kind != OMP_SCHED_NONE)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
      OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
      switch (clauses->sched_kind)
        {
        case OMP_SCHED_STATIC:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
          break;
        case OMP_SCHED_DYNAMIC:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
          break;
        case OMP_SCHED_GUIDED:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
          break;
        case OMP_SCHED_RUNTIME:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
          break;
        case OMP_SCHED_AUTO:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
          break;
        default:
          gcc_unreachable ();
        }
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
      switch (clauses->default_sharing)
        {
        case OMP_DEFAULT_NONE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
          break;
        case OMP_DEFAULT_SHARED:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
          break;
        case OMP_DEFAULT_PRIVATE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
          break;
        case OMP_DEFAULT_FIRSTPRIVATE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          break;
        default:
          gcc_unreachable ();
        }
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->nowait)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->ordered)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->untied)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->collapse)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
      OMP_CLAUSE_COLLAPSE_EXPR (c) = build_int_cst (NULL, clauses->collapse);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  return omp_clauses;
}

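/* Illustrative note (hypothetical fragment): a directive such as

       !$omp parallel do if (n > 1) num_threads (4) schedule (dynamic, 8)

   reaches the routine above as a gfc_omp_clauses structure; the IF,
   NUM_THREADS and chunk-size expressions are evaluated into temporaries
   in BLOCK, and the result is a chain of OMP_CLAUSE_* trees that is
   attached to the corresponding OMP_* statement.  */
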
/* Like gfc_trans_code, but force creation of a BIND_EXPR around it.  */

static tree
gfc_trans_omp_code (gfc_code *code, bool force_empty)
{
  tree stmt;

  pushlevel (0);
  stmt = gfc_trans_code (code);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt) || force_empty)
        {
          tree block = poplevel (1, 0, 0);
          stmt = build3_v (BIND_EXPR, NULL, stmt, block);
        }
      else
        poplevel (0, 0, 0);
    }
  else
    poplevel (0, 0, 0);
  return stmt;
}


static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);

static tree
gfc_trans_omp_atomic (gfc_code *code)
{
  gfc_se lse;
  gfc_se rse;
  gfc_expr *expr2, *e;
  gfc_symbol *var;
  stmtblock_t block;
  tree lhsaddr, type, rhs, x;
  enum tree_code op = ERROR_MARK;
  bool var_on_left = false;

  code = code->block->next;
  gcc_assert (code->op == EXEC_ASSIGN);
  gcc_assert (code->next == NULL);
  var = code->expr1->symtree->n.sym;

  gfc_init_se (&lse, NULL);
  gfc_init_se (&rse, NULL);
  gfc_start_block (&block);

  gfc_conv_expr (&lse, code->expr1);
  gfc_add_block_to_block (&block, &lse.pre);
  type = TREE_TYPE (lse.expr);
  lhsaddr = gfc_build_addr_expr (NULL, lse.expr);

  expr2 = code->expr2;
  if (expr2->expr_type == EXPR_FUNCTION
      && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
    expr2 = expr2->value.function.actual->expr;

  if (expr2->expr_type == EXPR_OP)
    {
      gfc_expr *e;
      switch (expr2->value.op.op)
        {
        case INTRINSIC_PLUS:
          op = PLUS_EXPR;
          break;
        case INTRINSIC_TIMES:
          op = MULT_EXPR;
          break;
        case INTRINSIC_MINUS:
          op = MINUS_EXPR;
          break;
        case INTRINSIC_DIVIDE:
          if (expr2->ts.type == BT_INTEGER)
            op = TRUNC_DIV_EXPR;
          else
            op = RDIV_EXPR;
          break;
        case INTRINSIC_AND:
          op = TRUTH_ANDIF_EXPR;
          break;
        case INTRINSIC_OR:
          op = TRUTH_ORIF_EXPR;
          break;
        case INTRINSIC_EQV:
          op = EQ_EXPR;
          break;
        case INTRINSIC_NEQV:
          op = NE_EXPR;
          break;
        default:
          gcc_unreachable ();
        }
      e = expr2->value.op.op1;
      if (e->expr_type == EXPR_FUNCTION
          && e->value.function.isym->id == GFC_ISYM_CONVERSION)
        e = e->value.function.actual->expr;
      if (e->expr_type == EXPR_VARIABLE
          && e->symtree != NULL
          && e->symtree->n.sym == var)
        {
          expr2 = expr2->value.op.op2;
          var_on_left = true;
        }
      else
        {
          e = expr2->value.op.op2;
          if (e->expr_type == EXPR_FUNCTION
              && e->value.function.isym->id == GFC_ISYM_CONVERSION)
            e = e->value.function.actual->expr;
          gcc_assert (e->expr_type == EXPR_VARIABLE
                      && e->symtree != NULL
                      && e->symtree->n.sym == var);
          expr2 = expr2->value.op.op1;
          var_on_left = false;
        }
      gfc_conv_expr (&rse, expr2);
      gfc_add_block_to_block (&block, &rse.pre);
    }
  else
    {
      gcc_assert (expr2->expr_type == EXPR_FUNCTION);
      switch (expr2->value.function.isym->id)
        {
        case GFC_ISYM_MIN:
          op = MIN_EXPR;
          break;
        case GFC_ISYM_MAX:
          op = MAX_EXPR;
          break;
        case GFC_ISYM_IAND:
          op = BIT_AND_EXPR;
          break;
        case GFC_ISYM_IOR:
          op = BIT_IOR_EXPR;
          break;
        case GFC_ISYM_IEOR:
          op = BIT_XOR_EXPR;
          break;
        default:
          gcc_unreachable ();
        }
      e = expr2->value.function.actual->expr;
      gcc_assert (e->expr_type == EXPR_VARIABLE
                  && e->symtree != NULL
                  && e->symtree->n.sym == var);

      gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
      gfc_add_block_to_block (&block, &rse.pre);
      if (expr2->value.function.actual->next->next != NULL)
        {
          tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
          gfc_actual_arglist *arg;

          gfc_add_modify (&block, accum, rse.expr);
          for (arg = expr2->value.function.actual->next->next; arg;
               arg = arg->next)
            {
              gfc_init_block (&rse.pre);
              gfc_conv_expr (&rse, arg->expr);
              gfc_add_block_to_block (&block, &rse.pre);
              x = fold_build2 (op, TREE_TYPE (accum), accum, rse.expr);
              gfc_add_modify (&block, accum, x);
            }

          rse.expr = accum;
        }

      expr2 = expr2->value.function.actual->next->expr;
    }

  lhsaddr = save_expr (lhsaddr);
  rhs = gfc_evaluate_now (rse.expr, &block);
  x = convert (TREE_TYPE (rhs), build_fold_indirect_ref_loc (input_location,
                                                             lhsaddr));

  if (var_on_left)
    x = fold_build2 (op, TREE_TYPE (rhs), x, rhs);
  else
    x = fold_build2 (op, TREE_TYPE (rhs), rhs, x);

  if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
      && TREE_CODE (type) != COMPLEX_TYPE)
    x = fold_build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (rhs)), x);

  x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
  gfc_add_expr_to_block (&block, x);

  gfc_add_block_to_block (&block, &lse.pre);
  gfc_add_block_to_block (&block, &rse.pre);

  return gfc_finish_block (&block);
}

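/* Illustrative note (hypothetical fragment): a statement pair such as

       !$omp atomic
       x = x + y * z

   arrives here as a single EXEC_ASSIGN; the code above classifies the
   operator (or the MIN/MAX/IAND/IOR/IEOR intrinsic), determines on which
   side X appears, and emits an OMP_ATOMIC node whose operands are the
   address of X and the combined right-hand side.  */
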
static tree
gfc_trans_omp_barrier (void)
{
  tree decl = built_in_decls [BUILT_IN_GOMP_BARRIER];
  return build_call_expr_loc (input_location, decl, 0);
}

static tree
gfc_trans_omp_critical (gfc_code *code)
{
  tree name = NULL_TREE, stmt;
  if (code->ext.omp_name != NULL)
    name = get_identifier (code->ext.omp_name);
  stmt = gfc_trans_code (code->block->next);
  return build2 (OMP_CRITICAL, void_type_node, stmt, name);
}

static tree
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
                  gfc_omp_clauses *do_clauses, tree par_clauses)
{
  gfc_se se;
  tree dovar, stmt, from, to, step, type, init, cond, incr;
  tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
  stmtblock_t block;
  stmtblock_t body;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;
  int i, collapse = clauses->collapse;
  tree dovar_init = NULL_TREE;

  if (collapse <= 0)
    collapse = 1;

  code = code->block->next;
  gcc_assert (code->op == EXEC_DO);

  init = make_tree_vec (collapse);
  cond = make_tree_vec (collapse);
  incr = make_tree_vec (collapse);

  if (pblock == NULL)
    {
      gfc_start_block (&block);
      pblock = &block;
    }

  omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);

  for (i = 0; i < collapse; i++)
    {
      int simple = 0;
      int dovar_found = 0;
      tree dovar_decl;

      if (clauses)
        {
          gfc_namelist *n;
          for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
               n = n->next)
            if (code->ext.iterator->var->symtree->n.sym == n->sym)
              break;
          if (n != NULL)
            dovar_found = 1;
          else if (n == NULL)
            for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
              if (code->ext.iterator->var->symtree->n.sym == n->sym)
                break;
          if (n != NULL)
            dovar_found++;
        }

      /* Evaluate all the expressions in the iterator.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->ext.iterator->var);
      gfc_add_block_to_block (pblock, &se.pre);
      dovar = se.expr;
      type = TREE_TYPE (dovar);
      gcc_assert (TREE_CODE (type) == INTEGER_TYPE);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->start);
      gfc_add_block_to_block (pblock, &se.pre);
      from = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->end);
      gfc_add_block_to_block (pblock, &se.pre);
      to = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->step);
      gfc_add_block_to_block (pblock, &se.pre);
      step = gfc_evaluate_now (se.expr, pblock);
      dovar_decl = dovar;

      /* Special case simple loops.  */
      if (TREE_CODE (dovar) == VAR_DECL)
        {
          if (integer_onep (step))
            simple = 1;
          else if (tree_int_cst_equal (step, integer_minus_one_node))
            simple = -1;
        }
      else
        dovar_decl
          = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym);

      /* Loop body.  */
      if (simple)
        {
          TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
          TREE_VEC_ELT (cond, i) = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR,
                                                boolean_type_node, dovar, to);
          TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, dovar, step);
          TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, dovar,
                                                TREE_VEC_ELT (incr, i));
        }
      else
        {
          /* STEP is not 1 or -1.  Use:
             for (count = 0; count < (to + step - from) / step; count++)
               {
                 dovar = from + count * step;
                 body;
               cycle_label:;
               }  */
          tmp = fold_build2 (MINUS_EXPR, type, step, from);
          tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
          tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
          tmp = gfc_evaluate_now (tmp, pblock);
          count = gfc_create_var (type, "count");
          TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
                                             build_int_cst (type, 0));
          TREE_VEC_ELT (cond, i) = fold_build2 (LT_EXPR, boolean_type_node,
                                                count, tmp);
          TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, count,
                                                build_int_cst (type, 1));
          TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type,
                                                count, TREE_VEC_ELT (incr, i));

          /* Initialize DOVAR.  */
          tmp = fold_build2 (MULT_EXPR, type, count, step);
          tmp = fold_build2 (PLUS_EXPR, type, from, tmp);
          dovar_init = tree_cons (dovar, tmp, dovar_init);
        }

      if (!dovar_found)
        {
          tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
          OMP_CLAUSE_DECL (tmp) = dovar_decl;
          omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
        }
      else if (dovar_found == 2)
        {
          tree c = NULL;

          tmp = NULL;
          if (!simple)
            {
              /* If dovar is lastprivate, but different counter is used,
                 dovar += step needs to be added to
                 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
                 will have the value on entry of the last loop, rather
                 than value after iterator increment.  */
              tmp = gfc_evaluate_now (step, pblock);
              tmp = fold_build2 (PLUS_EXPR, type, dovar, tmp);
              tmp = fold_build2 (MODIFY_EXPR, type, dovar, tmp);
              for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                    && OMP_CLAUSE_DECL (c) == dovar_decl)
                  {
                    OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
                    break;
                  }
            }
          if (c == NULL && par_clauses != NULL)
            {
              for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                    && OMP_CLAUSE_DECL (c) == dovar_decl)
                  {
                    tree l = build_omp_clause (input_location,
                                               OMP_CLAUSE_LASTPRIVATE);
                    OMP_CLAUSE_DECL (l) = dovar_decl;
                    OMP_CLAUSE_CHAIN (l) = omp_clauses;
                    OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
                    omp_clauses = l;
                    OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
                    break;
                  }
            }
          gcc_assert (simple || c != NULL);
        }
      if (!simple)
        {
          tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
          OMP_CLAUSE_DECL (tmp) = count;
          omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
        }

      if (i + 1 < collapse)
        code = code->block->next;
    }

  if (pblock != &block)
    {
      pushlevel (0);
      gfc_start_block (&block);
    }

  gfc_start_block (&body);

  dovar_init = nreverse (dovar_init);
  while (dovar_init)
    {
      gfc_add_modify (&body, TREE_PURPOSE (dovar_init),
                      TREE_VALUE (dovar_init));
      dovar_init = TREE_CHAIN (dovar_init);
    }

  /* Cycle statement is implemented with a goto.  Exit statement must not be
     present for this loop.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);

  /* Put these labels where they can be found later.  We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already
     used).  cycle_label goes in TREE_PURPOSE (backend_decl), exit
     label in TREE_VALUE (backend_decl).  */

  code->block->backend_decl = tree_cons (cycle_label, NULL, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_omp_code (code->block->next, true);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* End of loop body.  */
  stmt = make_node (OMP_FOR);

  TREE_TYPE (stmt) = void_type_node;
  OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
  OMP_FOR_CLAUSES (stmt) = omp_clauses;
  OMP_FOR_INIT (stmt) = init;
  OMP_FOR_COND (stmt) = cond;
  OMP_FOR_INCR (stmt) = incr;
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}

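/* Illustrative note (hypothetical fragment): a loop nest such as

       !$omp do collapse(2) schedule(static)
       do j = 1, n
         do i = 1, m, 3
           ...

   produces one OMP_FOR whose INIT/COND/INCR vectors have one slot per
   collapsed loop; the inner loop with step 3 is not "simple" and is
   rewritten in terms of an artificial "count" variable as described in
   the comment inside the routine above.  */
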
static tree
gfc_trans_omp_flush (void)
{
  tree decl = built_in_decls [BUILT_IN_SYNCHRONIZE];
  return build_call_expr_loc (input_location, decl, 0);
}

static tree
gfc_trans_omp_master (gfc_code *code)
{
  tree stmt = gfc_trans_code (code->block->next);
  if (IS_EMPTY_STMT (stmt))
    return stmt;
  return build1_v (OMP_MASTER, stmt);
}

static tree
gfc_trans_omp_ordered (gfc_code *code)
{
  return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
}

static tree
gfc_trans_omp_parallel (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_parallel_do (gfc_code *code)
{
  stmtblock_t block, *pblock = NULL;
  gfc_omp_clauses parallel_clauses, do_clauses;
  tree stmt, omp_clauses = NULL_TREE;

  gfc_start_block (&block);

  memset (&do_clauses, 0, sizeof (do_clauses));
  if (code->ext.omp_clauses != NULL)
    {
      memcpy (&parallel_clauses, code->ext.omp_clauses,
              sizeof (parallel_clauses));
      do_clauses.sched_kind = parallel_clauses.sched_kind;
      do_clauses.chunk_size = parallel_clauses.chunk_size;
      do_clauses.ordered = parallel_clauses.ordered;
      do_clauses.collapse = parallel_clauses.collapse;
      parallel_clauses.sched_kind = OMP_SCHED_NONE;
      parallel_clauses.chunk_size = NULL;
      parallel_clauses.ordered = false;
      parallel_clauses.collapse = 0;
      omp_clauses = gfc_trans_omp_clauses (&block, &parallel_clauses,
                                           code->loc);
    }
  do_clauses.nowait = true;
  if (!do_clauses.ordered && do_clauses.sched_kind != OMP_SCHED_STATIC)
    pblock = &block;
  else
    pushlevel (0);
  stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_parallel_sections (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses section_clauses;
  tree stmt, omp_clauses;

  memset (&section_clauses, 0, sizeof (section_clauses));
  section_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel (0);
  stmt = gfc_trans_omp_sections (code, &section_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_parallel_workshare (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses workshare_clauses;
  tree stmt, omp_clauses;

  memset (&workshare_clauses, 0, sizeof (workshare_clauses));
  workshare_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel (0);
  stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
{
  stmtblock_t block, body;
  tree omp_clauses, stmt;
  bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;

  gfc_start_block (&block);

  omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);

  gfc_init_block (&body);
  for (code = code->block; code; code = code->block)
    {
      /* Last section is special because of lastprivate, so even if it
         is empty, chain it in.  */
      stmt = gfc_trans_omp_code (code->next,
                                 has_lastprivate && code->block == NULL);
      if (! IS_EMPTY_STMT (stmt))
        {
          stmt = build1_v (OMP_SECTION, stmt);
          gfc_add_expr_to_block (&body, stmt);
        }
    }
  stmt = gfc_finish_block (&body);

  stmt = build2 (OMP_SECTIONS, void_type_node, stmt, omp_clauses);
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}

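/* Illustrative note (hypothetical fragment): for

       !$omp sections
       !$omp section
         call work1 ()
       !$omp section
         call work2 ()
       !$omp end sections

   each SECTION body becomes an OMP_SECTION, all of them wrapped in a
   single OMP_SECTIONS statement carrying the translated clause chain.  */
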
static tree
gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
  tree stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2 (OMP_SINGLE, void_type_node, stmt, omp_clauses);
  return stmt;
}

static tree
gfc_trans_omp_task (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2 (OMP_TASK, void_type_node, stmt, omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_taskwait (void)
{
  tree decl = built_in_decls [BUILT_IN_GOMP_TASKWAIT];
  return build_call_expr_loc (input_location, decl, 0);
}

static tree
gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree res, tmp, stmt;
  stmtblock_t block, *pblock = NULL;
  stmtblock_t singleblock;
  int saved_ompws_flags;
  bool singleblock_in_progress = false;
  /* True if previous gfc_code in workshare construct is not workshared.  */
  bool prev_singleunit;

  code = code->block->next;

  pushlevel (0);

  if (!code)
    return build_empty_stmt (input_location);

  gfc_start_block (&block);
  pblock = &block;

  ompws_flags = OMPWS_WORKSHARE_FLAG;
  prev_singleunit = false;

  /* Translate statements one by one to trees until we reach
     the end of the workshare construct.  Adjacent gfc_codes that
     are a single unit of work are clustered and encapsulated in a
     single OMP_SINGLE construct.  */
  for (; code; code = code->next)
    {
      if (code->here != 0)
        {
          res = gfc_trans_label_here (code);
          gfc_add_expr_to_block (pblock, res);
        }

      /* No dependence analysis, use for clauses with wait.
         If this is the last gfc_code, use default omp_clauses.  */
      if (code->next == NULL && clauses->nowait)
        ompws_flags |= OMPWS_NOWAIT;

      /* By default, every gfc_code is a single unit of work.  */
      ompws_flags |= OMPWS_CURR_SINGLEUNIT;
      ompws_flags &= ~OMPWS_SCALARIZER_WS;

      switch (code->op)
        {
        case EXEC_NOP:
          res = NULL_TREE;
          break;

        case EXEC_ASSIGN:
          res = gfc_trans_assign (code);
          break;

        case EXEC_POINTER_ASSIGN:
          res = gfc_trans_pointer_assign (code);
          break;

        case EXEC_INIT_ASSIGN:
          res = gfc_trans_init_assign (code);
          break;

        case EXEC_FORALL:
          res = gfc_trans_forall (code);
          break;

        case EXEC_WHERE:
          res = gfc_trans_where (code);
          break;

        case EXEC_OMP_ATOMIC:
          res = gfc_trans_omp_directive (code);
          break;

        case EXEC_OMP_PARALLEL:
        case EXEC_OMP_PARALLEL_DO:
        case EXEC_OMP_PARALLEL_SECTIONS:
        case EXEC_OMP_PARALLEL_WORKSHARE:
        case EXEC_OMP_CRITICAL:
          saved_ompws_flags = ompws_flags;
          ompws_flags = 0;
          res = gfc_trans_omp_directive (code);
          ompws_flags = saved_ompws_flags;
          break;

        default:
          internal_error ("gfc_trans_omp_workshare(): Bad statement code");
        }

      gfc_set_backend_locus (&code->loc);

      if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
        {
          if (prev_singleunit)
            {
              if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
                /* Add current gfc_code to single block.  */
                gfc_add_expr_to_block (&singleblock, res);
              else
                {
                  /* Finish single block and add it to pblock.  */
                  tmp = gfc_finish_block (&singleblock);
                  tmp = build2 (OMP_SINGLE, void_type_node, tmp, NULL_TREE);
                  gfc_add_expr_to_block (pblock, tmp);
                  /* Add current gfc_code to pblock.  */
                  gfc_add_expr_to_block (pblock, res);
                  singleblock_in_progress = false;
                }
            }
          else
            {
              if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
                {
                  /* Start single block.  */
                  gfc_init_block (&singleblock);
                  gfc_add_expr_to_block (&singleblock, res);
                  singleblock_in_progress = true;
                }
              else
                /* Add the new statement to the block.  */
                gfc_add_expr_to_block (pblock, res);
            }
          prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
        }
    }

  /* Finish remaining SINGLE block, if we were in the middle of one.  */
  if (singleblock_in_progress)
    {
      /* Finish single block and add it to pblock.  */
      tmp = gfc_finish_block (&singleblock);
      tmp = build2 (OMP_SINGLE, void_type_node, tmp,
                    clauses->nowait
                    ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
                    : NULL_TREE);
      gfc_add_expr_to_block (pblock, tmp);
    }

  stmt = gfc_finish_block (pblock);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt))
        {
          tree bindblock = poplevel (1, 0, 0);
          stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
        }
      else
        poplevel (0, 0, 0);
    }
  else
    poplevel (0, 0, 0);

  ompws_flags = 0;
  return stmt;
}

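/* Illustrative note (hypothetical fragment): inside

       !$omp workshare
       a(:) = b(:) + c(:)
       s = 1.0
       !$omp end workshare

   the array assignment may be divided among threads when the scalarizer
   sets OMPWS_SCALARIZER_WS, while the scalar assignment remains a single
   unit of work and is wrapped in an OMP_SINGLE by the loop above.  */
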
tree
gfc_trans_omp_directive (gfc_code *code)
{
  switch (code->op)
    {
    case EXEC_OMP_ATOMIC:
      return gfc_trans_omp_atomic (code);
    case EXEC_OMP_BARRIER:
      return gfc_trans_omp_barrier ();
    case EXEC_OMP_CRITICAL:
      return gfc_trans_omp_critical (code);
    case EXEC_OMP_DO:
      return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses, NULL);
    case EXEC_OMP_FLUSH:
      return gfc_trans_omp_flush ();
    case EXEC_OMP_MASTER:
      return gfc_trans_omp_master (code);
    case EXEC_OMP_ORDERED:
      return gfc_trans_omp_ordered (code);
    case EXEC_OMP_PARALLEL:
      return gfc_trans_omp_parallel (code);
    case EXEC_OMP_PARALLEL_DO:
      return gfc_trans_omp_parallel_do (code);
    case EXEC_OMP_PARALLEL_SECTIONS:
      return gfc_trans_omp_parallel_sections (code);
    case EXEC_OMP_PARALLEL_WORKSHARE:
      return gfc_trans_omp_parallel_workshare (code);
    case EXEC_OMP_SECTIONS:
      return gfc_trans_omp_sections (code, code->ext.omp_clauses);
    case EXEC_OMP_SINGLE:
      return gfc_trans_omp_single (code, code->ext.omp_clauses);
    case EXEC_OMP_TASK:
      return gfc_trans_omp_task (code);
    case EXEC_OMP_TASKWAIT:
      return gfc_trans_omp_taskwait ();
    case EXEC_OMP_WORKSHARE:
      return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
    default:
      gcc_unreachable ();
    }
}