gcc/fortran/trans-openmp.c
1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "gimple-expr.h"
27 #include "gimplify.h" /* For create_tmp_var_raw. */
28 #include "stringpool.h"
29 #include "diagnostic-core.h" /* For internal_error. */
30 #include "gfortran.h"
31 #include "trans.h"
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
36 #include "arith.h"
37 #include "omp-low.h"
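/* Bitmask of OMPWS_* flags describing the state of !$OMP WORKSHARE
   translation.  */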
39 int ompws_flags;
41 /* True if OpenMP should privatize what this DECL points to rather
42 than the DECL itself. */
44 bool
45 gfc_omp_privatize_by_reference (const_tree decl)
47 tree type = TREE_TYPE (decl);
49 if (TREE_CODE (type) == REFERENCE_TYPE
50 && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
51 return true;
53 if (TREE_CODE (type) == POINTER_TYPE)
 55         /* Array POINTER/ALLOCATABLE variables have aggregate types; all user
 56            variables that have POINTER_TYPE and don't have GFC_POINTER_TYPE_P
 57            set are supposed to be privatized by reference.  */
58 if (GFC_POINTER_TYPE_P (type))
59 return false;
61 if (!DECL_ARTIFICIAL (decl)
62 && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
63 return true;
65 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
66 by the frontend. */
67 if (DECL_LANG_SPECIFIC (decl)
68 && GFC_DECL_SAVED_DESCRIPTOR (decl))
69 return true;
72 return false;
75 /* True if OpenMP sharing attribute of DECL is predetermined. */
77 enum omp_clause_default_kind
78 gfc_omp_predetermined_sharing (tree decl)
80 if (DECL_ARTIFICIAL (decl)
81 && ! GFC_DECL_RESULT (decl)
82 && ! (DECL_LANG_SPECIFIC (decl)
83 && GFC_DECL_SAVED_DESCRIPTOR (decl)))
84 return OMP_CLAUSE_DEFAULT_SHARED;
 86   /* Cray pointees shouldn't be listed in any clauses and should be
 87      gimplified to a dereference of the corresponding Cray pointer.
 88      Make them all private, so that they are emitted in the debug
 89      information.  */
90 if (GFC_DECL_CRAY_POINTEE (decl))
91 return OMP_CLAUSE_DEFAULT_PRIVATE;
93 /* Assumed-size arrays are predetermined shared. */
94 if (TREE_CODE (decl) == PARM_DECL
95 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
96 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
97 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
98 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
99 == NULL)
100 return OMP_CLAUSE_DEFAULT_SHARED;
102 /* Dummy procedures aren't considered variables by OpenMP, thus are
103 disallowed in OpenMP clauses. They are represented as PARM_DECLs
104 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
105 to avoid complaining about their uses with default(none). */
106 if (TREE_CODE (decl) == PARM_DECL
107 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
108 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
109 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
111 /* COMMON and EQUIVALENCE decls are shared. They
112 are only referenced through DECL_VALUE_EXPR of the variables
113 contained in them. If those are privatized, they will not be
114 gimplified to the COMMON or EQUIVALENCE decls. */
115 if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
116 return OMP_CLAUSE_DEFAULT_SHARED;
118 if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
119 return OMP_CLAUSE_DEFAULT_SHARED;
121 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
124 /* Return decl that should be used when reporting DEFAULT(NONE)
125 diagnostics. */
127 tree
128 gfc_omp_report_decl (tree decl)
130 if (DECL_ARTIFICIAL (decl)
131 && DECL_LANG_SPECIFIC (decl)
132 && GFC_DECL_SAVED_DESCRIPTOR (decl))
133 return GFC_DECL_SAVED_DESCRIPTOR (decl);
135 return decl;
138 /* Return true if DECL in private clause needs
139 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
140 bool
141 gfc_omp_private_outer_ref (tree decl)
143 tree type = TREE_TYPE (decl);
145 if (GFC_DESCRIPTOR_TYPE_P (type)
146 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
147 return true;
149 return false;
152 /* Return code to initialize DECL with its default constructor, or
153 NULL if there's nothing to do. */
155 tree
156 gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
158 tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
159 stmtblock_t block, cond_block;
161 if (! GFC_DESCRIPTOR_TYPE_P (type)
162 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
163 return NULL;
165 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_REDUCTION)
166 return NULL;
168 gcc_assert (outer != NULL);
169 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
170 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);
 172   /* Allocatable arrays in PRIVATE clauses need to be set to a
 173      "not currently allocated" allocation status if the outer
 174      array is "not currently allocated"; otherwise they should be allocated.  */
175 gfc_start_block (&block);
177 gfc_init_block (&cond_block);
179 gfc_add_modify (&cond_block, decl, outer);
180 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
181 size = gfc_conv_descriptor_ubound_get (decl, rank);
182 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
183 size, gfc_conv_descriptor_lbound_get (decl, rank));
184 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
185 size, gfc_index_one_node);
186 if (GFC_TYPE_ARRAY_RANK (type) > 1)
187 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
188 size, gfc_conv_descriptor_stride_get (decl, rank));
189 esize = fold_convert (gfc_array_index_type,
190 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
191 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
192 size, esize);
193 size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
195 ptr = gfc_create_var (pvoid_type_node, NULL);
196 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
197 gfc_conv_descriptor_data_set (&cond_block, decl, ptr);
199 then_b = gfc_finish_block (&cond_block);
201 gfc_init_block (&cond_block);
202 gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
203 else_b = gfc_finish_block (&cond_block);
205 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
206 fold_convert (pvoid_type_node,
207 gfc_conv_descriptor_data_get (outer)),
208 null_pointer_node);
209 gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
210 void_type_node, cond, then_b, else_b));
212 return gfc_finish_block (&block);
215 /* Build and return code for a copy constructor from SRC to DEST. */
217 tree
218 gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
220 tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
221 tree cond, then_b, else_b;
222 stmtblock_t block, cond_block;
224 if (! GFC_DESCRIPTOR_TYPE_P (type)
225 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
226 return build2_v (MODIFY_EXPR, dest, src);
228 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);
230 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
231 and copied from SRC. */
232 gfc_start_block (&block);
234 gfc_init_block (&cond_block);
236 gfc_add_modify (&cond_block, dest, src);
237 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
238 size = gfc_conv_descriptor_ubound_get (dest, rank);
239 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
240 size, gfc_conv_descriptor_lbound_get (dest, rank));
241 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
242 size, gfc_index_one_node);
243 if (GFC_TYPE_ARRAY_RANK (type) > 1)
244 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
245 size, gfc_conv_descriptor_stride_get (dest, rank));
246 esize = fold_convert (gfc_array_index_type,
247 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
248 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
249 size, esize);
250 size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
252 ptr = gfc_create_var (pvoid_type_node, NULL);
253 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
254 gfc_conv_descriptor_data_set (&cond_block, dest, ptr);
256 call = build_call_expr_loc (input_location,
257 builtin_decl_explicit (BUILT_IN_MEMCPY),
258 3, ptr,
259 fold_convert (pvoid_type_node,
260 gfc_conv_descriptor_data_get (src)),
261 size);
262 gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
263 then_b = gfc_finish_block (&cond_block);
265 gfc_init_block (&cond_block);
266 gfc_conv_descriptor_data_set (&cond_block, dest, null_pointer_node);
267 else_b = gfc_finish_block (&cond_block);
269 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
270 fold_convert (pvoid_type_node,
271 gfc_conv_descriptor_data_get (src)),
272 null_pointer_node);
273 gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
274 void_type_node, cond, then_b, else_b));
276 return gfc_finish_block (&block);
279 /* Similarly, except use an assignment operator instead. */
281 tree
282 gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
284 tree type = TREE_TYPE (dest), rank, size, esize, call;
285 stmtblock_t block;
287 if (! GFC_DESCRIPTOR_TYPE_P (type)
288 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
289 return build2_v (MODIFY_EXPR, dest, src);
291 /* Handle copying allocatable arrays. */
292 gfc_start_block (&block);
294 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
295 size = gfc_conv_descriptor_ubound_get (dest, rank);
296 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
297 size, gfc_conv_descriptor_lbound_get (dest, rank));
298 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
299 size, gfc_index_one_node);
300 if (GFC_TYPE_ARRAY_RANK (type) > 1)
301 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
302 size, gfc_conv_descriptor_stride_get (dest, rank));
303 esize = fold_convert (gfc_array_index_type,
304 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
305 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
306 size, esize);
307 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
308 call = build_call_expr_loc (input_location,
309 builtin_decl_explicit (BUILT_IN_MEMCPY), 3,
310 fold_convert (pvoid_type_node,
311 gfc_conv_descriptor_data_get (dest)),
312 fold_convert (pvoid_type_node,
313 gfc_conv_descriptor_data_get (src)),
314 size);
315 gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
317 return gfc_finish_block (&block);
320 /* Build and return code destructing DECL. Return NULL if nothing
321 to be done. */
323 tree
324 gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED, tree decl)
326 tree type = TREE_TYPE (decl);
328 if (! GFC_DESCRIPTOR_TYPE_P (type)
329 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
330 return NULL;
332 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_REDUCTION)
333 return NULL;
335 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
336 to be deallocated if they were allocated. */
337 return gfc_trans_dealloc_allocated (decl, false, NULL);
 341 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
 342    disregarded in an OpenMP construct, because it is going to be
 343    remapped during OpenMP lowering.  SHARED is true if DECL
 344    is going to be shared, false if it is going to be privatized.  */
346 bool
347 gfc_omp_disregard_value_expr (tree decl, bool shared)
349 if (GFC_DECL_COMMON_OR_EQUIV (decl)
350 && DECL_HAS_VALUE_EXPR_P (decl))
352 tree value = DECL_VALUE_EXPR (decl);
354 if (TREE_CODE (value) == COMPONENT_REF
355 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
356 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
358 /* If variable in COMMON or EQUIVALENCE is privatized, return
359 true, as just that variable is supposed to be privatized,
360 not the whole COMMON or whole EQUIVALENCE.
361 For shared variables in COMMON or EQUIVALENCE, let them be
362 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
363 from the same COMMON or EQUIVALENCE just one sharing of the
364 whole COMMON or EQUIVALENCE is enough. */
365 return ! shared;
369 if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
370 return ! shared;
372 return false;
375 /* Return true if DECL that is shared iff SHARED is true should
376 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
377 flag set. */
379 bool
380 gfc_omp_private_debug_clause (tree decl, bool shared)
382 if (GFC_DECL_CRAY_POINTEE (decl))
383 return true;
385 if (GFC_DECL_COMMON_OR_EQUIV (decl)
386 && DECL_HAS_VALUE_EXPR_P (decl))
388 tree value = DECL_VALUE_EXPR (decl);
390 if (TREE_CODE (value) == COMPONENT_REF
391 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
392 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
393 return shared;
396 return false;
399 /* Register language specific type size variables as potentially OpenMP
400 firstprivate variables. */
402 void
403 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
405 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
407 int r;
409 gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
410 for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
412 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
413 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
414 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
416 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
417 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
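/* Chain the clause NODE in front of the clause list TAIL and return the
   new head of the list.  */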
422 static inline tree
423 gfc_trans_add_clause (tree node, tree tail)
425 OMP_CLAUSE_CHAIN (node) = tail;
426 return node;
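/* Return the backend decl to be used for symbol SYM in OpenMP clauses,
   mapping function results and alternate entry points to the fake
   result decl where necessary.  */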
429 static tree
430 gfc_trans_omp_variable (gfc_symbol *sym)
432 tree t = gfc_get_symbol_decl (sym);
433 tree parent_decl;
434 int parent_flag;
435 bool return_value;
436 bool alternate_entry;
437 bool entry_master;
439 return_value = sym->attr.function && sym->result == sym;
440 alternate_entry = sym->attr.function && sym->attr.entry
441 && sym->result == sym;
442 entry_master = sym->attr.result
443 && sym->ns->proc_name->attr.entry_master
444 && !gfc_return_by_reference (sym->ns->proc_name);
445 parent_decl = DECL_CONTEXT (current_function_decl);
447 if ((t == parent_decl && return_value)
448 || (sym->ns && sym->ns->proc_name
449 && sym->ns->proc_name->backend_decl == parent_decl
450 && (alternate_entry || entry_master)))
451 parent_flag = 1;
452 else
453 parent_flag = 0;
 455   /* Special case for assigning the return value of a function.
 456      Self-recursive functions must have an explicit return value.  */
457 if (return_value && (t == current_function_decl || parent_flag))
458 t = gfc_get_fake_result_decl (sym, parent_flag);
460 /* Similarly for alternate entry points. */
461 else if (alternate_entry
462 && (sym->ns->proc_name->backend_decl == current_function_decl
463 || parent_flag))
465 gfc_entry_list *el = NULL;
467 for (el = sym->ns->entries; el; el = el->next)
468 if (sym == el->sym)
470 t = gfc_get_fake_result_decl (sym, parent_flag);
471 break;
475 else if (entry_master
476 && (sym->ns->proc_name->backend_decl == current_function_decl
477 || parent_flag))
478 t = gfc_get_fake_result_decl (sym, parent_flag);
480 return t;
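/* Build a clause of kind CODE for every referenced symbol in NAMELIST
   and chain the new clauses onto LIST.  */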
483 static tree
484 gfc_trans_omp_variable_list (enum omp_clause_code code, gfc_namelist *namelist,
485 tree list)
487 for (; namelist != NULL; namelist = namelist->next)
488 if (namelist->sym->attr.referenced)
490 tree t = gfc_trans_omp_variable (namelist->sym);
491 if (t != error_mark_node)
493 tree node = build_omp_clause (input_location, code);
494 OMP_CLAUSE_DECL (node) = t;
495 list = gfc_trans_add_clause (node, list);
498 return list;
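/* Fill in OMP_CLAUSE_REDUCTION_INIT, OMP_CLAUSE_REDUCTION_MERGE and the
   reduction placeholder for an array reduction clause C on symbol SYM.
   Fake symbols and resolved Fortran expressions are used to build the
   initialization and merge assignments; allocatable arrays are
   allocated with the bounds of the outer variable and deallocated
   after the merge.  */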
501 static void
502 gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
504 gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
505 gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
506 gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
507 gfc_expr *e1, *e2, *e3, *e4;
508 gfc_ref *ref;
509 tree decl, backend_decl, stmt, type, outer_decl;
510 locus old_loc = gfc_current_locus;
511 const char *iname;
512 bool t;
514 decl = OMP_CLAUSE_DECL (c);
515 gfc_current_locus = where;
516 type = TREE_TYPE (decl);
517 outer_decl = create_tmp_var_raw (type, NULL);
518 if (TREE_CODE (decl) == PARM_DECL
519 && TREE_CODE (type) == REFERENCE_TYPE
520 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type))
521 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type)) == GFC_ARRAY_ALLOCATABLE)
523 decl = build_fold_indirect_ref (decl);
524 type = TREE_TYPE (type);
527 /* Create a fake symbol for init value. */
528 memset (&init_val_sym, 0, sizeof (init_val_sym));
529 init_val_sym.ns = sym->ns;
530 init_val_sym.name = sym->name;
531 init_val_sym.ts = sym->ts;
532 init_val_sym.attr.referenced = 1;
533 init_val_sym.declared_at = where;
534 init_val_sym.attr.flavor = FL_VARIABLE;
535 backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
536 init_val_sym.backend_decl = backend_decl;
538 /* Create a fake symbol for the outer array reference. */
539 outer_sym = *sym;
540 outer_sym.as = gfc_copy_array_spec (sym->as);
541 outer_sym.attr.dummy = 0;
542 outer_sym.attr.result = 0;
543 outer_sym.attr.flavor = FL_VARIABLE;
544 outer_sym.backend_decl = outer_decl;
545 if (decl != OMP_CLAUSE_DECL (c))
546 outer_sym.backend_decl = build_fold_indirect_ref (outer_decl);
548 /* Create fake symtrees for it. */
549 symtree1 = gfc_new_symtree (&root1, sym->name);
550 symtree1->n.sym = sym;
551 gcc_assert (symtree1 == root1);
553 symtree2 = gfc_new_symtree (&root2, sym->name);
554 symtree2->n.sym = &init_val_sym;
555 gcc_assert (symtree2 == root2);
557 symtree3 = gfc_new_symtree (&root3, sym->name);
558 symtree3->n.sym = &outer_sym;
559 gcc_assert (symtree3 == root3);
561 /* Create expressions. */
562 e1 = gfc_get_expr ();
563 e1->expr_type = EXPR_VARIABLE;
564 e1->where = where;
565 e1->symtree = symtree1;
566 e1->ts = sym->ts;
567 e1->ref = ref = gfc_get_ref ();
568 ref->type = REF_ARRAY;
569 ref->u.ar.where = where;
570 ref->u.ar.as = sym->as;
571 ref->u.ar.type = AR_FULL;
572 ref->u.ar.dimen = 0;
573 t = gfc_resolve_expr (e1);
574 gcc_assert (t);
576 e2 = gfc_get_expr ();
577 e2->expr_type = EXPR_VARIABLE;
578 e2->where = where;
579 e2->symtree = symtree2;
580 e2->ts = sym->ts;
581 t = gfc_resolve_expr (e2);
582 gcc_assert (t);
584 e3 = gfc_copy_expr (e1);
585 e3->symtree = symtree3;
586 t = gfc_resolve_expr (e3);
587 gcc_assert (t);
589 iname = NULL;
590 switch (OMP_CLAUSE_REDUCTION_CODE (c))
592 case PLUS_EXPR:
593 case MINUS_EXPR:
594 e4 = gfc_add (e3, e1);
595 break;
596 case MULT_EXPR:
597 e4 = gfc_multiply (e3, e1);
598 break;
599 case TRUTH_ANDIF_EXPR:
600 e4 = gfc_and (e3, e1);
601 break;
602 case TRUTH_ORIF_EXPR:
603 e4 = gfc_or (e3, e1);
604 break;
605 case EQ_EXPR:
606 e4 = gfc_eqv (e3, e1);
607 break;
608 case NE_EXPR:
609 e4 = gfc_neqv (e3, e1);
610 break;
611 case MIN_EXPR:
612 iname = "min";
613 break;
614 case MAX_EXPR:
615 iname = "max";
616 break;
617 case BIT_AND_EXPR:
618 iname = "iand";
619 break;
620 case BIT_IOR_EXPR:
621 iname = "ior";
622 break;
623 case BIT_XOR_EXPR:
624 iname = "ieor";
625 break;
626 default:
627 gcc_unreachable ();
629 if (iname != NULL)
631 memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
632 intrinsic_sym.ns = sym->ns;
633 intrinsic_sym.name = iname;
634 intrinsic_sym.ts = sym->ts;
635 intrinsic_sym.attr.referenced = 1;
636 intrinsic_sym.attr.intrinsic = 1;
637 intrinsic_sym.attr.function = 1;
638 intrinsic_sym.result = &intrinsic_sym;
639 intrinsic_sym.declared_at = where;
641 symtree4 = gfc_new_symtree (&root4, iname);
642 symtree4->n.sym = &intrinsic_sym;
643 gcc_assert (symtree4 == root4);
645 e4 = gfc_get_expr ();
646 e4->expr_type = EXPR_FUNCTION;
647 e4->where = where;
648 e4->symtree = symtree4;
649 e4->value.function.isym = gfc_find_function (iname);
650 e4->value.function.actual = gfc_get_actual_arglist ();
651 e4->value.function.actual->expr = e3;
652 e4->value.function.actual->next = gfc_get_actual_arglist ();
653 e4->value.function.actual->next->expr = e1;
655 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
656 e1 = gfc_copy_expr (e1);
657 e3 = gfc_copy_expr (e3);
658 t = gfc_resolve_expr (e4);
659 gcc_assert (t);
661 /* Create the init statement list. */
662 pushlevel ();
663 if (GFC_DESCRIPTOR_TYPE_P (type)
664 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
666 /* If decl is an allocatable array, it needs to be allocated
667 with the same bounds as the outer var. */
668 tree rank, size, esize, ptr;
669 stmtblock_t block;
671 gfc_start_block (&block);
673 gfc_add_modify (&block, decl, outer_sym.backend_decl);
674 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
675 size = gfc_conv_descriptor_ubound_get (decl, rank);
676 size = fold_build2_loc (input_location, MINUS_EXPR,
677 gfc_array_index_type, size,
678 gfc_conv_descriptor_lbound_get (decl, rank));
679 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
680 size, gfc_index_one_node);
681 if (GFC_TYPE_ARRAY_RANK (type) > 1)
682 size = fold_build2_loc (input_location, MULT_EXPR,
683 gfc_array_index_type, size,
684 gfc_conv_descriptor_stride_get (decl, rank));
685 esize = fold_convert (gfc_array_index_type,
686 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
687 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
688 size, esize);
689 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
691 ptr = gfc_create_var (pvoid_type_node, NULL);
692 gfc_allocate_using_malloc (&block, ptr, size, NULL_TREE);
693 gfc_conv_descriptor_data_set (&block, decl, ptr);
695 gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false,
696 false));
697 stmt = gfc_finish_block (&block);
699 else
700 stmt = gfc_trans_assignment (e1, e2, false, false);
701 if (TREE_CODE (stmt) != BIND_EXPR)
702 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
703 else
704 poplevel (0, 0);
705 OMP_CLAUSE_REDUCTION_INIT (c) = stmt;
707 /* Create the merge statement list. */
708 pushlevel ();
709 if (GFC_DESCRIPTOR_TYPE_P (type)
710 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
712 /* If decl is an allocatable array, it needs to be deallocated
713 afterwards. */
714 stmtblock_t block;
716 gfc_start_block (&block);
717 gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false,
718 true));
719 gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl, false,
720 NULL));
721 stmt = gfc_finish_block (&block);
723 else
724 stmt = gfc_trans_assignment (e3, e4, false, true);
725 if (TREE_CODE (stmt) != BIND_EXPR)
726 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
727 else
728 poplevel (0, 0);
729 OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;
731 /* And stick the placeholder VAR_DECL into the clause as well. */
732 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_decl;
734 gfc_current_locus = old_loc;
736 gfc_free_expr (e1);
737 gfc_free_expr (e2);
738 gfc_free_expr (e3);
739 gfc_free_expr (e4);
740 free (symtree1);
741 free (symtree2);
742 free (symtree3);
743 free (symtree4);
744 gfc_free_array_spec (outer_sym.as);
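/* Build OMP_CLAUSE_REDUCTION clauses with tree code REDUCTION_CODE for
   every referenced symbol in NAMELIST; array reductions additionally get
   their init/merge sequences from gfc_trans_omp_array_reduction.  */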
747 static tree
748 gfc_trans_omp_reduction_list (gfc_namelist *namelist, tree list,
749 enum tree_code reduction_code, locus where)
751 for (; namelist != NULL; namelist = namelist->next)
752 if (namelist->sym->attr.referenced)
754 tree t = gfc_trans_omp_variable (namelist->sym);
755 if (t != error_mark_node)
757 tree node = build_omp_clause (where.lb->location,
758 OMP_CLAUSE_REDUCTION);
759 OMP_CLAUSE_DECL (node) = t;
760 OMP_CLAUSE_REDUCTION_CODE (node) = reduction_code;
761 if (namelist->sym->attr.dimension)
762 gfc_trans_omp_array_reduction (node, namelist->sym, where);
763 list = gfc_trans_add_clause (node, list);
766 return list;
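/* Translate the parsed clause list CLAUSES of an OpenMP directive at
   WHERE into a chain of OMP_CLAUSE_* trees, evaluating clause
   expressions (IF, FINAL, NUM_THREADS, chunk size, ...) into BLOCK.  */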
769 static tree
770 gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
771 locus where)
773 tree omp_clauses = NULL_TREE, chunk_size, c;
774 int list;
775 enum omp_clause_code clause_code;
776 gfc_se se;
778 if (clauses == NULL)
779 return NULL_TREE;
781 for (list = 0; list < OMP_LIST_NUM; list++)
783 gfc_namelist *n = clauses->lists[list];
785 if (n == NULL)
786 continue;
787 if (list >= OMP_LIST_REDUCTION_FIRST
788 && list <= OMP_LIST_REDUCTION_LAST)
790 enum tree_code reduction_code;
791 switch (list)
793 case OMP_LIST_PLUS:
794 reduction_code = PLUS_EXPR;
795 break;
796 case OMP_LIST_MULT:
797 reduction_code = MULT_EXPR;
798 break;
799 case OMP_LIST_SUB:
800 reduction_code = MINUS_EXPR;
801 break;
802 case OMP_LIST_AND:
803 reduction_code = TRUTH_ANDIF_EXPR;
804 break;
805 case OMP_LIST_OR:
806 reduction_code = TRUTH_ORIF_EXPR;
807 break;
808 case OMP_LIST_EQV:
809 reduction_code = EQ_EXPR;
810 break;
811 case OMP_LIST_NEQV:
812 reduction_code = NE_EXPR;
813 break;
814 case OMP_LIST_MAX:
815 reduction_code = MAX_EXPR;
816 break;
817 case OMP_LIST_MIN:
818 reduction_code = MIN_EXPR;
819 break;
820 case OMP_LIST_IAND:
821 reduction_code = BIT_AND_EXPR;
822 break;
823 case OMP_LIST_IOR:
824 reduction_code = BIT_IOR_EXPR;
825 break;
826 case OMP_LIST_IEOR:
827 reduction_code = BIT_XOR_EXPR;
828 break;
829 default:
830 gcc_unreachable ();
832 omp_clauses
833 = gfc_trans_omp_reduction_list (n, omp_clauses, reduction_code,
834 where);
835 continue;
837 switch (list)
839 case OMP_LIST_PRIVATE:
840 clause_code = OMP_CLAUSE_PRIVATE;
841 goto add_clause;
842 case OMP_LIST_SHARED:
843 clause_code = OMP_CLAUSE_SHARED;
844 goto add_clause;
845 case OMP_LIST_FIRSTPRIVATE:
846 clause_code = OMP_CLAUSE_FIRSTPRIVATE;
847 goto add_clause;
848 case OMP_LIST_LASTPRIVATE:
849 clause_code = OMP_CLAUSE_LASTPRIVATE;
850 goto add_clause;
851 case OMP_LIST_COPYIN:
852 clause_code = OMP_CLAUSE_COPYIN;
853 goto add_clause;
854 case OMP_LIST_COPYPRIVATE:
855 clause_code = OMP_CLAUSE_COPYPRIVATE;
856 /* FALLTHROUGH */
857 add_clause:
858 omp_clauses
859 = gfc_trans_omp_variable_list (clause_code, n, omp_clauses);
860 break;
861 default:
862 break;
866 if (clauses->if_expr)
868 tree if_var;
870 gfc_init_se (&se, NULL);
871 gfc_conv_expr (&se, clauses->if_expr);
872 gfc_add_block_to_block (block, &se.pre);
873 if_var = gfc_evaluate_now (se.expr, block);
874 gfc_add_block_to_block (block, &se.post);
876 c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
877 OMP_CLAUSE_IF_EXPR (c) = if_var;
878 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
881 if (clauses->final_expr)
883 tree final_var;
885 gfc_init_se (&se, NULL);
886 gfc_conv_expr (&se, clauses->final_expr);
887 gfc_add_block_to_block (block, &se.pre);
888 final_var = gfc_evaluate_now (se.expr, block);
889 gfc_add_block_to_block (block, &se.post);
891 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FINAL);
892 OMP_CLAUSE_FINAL_EXPR (c) = final_var;
893 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
896 if (clauses->num_threads)
898 tree num_threads;
900 gfc_init_se (&se, NULL);
901 gfc_conv_expr (&se, clauses->num_threads);
902 gfc_add_block_to_block (block, &se.pre);
903 num_threads = gfc_evaluate_now (se.expr, block);
904 gfc_add_block_to_block (block, &se.post);
906 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
907 OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
908 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
911 chunk_size = NULL_TREE;
912 if (clauses->chunk_size)
914 gfc_init_se (&se, NULL);
915 gfc_conv_expr (&se, clauses->chunk_size);
916 gfc_add_block_to_block (block, &se.pre);
917 chunk_size = gfc_evaluate_now (se.expr, block);
918 gfc_add_block_to_block (block, &se.post);
921 if (clauses->sched_kind != OMP_SCHED_NONE)
923 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
924 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
925 switch (clauses->sched_kind)
927 case OMP_SCHED_STATIC:
928 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
929 break;
930 case OMP_SCHED_DYNAMIC:
931 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
932 break;
933 case OMP_SCHED_GUIDED:
934 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
935 break;
936 case OMP_SCHED_RUNTIME:
937 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
938 break;
939 case OMP_SCHED_AUTO:
940 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
941 break;
942 default:
943 gcc_unreachable ();
945 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
948 if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
950 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
951 switch (clauses->default_sharing)
953 case OMP_DEFAULT_NONE:
954 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
955 break;
956 case OMP_DEFAULT_SHARED:
957 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
958 break;
959 case OMP_DEFAULT_PRIVATE:
960 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
961 break;
962 case OMP_DEFAULT_FIRSTPRIVATE:
963 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
964 break;
965 default:
966 gcc_unreachable ();
968 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
971 if (clauses->nowait)
973 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
974 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
977 if (clauses->ordered)
979 c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
980 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
983 if (clauses->untied)
985 c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
986 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
989 if (clauses->mergeable)
991 c = build_omp_clause (where.lb->location, OMP_CLAUSE_MERGEABLE);
992 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
995 if (clauses->collapse)
997 c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
998 OMP_CLAUSE_COLLAPSE_EXPR (c)
999 = build_int_cst (integer_type_node, clauses->collapse);
1000 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1003 return omp_clauses;
1006 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
1008 static tree
1009 gfc_trans_omp_code (gfc_code *code, bool force_empty)
1011 tree stmt;
1013 pushlevel ();
1014 stmt = gfc_trans_code (code);
1015 if (TREE_CODE (stmt) != BIND_EXPR)
1017 if (!IS_EMPTY_STMT (stmt) || force_empty)
1019 tree block = poplevel (1, 0);
1020 stmt = build3_v (BIND_EXPR, NULL, stmt, block);
1022 else
1023 poplevel (0, 0);
1025 else
1026 poplevel (0, 0);
1027 return stmt;
1031 static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
1032 static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
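/* Translate an !$OMP ATOMIC construct (read, write, update or capture)
   into the corresponding OMP_ATOMIC* tree.  For an update such as

     !$omp atomic
       x = x + expr

   the right-hand side is rebuilt around an indirect reference to the
   address of X so the middle end can expand the operation atomically.  */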
1034 static tree
1035 gfc_trans_omp_atomic (gfc_code *code)
1037 gfc_code *atomic_code = code;
1038 gfc_se lse;
1039 gfc_se rse;
1040 gfc_se vse;
1041 gfc_expr *expr2, *e;
1042 gfc_symbol *var;
1043 stmtblock_t block;
1044 tree lhsaddr, type, rhs, x;
1045 enum tree_code op = ERROR_MARK;
1046 enum tree_code aop = OMP_ATOMIC;
1047 bool var_on_left = false;
1049 code = code->block->next;
1050 gcc_assert (code->op == EXEC_ASSIGN);
1051 var = code->expr1->symtree->n.sym;
1053 gfc_init_se (&lse, NULL);
1054 gfc_init_se (&rse, NULL);
1055 gfc_init_se (&vse, NULL);
1056 gfc_start_block (&block);
1058 expr2 = code->expr2;
1059 if (expr2->expr_type == EXPR_FUNCTION
1060 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
1061 expr2 = expr2->value.function.actual->expr;
1063 switch (atomic_code->ext.omp_atomic)
1065 case GFC_OMP_ATOMIC_READ:
1066 gfc_conv_expr (&vse, code->expr1);
1067 gfc_add_block_to_block (&block, &vse.pre);
1069 gfc_conv_expr (&lse, expr2);
1070 gfc_add_block_to_block (&block, &lse.pre);
1071 type = TREE_TYPE (lse.expr);
1072 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
1074 x = build1 (OMP_ATOMIC_READ, type, lhsaddr);
1075 x = convert (TREE_TYPE (vse.expr), x);
1076 gfc_add_modify (&block, vse.expr, x);
1078 gfc_add_block_to_block (&block, &lse.pre);
1079 gfc_add_block_to_block (&block, &rse.pre);
1081 return gfc_finish_block (&block);
1082 case GFC_OMP_ATOMIC_CAPTURE:
1083 aop = OMP_ATOMIC_CAPTURE_NEW;
1084 if (expr2->expr_type == EXPR_VARIABLE)
1086 aop = OMP_ATOMIC_CAPTURE_OLD;
1087 gfc_conv_expr (&vse, code->expr1);
1088 gfc_add_block_to_block (&block, &vse.pre);
1090 gfc_conv_expr (&lse, expr2);
1091 gfc_add_block_to_block (&block, &lse.pre);
1092 gfc_init_se (&lse, NULL);
1093 code = code->next;
1094 var = code->expr1->symtree->n.sym;
1095 expr2 = code->expr2;
1096 if (expr2->expr_type == EXPR_FUNCTION
1097 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
1098 expr2 = expr2->value.function.actual->expr;
1100 break;
1101 default:
1102 break;
1105 gfc_conv_expr (&lse, code->expr1);
1106 gfc_add_block_to_block (&block, &lse.pre);
1107 type = TREE_TYPE (lse.expr);
1108 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
1110 if (atomic_code->ext.omp_atomic == GFC_OMP_ATOMIC_WRITE)
1112 gfc_conv_expr (&rse, expr2);
1113 gfc_add_block_to_block (&block, &rse.pre);
1115 else if (expr2->expr_type == EXPR_OP)
1117 gfc_expr *e;
1118 switch (expr2->value.op.op)
1120 case INTRINSIC_PLUS:
1121 op = PLUS_EXPR;
1122 break;
1123 case INTRINSIC_TIMES:
1124 op = MULT_EXPR;
1125 break;
1126 case INTRINSIC_MINUS:
1127 op = MINUS_EXPR;
1128 break;
1129 case INTRINSIC_DIVIDE:
1130 if (expr2->ts.type == BT_INTEGER)
1131 op = TRUNC_DIV_EXPR;
1132 else
1133 op = RDIV_EXPR;
1134 break;
1135 case INTRINSIC_AND:
1136 op = TRUTH_ANDIF_EXPR;
1137 break;
1138 case INTRINSIC_OR:
1139 op = TRUTH_ORIF_EXPR;
1140 break;
1141 case INTRINSIC_EQV:
1142 op = EQ_EXPR;
1143 break;
1144 case INTRINSIC_NEQV:
1145 op = NE_EXPR;
1146 break;
1147 default:
1148 gcc_unreachable ();
1150 e = expr2->value.op.op1;
1151 if (e->expr_type == EXPR_FUNCTION
1152 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1153 e = e->value.function.actual->expr;
1154 if (e->expr_type == EXPR_VARIABLE
1155 && e->symtree != NULL
1156 && e->symtree->n.sym == var)
1158 expr2 = expr2->value.op.op2;
1159 var_on_left = true;
1161 else
1163 e = expr2->value.op.op2;
1164 if (e->expr_type == EXPR_FUNCTION
1165 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1166 e = e->value.function.actual->expr;
1167 gcc_assert (e->expr_type == EXPR_VARIABLE
1168 && e->symtree != NULL
1169 && e->symtree->n.sym == var);
1170 expr2 = expr2->value.op.op1;
1171 var_on_left = false;
1173 gfc_conv_expr (&rse, expr2);
1174 gfc_add_block_to_block (&block, &rse.pre);
1176 else
1178 gcc_assert (expr2->expr_type == EXPR_FUNCTION);
1179 switch (expr2->value.function.isym->id)
1181 case GFC_ISYM_MIN:
1182 op = MIN_EXPR;
1183 break;
1184 case GFC_ISYM_MAX:
1185 op = MAX_EXPR;
1186 break;
1187 case GFC_ISYM_IAND:
1188 op = BIT_AND_EXPR;
1189 break;
1190 case GFC_ISYM_IOR:
1191 op = BIT_IOR_EXPR;
1192 break;
1193 case GFC_ISYM_IEOR:
1194 op = BIT_XOR_EXPR;
1195 break;
1196 default:
1197 gcc_unreachable ();
1199 e = expr2->value.function.actual->expr;
1200 gcc_assert (e->expr_type == EXPR_VARIABLE
1201 && e->symtree != NULL
1202 && e->symtree->n.sym == var);
1204 gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
1205 gfc_add_block_to_block (&block, &rse.pre);
1206 if (expr2->value.function.actual->next->next != NULL)
1208 tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
1209 gfc_actual_arglist *arg;
1211 gfc_add_modify (&block, accum, rse.expr);
1212 for (arg = expr2->value.function.actual->next->next; arg;
1213 arg = arg->next)
1215 gfc_init_block (&rse.pre);
1216 gfc_conv_expr (&rse, arg->expr);
1217 gfc_add_block_to_block (&block, &rse.pre);
1218 x = fold_build2_loc (input_location, op, TREE_TYPE (accum),
1219 accum, rse.expr);
1220 gfc_add_modify (&block, accum, x);
1223 rse.expr = accum;
1226 expr2 = expr2->value.function.actual->next->expr;
1229 lhsaddr = save_expr (lhsaddr);
1230 rhs = gfc_evaluate_now (rse.expr, &block);
1232 if (atomic_code->ext.omp_atomic == GFC_OMP_ATOMIC_WRITE)
1233 x = rhs;
1234 else
1236 x = convert (TREE_TYPE (rhs),
1237 build_fold_indirect_ref_loc (input_location, lhsaddr));
1238 if (var_on_left)
1239 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), x, rhs);
1240 else
1241 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), rhs, x);
1244 if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
1245 && TREE_CODE (type) != COMPLEX_TYPE)
1246 x = fold_build1_loc (input_location, REALPART_EXPR,
1247 TREE_TYPE (TREE_TYPE (rhs)), x);
1249 gfc_add_block_to_block (&block, &lse.pre);
1250 gfc_add_block_to_block (&block, &rse.pre);
1252 if (aop == OMP_ATOMIC)
1254 x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
1255 gfc_add_expr_to_block (&block, x);
1257 else
1259 if (aop == OMP_ATOMIC_CAPTURE_NEW)
1261 code = code->next;
1262 expr2 = code->expr2;
1263 if (expr2->expr_type == EXPR_FUNCTION
1264 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
1265 expr2 = expr2->value.function.actual->expr;
1267 gcc_assert (expr2->expr_type == EXPR_VARIABLE);
1268 gfc_conv_expr (&vse, code->expr1);
1269 gfc_add_block_to_block (&block, &vse.pre);
1271 gfc_init_se (&lse, NULL);
1272 gfc_conv_expr (&lse, expr2);
1273 gfc_add_block_to_block (&block, &lse.pre);
1275 x = build2 (aop, type, lhsaddr, convert (type, x));
1276 x = convert (TREE_TYPE (vse.expr), x);
1277 gfc_add_modify (&block, vse.expr, x);
1280 return gfc_finish_block (&block);
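/* Translate !$OMP BARRIER into a call to GOMP_barrier.  */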
1283 static tree
1284 gfc_trans_omp_barrier (void)
1286 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
1287 return build_call_expr_loc (input_location, decl, 0);
1290 static tree
1291 gfc_trans_omp_critical (gfc_code *code)
1293 tree name = NULL_TREE, stmt;
1294 if (code->ext.omp_name != NULL)
1295 name = get_identifier (code->ext.omp_name);
1296 stmt = gfc_trans_code (code->block->next);
1297 return build2_loc (input_location, OMP_CRITICAL, void_type_node, stmt, name);
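/* Pair of a DO variable and the expression initializing it, collected so
   that the initializations can be emitted at the top of the loop body.  */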
1300 typedef struct dovar_init_d {
1301 tree var;
1302 tree init;
1303 } dovar_init;
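/* Translate an !$OMP DO loop (possibly collapsed over several nested DO
   loops) into an OMP_FOR node.  PBLOCK, if non-NULL, receives the clause
   setup code; PAR_CLAUSES are the clauses of an enclosing combined
   PARALLEL DO and are searched when the DO variable is lastprivate
   there.  */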
1306 static tree
1307 gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
1308 gfc_omp_clauses *do_clauses, tree par_clauses)
1310 gfc_se se;
1311 tree dovar, stmt, from, to, step, type, init, cond, incr;
1312 tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
1313 stmtblock_t block;
1314 stmtblock_t body;
1315 gfc_omp_clauses *clauses = code->ext.omp_clauses;
1316 int i, collapse = clauses->collapse;
1317 vec<dovar_init> inits = vNULL;
1318 dovar_init *di;
1319 unsigned ix;
1321 if (collapse <= 0)
1322 collapse = 1;
1324 code = code->block->next;
1325 gcc_assert (code->op == EXEC_DO);
1327 init = make_tree_vec (collapse);
1328 cond = make_tree_vec (collapse);
1329 incr = make_tree_vec (collapse);
1331 if (pblock == NULL)
1333 gfc_start_block (&block);
1334 pblock = &block;
1337 omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
1339 for (i = 0; i < collapse; i++)
1341 int simple = 0;
1342 int dovar_found = 0;
1343 tree dovar_decl;
1345 if (clauses)
1347 gfc_namelist *n;
1348 for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
1349 n = n->next)
1350 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1351 break;
1352 if (n != NULL)
1353 dovar_found = 1;
1354 else if (n == NULL)
1355 for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
1356 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1357 break;
1358 if (n != NULL)
1359 dovar_found++;
1362 /* Evaluate all the expressions in the iterator. */
1363 gfc_init_se (&se, NULL);
1364 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1365 gfc_add_block_to_block (pblock, &se.pre);
1366 dovar = se.expr;
1367 type = TREE_TYPE (dovar);
1368 gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
1370 gfc_init_se (&se, NULL);
1371 gfc_conv_expr_val (&se, code->ext.iterator->start);
1372 gfc_add_block_to_block (pblock, &se.pre);
1373 from = gfc_evaluate_now (se.expr, pblock);
1375 gfc_init_se (&se, NULL);
1376 gfc_conv_expr_val (&se, code->ext.iterator->end);
1377 gfc_add_block_to_block (pblock, &se.pre);
1378 to = gfc_evaluate_now (se.expr, pblock);
1380 gfc_init_se (&se, NULL);
1381 gfc_conv_expr_val (&se, code->ext.iterator->step);
1382 gfc_add_block_to_block (pblock, &se.pre);
1383 step = gfc_evaluate_now (se.expr, pblock);
1384 dovar_decl = dovar;
1386 /* Special case simple loops. */
1387 if (TREE_CODE (dovar) == VAR_DECL)
1389 if (integer_onep (step))
1390 simple = 1;
1391 else if (tree_int_cst_equal (step, integer_minus_one_node))
1392 simple = -1;
1394 else
1395 dovar_decl
1396 = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym);
1398 /* Loop body. */
1399 if (simple)
1401 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
1402 /* The condition should not be folded. */
1403 TREE_VEC_ELT (cond, i) = build2_loc (input_location, simple > 0
1404 ? LE_EXPR : GE_EXPR,
1405 boolean_type_node, dovar, to);
1406 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
1407 type, dovar, step);
1408 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
1409 MODIFY_EXPR,
1410 type, dovar,
1411 TREE_VEC_ELT (incr, i));
1413 else
1415 /* STEP is not 1 or -1. Use:
1416 for (count = 0; count < (to + step - from) / step; count++)
1418 dovar = from + count * step;
1419 body;
1420 cycle_label:;
1421 } */
1422 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, step, from);
1423 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, to, tmp);
1424 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, type, tmp,
1425 step);
1426 tmp = gfc_evaluate_now (tmp, pblock);
1427 count = gfc_create_var (type, "count");
1428 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
1429 build_int_cst (type, 0));
1430 /* The condition should not be folded. */
1431 TREE_VEC_ELT (cond, i) = build2_loc (input_location, LT_EXPR,
1432 boolean_type_node,
1433 count, tmp);
1434 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
1435 type, count,
1436 build_int_cst (type, 1));
1437 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
1438 MODIFY_EXPR, type, count,
1439 TREE_VEC_ELT (incr, i));
1441 /* Initialize DOVAR. */
1442 tmp = fold_build2_loc (input_location, MULT_EXPR, type, count, step);
1443 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, from, tmp);
1444 dovar_init e = {dovar, tmp};
1445 inits.safe_push (e);
1448 if (!dovar_found)
1450 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
1451 OMP_CLAUSE_DECL (tmp) = dovar_decl;
1452 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1454 else if (dovar_found == 2)
1456 tree c = NULL;
1458 tmp = NULL;
1459 if (!simple)
 1461               /* If dovar is lastprivate, but a different counter is used,
 1462                  dovar += step needs to be added to
 1463                  OMP_CLAUSE_LASTPRIVATE_STMT; otherwise the copied dovar
 1464                  will have its value on entry to the last iteration, rather
 1465                  than the value after the iterator increment.  */
1466 tmp = gfc_evaluate_now (step, pblock);
1467 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, dovar,
1468 tmp);
1469 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type,
1470 dovar, tmp);
1471 for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1472 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1473 && OMP_CLAUSE_DECL (c) == dovar_decl)
1475 OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
1476 break;
1479 if (c == NULL && par_clauses != NULL)
1481 for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1482 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1483 && OMP_CLAUSE_DECL (c) == dovar_decl)
1485 tree l = build_omp_clause (input_location,
1486 OMP_CLAUSE_LASTPRIVATE);
1487 OMP_CLAUSE_DECL (l) = dovar_decl;
1488 OMP_CLAUSE_CHAIN (l) = omp_clauses;
1489 OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
1490 omp_clauses = l;
1491 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
1492 break;
1495 gcc_assert (simple || c != NULL);
1497 if (!simple)
1499 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
1500 OMP_CLAUSE_DECL (tmp) = count;
1501 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1504 if (i + 1 < collapse)
1505 code = code->block->next;
1508 if (pblock != &block)
1510 pushlevel ();
1511 gfc_start_block (&block);
1514 gfc_start_block (&body);
1516 FOR_EACH_VEC_ELT (inits, ix, di)
1517 gfc_add_modify (&body, di->var, di->init);
1518 inits.release ();
 1520   /* The CYCLE statement is implemented with a goto.  An EXIT statement
 1521      must not be present for this loop.  */
1522 cycle_label = gfc_build_label_decl (NULL_TREE);
1524 /* Put these labels where they can be found later. */
1526 code->cycle_label = cycle_label;
1527 code->exit_label = NULL_TREE;
1529 /* Main loop body. */
1530 tmp = gfc_trans_omp_code (code->block->next, true);
1531 gfc_add_expr_to_block (&body, tmp);
1533 /* Label for cycle statements (if needed). */
1534 if (TREE_USED (cycle_label))
1536 tmp = build1_v (LABEL_EXPR, cycle_label);
1537 gfc_add_expr_to_block (&body, tmp);
1540 /* End of loop body. */
1541 stmt = make_node (OMP_FOR);
1543 TREE_TYPE (stmt) = void_type_node;
1544 OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
1545 OMP_FOR_CLAUSES (stmt) = omp_clauses;
1546 OMP_FOR_INIT (stmt) = init;
1547 OMP_FOR_COND (stmt) = cond;
1548 OMP_FOR_INCR (stmt) = incr;
1549 gfc_add_expr_to_block (&block, stmt);
1551 return gfc_finish_block (&block);
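/* Translate !$OMP FLUSH into a __sync_synchronize call.  */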
1554 static tree
1555 gfc_trans_omp_flush (void)
1557 tree decl = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
1558 return build_call_expr_loc (input_location, decl, 0);
1561 static tree
1562 gfc_trans_omp_master (gfc_code *code)
1564 tree stmt = gfc_trans_code (code->block->next);
1565 if (IS_EMPTY_STMT (stmt))
1566 return stmt;
1567 return build1_v (OMP_MASTER, stmt);
1570 static tree
1571 gfc_trans_omp_ordered (gfc_code *code)
1573 return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
1576 static tree
1577 gfc_trans_omp_parallel (gfc_code *code)
1579 stmtblock_t block;
1580 tree stmt, omp_clauses;
1582 gfc_start_block (&block);
1583 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1584 code->loc);
1585 stmt = gfc_trans_omp_code (code->block->next, true);
1586 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
1587 omp_clauses);
1588 gfc_add_expr_to_block (&block, stmt);
1589 return gfc_finish_block (&block);
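/* Translate !$OMP PARALLEL DO by splitting the clauses: schedule, chunk
   size, ordered and collapse go to the inner DO (which also gets an
   implicit NOWAIT), everything else stays on the PARALLEL.  */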
1592 static tree
1593 gfc_trans_omp_parallel_do (gfc_code *code)
1595 stmtblock_t block, *pblock = NULL;
1596 gfc_omp_clauses parallel_clauses, do_clauses;
1597 tree stmt, omp_clauses = NULL_TREE;
1599 gfc_start_block (&block);
1601 memset (&do_clauses, 0, sizeof (do_clauses));
1602 if (code->ext.omp_clauses != NULL)
1604 memcpy (&parallel_clauses, code->ext.omp_clauses,
1605 sizeof (parallel_clauses));
1606 do_clauses.sched_kind = parallel_clauses.sched_kind;
1607 do_clauses.chunk_size = parallel_clauses.chunk_size;
1608 do_clauses.ordered = parallel_clauses.ordered;
1609 do_clauses.collapse = parallel_clauses.collapse;
1610 parallel_clauses.sched_kind = OMP_SCHED_NONE;
1611 parallel_clauses.chunk_size = NULL;
1612 parallel_clauses.ordered = false;
1613 parallel_clauses.collapse = 0;
1614 omp_clauses = gfc_trans_omp_clauses (&block, &parallel_clauses,
1615 code->loc);
1617 do_clauses.nowait = true;
1618 if (!do_clauses.ordered && do_clauses.sched_kind != OMP_SCHED_STATIC)
1619 pblock = &block;
1620 else
1621 pushlevel ();
1622 stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses);
1623 if (TREE_CODE (stmt) != BIND_EXPR)
1624 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1625 else
1626 poplevel (0, 0);
1627 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
1628 omp_clauses);
1629 OMP_PARALLEL_COMBINED (stmt) = 1;
1630 gfc_add_expr_to_block (&block, stmt);
1631 return gfc_finish_block (&block);
1634 static tree
1635 gfc_trans_omp_parallel_sections (gfc_code *code)
1637 stmtblock_t block;
1638 gfc_omp_clauses section_clauses;
1639 tree stmt, omp_clauses;
1641 memset (&section_clauses, 0, sizeof (section_clauses));
1642 section_clauses.nowait = true;
1644 gfc_start_block (&block);
1645 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1646 code->loc);
1647 pushlevel ();
1648 stmt = gfc_trans_omp_sections (code, &section_clauses);
1649 if (TREE_CODE (stmt) != BIND_EXPR)
1650 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1651 else
1652 poplevel (0, 0);
1653 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
1654 omp_clauses);
1655 OMP_PARALLEL_COMBINED (stmt) = 1;
1656 gfc_add_expr_to_block (&block, stmt);
1657 return gfc_finish_block (&block);
1660 static tree
1661 gfc_trans_omp_parallel_workshare (gfc_code *code)
1663 stmtblock_t block;
1664 gfc_omp_clauses workshare_clauses;
1665 tree stmt, omp_clauses;
1667 memset (&workshare_clauses, 0, sizeof (workshare_clauses));
1668 workshare_clauses.nowait = true;
1670 gfc_start_block (&block);
1671 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1672 code->loc);
1673 pushlevel ();
1674 stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
1675 if (TREE_CODE (stmt) != BIND_EXPR)
1676 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1677 else
1678 poplevel (0, 0);
1679 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
1680 omp_clauses);
1681 OMP_PARALLEL_COMBINED (stmt) = 1;
1682 gfc_add_expr_to_block (&block, stmt);
1683 return gfc_finish_block (&block);
1686 static tree
1687 gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
1689 stmtblock_t block, body;
1690 tree omp_clauses, stmt;
1691 bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;
1693 gfc_start_block (&block);
1695 omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);
1697 gfc_init_block (&body);
1698 for (code = code->block; code; code = code->block)
1700 /* Last section is special because of lastprivate, so even if it
1701 is empty, chain it in. */
1702 stmt = gfc_trans_omp_code (code->next,
1703 has_lastprivate && code->block == NULL);
1704 if (! IS_EMPTY_STMT (stmt))
1706 stmt = build1_v (OMP_SECTION, stmt);
1707 gfc_add_expr_to_block (&body, stmt);
1710 stmt = gfc_finish_block (&body);
1712 stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
1713 omp_clauses);
1714 gfc_add_expr_to_block (&block, stmt);
1716 return gfc_finish_block (&block);
1719 static tree
1720 gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
1722 tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
1723 tree stmt = gfc_trans_omp_code (code->block->next, true);
1724 stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
1725 omp_clauses);
1726 return stmt;
1729 static tree
1730 gfc_trans_omp_task (gfc_code *code)
1732 stmtblock_t block;
1733 tree stmt, omp_clauses;
1735 gfc_start_block (&block);
1736 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1737 code->loc);
1738 stmt = gfc_trans_omp_code (code->block->next, true);
1739 stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
1740 omp_clauses);
1741 gfc_add_expr_to_block (&block, stmt);
1742 return gfc_finish_block (&block);
1745 static tree
1746 gfc_trans_omp_taskwait (void)
1748 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
1749 return build_call_expr_loc (input_location, decl, 0);
1752 static tree
1753 gfc_trans_omp_taskyield (void)
1755 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
1756 return build_call_expr_loc (input_location, decl, 0);
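/* Translate !$OMP WORKSHARE.  Statements that can be workshared (array
   assignments, FORALL, WHERE, ...) are translated with ompws_flags set;
   consecutive statements that each form a single unit of work are
   clustered and wrapped in an OMP_SINGLE construct instead.  */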
1759 static tree
1760 gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
1762 tree res, tmp, stmt;
1763 stmtblock_t block, *pblock = NULL;
1764 stmtblock_t singleblock;
1765 int saved_ompws_flags;
1766 bool singleblock_in_progress = false;
 1767   /* True if the previous gfc_code in the workshare construct is not workshared.  */
1768 bool prev_singleunit;
1770 code = code->block->next;
1772 pushlevel ();
1774 gfc_start_block (&block);
1775 pblock = &block;
1777 ompws_flags = OMPWS_WORKSHARE_FLAG;
1778 prev_singleunit = false;
1780 /* Translate statements one by one to trees until we reach
1781 the end of the workshare construct. Adjacent gfc_codes that
1782 are a single unit of work are clustered and encapsulated in a
1783 single OMP_SINGLE construct. */
1784 for (; code; code = code->next)
1786 if (code->here != 0)
1788 res = gfc_trans_label_here (code);
1789 gfc_add_expr_to_block (pblock, res);
1792 /* No dependence analysis, use for clauses with wait.
1793 If this is the last gfc_code, use default omp_clauses. */
1794 if (code->next == NULL && clauses->nowait)
1795 ompws_flags |= OMPWS_NOWAIT;
1797 /* By default, every gfc_code is a single unit of work. */
1798 ompws_flags |= OMPWS_CURR_SINGLEUNIT;
1799 ompws_flags &= ~OMPWS_SCALARIZER_WS;
1801 switch (code->op)
1803 case EXEC_NOP:
1804 res = NULL_TREE;
1805 break;
1807 case EXEC_ASSIGN:
1808 res = gfc_trans_assign (code);
1809 break;
1811 case EXEC_POINTER_ASSIGN:
1812 res = gfc_trans_pointer_assign (code);
1813 break;
1815 case EXEC_INIT_ASSIGN:
1816 res = gfc_trans_init_assign (code);
1817 break;
1819 case EXEC_FORALL:
1820 res = gfc_trans_forall (code);
1821 break;
1823 case EXEC_WHERE:
1824 res = gfc_trans_where (code);
1825 break;
1827 case EXEC_OMP_ATOMIC:
1828 res = gfc_trans_omp_directive (code);
1829 break;
1831 case EXEC_OMP_PARALLEL:
1832 case EXEC_OMP_PARALLEL_DO:
1833 case EXEC_OMP_PARALLEL_SECTIONS:
1834 case EXEC_OMP_PARALLEL_WORKSHARE:
1835 case EXEC_OMP_CRITICAL:
1836 saved_ompws_flags = ompws_flags;
1837 ompws_flags = 0;
1838 res = gfc_trans_omp_directive (code);
1839 ompws_flags = saved_ompws_flags;
1840 break;
1842 default:
1843 internal_error ("gfc_trans_omp_workshare(): Bad statement code");
1846 gfc_set_backend_locus (&code->loc);
1848 if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
1850 if (prev_singleunit)
1852 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
1853 /* Add current gfc_code to single block. */
1854 gfc_add_expr_to_block (&singleblock, res);
1855 else
1857 /* Finish single block and add it to pblock. */
1858 tmp = gfc_finish_block (&singleblock);
1859 tmp = build2_loc (input_location, OMP_SINGLE,
1860 void_type_node, tmp, NULL_TREE);
1861 gfc_add_expr_to_block (pblock, tmp);
1862 /* Add current gfc_code to pblock. */
1863 gfc_add_expr_to_block (pblock, res);
1864 singleblock_in_progress = false;
1867 else
1869 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
1871 /* Start single block. */
1872 gfc_init_block (&singleblock);
1873 gfc_add_expr_to_block (&singleblock, res);
1874 singleblock_in_progress = true;
1876 else
1877 /* Add the new statement to the block. */
1878 gfc_add_expr_to_block (pblock, res);
1880 prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
1884 /* Finish remaining SINGLE block, if we were in the middle of one. */
1885 if (singleblock_in_progress)
1887 /* Finish single block and add it to pblock. */
1888 tmp = gfc_finish_block (&singleblock);
1889 tmp = build2_loc (input_location, OMP_SINGLE, void_type_node, tmp,
1890 clauses->nowait
1891 ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
1892 : NULL_TREE);
1893 gfc_add_expr_to_block (pblock, tmp);
1896 stmt = gfc_finish_block (pblock);
1897 if (TREE_CODE (stmt) != BIND_EXPR)
1899 if (!IS_EMPTY_STMT (stmt))
1901 tree bindblock = poplevel (1, 0);
1902 stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
1904 else
1905 poplevel (0, 0);
1907 else
1908 poplevel (0, 0);
1910 if (IS_EMPTY_STMT (stmt) && !clauses->nowait)
1911 stmt = gfc_trans_omp_barrier ();
1913 ompws_flags = 0;
1914 return stmt;
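/* Translate an OpenMP executable directive CODE by dispatching to the
   corresponding helper above.  */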
1917 tree
1918 gfc_trans_omp_directive (gfc_code *code)
1920 switch (code->op)
1922 case EXEC_OMP_ATOMIC:
1923 return gfc_trans_omp_atomic (code);
1924 case EXEC_OMP_BARRIER:
1925 return gfc_trans_omp_barrier ();
1926 case EXEC_OMP_CRITICAL:
1927 return gfc_trans_omp_critical (code);
1928 case EXEC_OMP_DO:
1929 return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses, NULL);
1930 case EXEC_OMP_FLUSH:
1931 return gfc_trans_omp_flush ();
1932 case EXEC_OMP_MASTER:
1933 return gfc_trans_omp_master (code);
1934 case EXEC_OMP_ORDERED:
1935 return gfc_trans_omp_ordered (code);
1936 case EXEC_OMP_PARALLEL:
1937 return gfc_trans_omp_parallel (code);
1938 case EXEC_OMP_PARALLEL_DO:
1939 return gfc_trans_omp_parallel_do (code);
1940 case EXEC_OMP_PARALLEL_SECTIONS:
1941 return gfc_trans_omp_parallel_sections (code);
1942 case EXEC_OMP_PARALLEL_WORKSHARE:
1943 return gfc_trans_omp_parallel_workshare (code);
1944 case EXEC_OMP_SECTIONS:
1945 return gfc_trans_omp_sections (code, code->ext.omp_clauses);
1946 case EXEC_OMP_SINGLE:
1947 return gfc_trans_omp_single (code, code->ext.omp_clauses);
1948 case EXEC_OMP_TASK:
1949 return gfc_trans_omp_task (code);
1950 case EXEC_OMP_TASKWAIT:
1951 return gfc_trans_omp_taskwait ();
1952 case EXEC_OMP_TASKYIELD:
1953 return gfc_trans_omp_taskyield ();
1954 case EXEC_OMP_WORKSHARE:
1955 return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
1956 default:
1957 gcc_unreachable ();